From 4cc6decf369dd6720ab59bf4125d3444237bf0aa Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Tue, 18 Feb 2014 08:41:29 -0500
Subject: [PATCH 01/48] Add first draft of new ingest module interfaces

---
 .../ingest/DataSourceIngestModule.java        | 29 ++++++++++++
 .../autopsy/ingest/FileIngestModule.java      | 29 ++++++++++++
 .../autopsy/ingest/IngestModule.java          | 46 +++++++++++++++++++
 3 files changed, 104 insertions(+)
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
new file mode 100755
index 0000000000..9eaec70738
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
@@ -0,0 +1,29 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ingest;
+
+import org.sleuthkit.datamodel.Content;
+
+/**
+ * Interface that must be implemented by all data source ingest modules.
+ */
+public interface DataSourceIngestModule extends IngestModule {
+    void process(Content dataSource);
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
new file mode 100755
index 0000000000..b594284c06
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
@@ -0,0 +1,29 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ingest;
+
+import org.sleuthkit.datamodel.AbstractFile;
+
+/**
+ * Interface that must be implemented by all file ingest modules.
+ */
+public interface FileIngestModule {
+    void process(AbstractFile file);
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
new file mode 100755
index 0000000000..baf9c3e478
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
@@ -0,0 +1,46 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ingest;
+
+/**
+ * Interface that must be implemented by all ingest modules.
+ */
+public interface IngestModule {
+    /**
+     * Called to allow an ingest module to initialize itself before commencing
+     * processing.
+     *
+     * @param schedulingToken A module that uses the scheduling service to
+     * schedule additional processing needs to supply its scheduling token to
+     * the scheduler. For example, a module that extracts files from an archive
+     * may schedule ingest of those files using the module's scheduling token.
+     */
+    void init(int schedulingToken);
+
+    /**
+     * RJCTODO
+     */
+    void complete();
+
+    /**
+     * RJCTODO
+     */
+    void stop();
+}
From 3b23d6e4cd18090cf6f076213f8e074cb7f7757a Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Wed, 19 Feb 2014 17:54:59 -0500
Subject: [PATCH 02/48] Completed first draft of interfaces for parallel file
 ingest

---
 .../ingest/DataSourceIngestModule.java        |   9 +-
 .../autopsy/ingest/FileIngestModule.java      |   5 +
 .../autopsy/ingest/IngestModule.java          |  37 ++--
 .../autopsy/ingest/IngestModuleFactory.java   | 164 ++++++++++++++++++
 4 files changed, 202 insertions(+), 13 deletions(-)
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
index 9eaec70738..210ca983fb 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
@@ -25,5 +25,12 @@ import org.sleuthkit.datamodel.Content;
  * Interface that must be implemented by all data source ingest modules.
  */
 public interface DataSourceIngestModule extends IngestModule {
-    void process(Content dataSource);
+
+    /**
+     * Process a data source.
+     * @param dataSource The data source to process.
+     * @param statusHelper A status helper to be used to report progress and
+     * detect task cancellation.
+     */
+    void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
index b594284c06..c77eb61db0 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
@@ -25,5 +25,10 @@ import org.sleuthkit.datamodel.AbstractFile;
  * Interface that must be implemented by all file ingest modules.
  */
 public interface FileIngestModule {
+
+    /**
+     * Process a file.
+     * @param file The file to process.
+     */
     void process(AbstractFile file);
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
index baf9c3e478..4a2653effc 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
@@ -23,24 +23,37 @@ package org.sleuthkit.autopsy.ingest;
  * Interface that must be implemented by all ingest modules.
  */
 public interface IngestModule {
-    /**
-     * Called to allow an ingest module to initialize itself before commencing
-     * processing.
-     *
-     * @param schedulingToken A module that uses the scheduling service to
-     * schedule additional processing needs to supply its scheduling token to
-     * the scheduler. For example, a module that extracts files from an archive
-     * may schedule ingest of those files using the module's scheduling token.
-     */
-    void init(int schedulingToken);
 
     /**
-     * RJCTODO
+     * Invoked to allow an ingest module to set up internal data structures and
+     * acquire any private resources it will need during a single ingest of a
+     * particular data source. IMPORTANT: There will usually be more than one
+     * instance of a module executing, but it is guaranteed that there will be
+     * no more than one instance of the module per thread. However, if these
+     * instances must share resources, the modules are responsible for
+     * synchronizing access to the shared resources and doing reference counting
+     * as required to release the resources correctly.
+     * @param dataSourceTaskId A module that uses the scheduling service to
+     * schedule additional processing needs to supply its data source task ID to
+     * the scheduler. For example, a module that extracts files from an archive
+     * discovered in a data source may schedule ingest of those files using the
+     * data source task ID.
+     */
+    void init(long dataSourceTaskId);
+
+    /**
+     * Invoked when a single ingest of a particular data source is completed.
+     * The module should tear down internal data structures, release private
+     * resources, submit final results, and post a final ingest message. The
+     * module will be discarded when this method returns.
     */
     void complete();
 
     /**
-     * RJCTODO
+     * Invoked when a single ingest of a particular data source is canceled.
+     * The module should tear down internal data structures and release private
+     * resources, discard unsubmitted results, and post a final ingest message.
+     * The module will be discarded when this method returns.
     */
     void stop();
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java
new file mode 100755
index 0000000000..da252fe91b
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java
@@ -0,0 +1,164 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.sleuthkit.autopsy.ingest; + +import java.io.Serializable; +import javax.swing.JPanel; + +/** + * An interface that must be implemented by all providers of ingest modules. + * An IngestModuleFactory will be used as a stateless source of one or more + * instances of a family of configurable ingest modules. IngestModuleFactory + * implementations must be marked with the NetBeans Service provider annotation + * as follows: + * @ServiceProvider(service=IngestModuleFactory.class) + */ +public interface IngestModuleFactory { + + class InvalidOptionsException extends Exception { + public InvalidOptionsException(String message) { + super(message); + } + } + + /** + * Gets the display name that identifies the family of ingest modules the + * factory creates. + * @return The module display name as a string. + */ + String getModuleDisplayName(); + + /** + * Gets a brief, user-friendly description of the family of ingest modules + * the factory creates. + * @return The module description as a string. + */ + String getModuleDescription(); + + /** + * Gets the version number of the family of ingest modules the factory + * creates. + * @return The module version number as a string. + */ + String getModuleVersionNumber(); + + /** + * Gets the default ingest options for instances of the family of ingest + * modules the factory creates. Ingest options are serializable to support + * the persistence of possibly different options for different module + * execution contexts. + * @return The ingest options in serializable form. + */ + Serializable getDefaultIngestOptions(); + + /** + * Queries the factory to determine if it provides user interface panels + * that can be used to specify the ingest options for instances of the + * family of ingest modules the factory creates. + * @return True if the factory provides ingest options panels. + */ + boolean providesIngestOptionsPanels(); + + /** + * Gets a user interface panel that can be used to specify the ingest + * options for instances of the family of ingest modules the factory + * creates. + * @param ingestOptions A set of ingest options to be used to initialize the + * panel. + * @return A user interface panel. It is assumed that the factory is + * stateless and will not hold a reference to the panel. + */ + JPanel getIngestOptionsPanel(Serializable ingestOptions); + + /** + * Gets ingest options for instances of the family of ingest modules the + * factory creates from an ingest options panel. Ingest options are + * serializable to support the persistence of possibly different options for + * different module execution contexts. + * @param ingestOptionsPanel The ingest options panel. + * @return The ingest options from the panel in serializable form. + * @throws org.sleuthkit.autopsy.ingest.IngestModuleFactory.InvalidOptionsException + */ + Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws InvalidOptionsException; + + /** + * Queries the factory to determine if it provides user interface panels + * that can be used to specify global options for all instances of the + * family of ingest modules the factory creates. + * @return True if the factory provides global options panels. + */ + boolean providesGlobalOptionsPanels(); + + /** + * Gets a user interface panel that can be used to specify the global + * options for all instances of the family of ingest modules the factory + * creates. 
PLEASE TAKE NOTICE: The factory should initialize the panel from
+     * its own persistence of global options to disk in the directory returned
+     * by PlatformUtil.getUserConfigDirectory(). In the future, this method will
+     * be deprecated and the factory will be expected to receive global options
+     * in serializable form.
+     * @return A user interface panel. It is assumed that the factory is
+     * stateless and will not hold a reference to the panel.
+     */
+    JPanel getGlobalOptionsPanel();
+
+    /**
+     * Gets the global options for instances of the family of ingest modules
+     * the factory creates from a global options panel and saves the options to
+     * persistent storage on disk in the directory returned by
+     * PlatformUtil.getUserConfigDirectory(). PLEASE TAKE NOTICE: In the future,
+     * this method will be deprecated and the factory will be expected to supply
+     * global options in serializable form in a getGlobalOptionsFromPanel()
+     * method.
+     * @param globalOptionsPanel
+     * @throws org.sleuthkit.autopsy.ingest.IngestModuleFactory.InvalidOptionsException
+     */
+    void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws InvalidOptionsException;
+
+    /**
+     * Queries the factory to determine if it is capable of creating data source
+     * ingest modules.
+     * @return True if the factory can create data source ingest modules.
+     */
+    boolean isDataSourceIngestModuleFactory();
+
+    /**
+     * Creates a data source ingest module.
+     * @param ingestOptions The ingest options to use to create the module.
+     * @return An instance of a data source ingest module created using the
+     * provided ingest options.
+     */
+    DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws InvalidOptionsException;
+
+    /**
+     * Queries the factory to determine if it is capable of creating file ingest
+     * modules.
+     * @return True if the factory can create file ingest modules.
+     */
+    boolean isFileIngestModuleFactory();
+
+    /**
+     * Creates a file ingest module.
+     * @param ingestOptions The ingest options to use to create the module.
+     * @return An instance of a file ingest module created using the
+     * provided ingest options.
+     */
+    FileIngestModule createFileIngestModule(Serializable ingestOptions) throws InvalidOptionsException;
+}
From 767d57689111a81fbeddc4d34e9c13f00e3aad54 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Wed, 19 Feb 2014 18:19:04 -0500
Subject: [PATCH 03/48] Added new AbstractIngestModuleFactory class to ingest
 package

---
 .../ingest/AbstractIngestModuleFactory.java   | 98 +++++++++++++++++++
 1 file changed, 98 insertions(+)
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/AbstractIngestModuleFactory.java

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/AbstractIngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/ingest/AbstractIngestModuleFactory.java
new file mode 100755
index 0000000000..a7197447f0
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/AbstractIngestModuleFactory.java
@@ -0,0 +1,98 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ingest;
+
+import java.io.Serializable;
+import javax.swing.JPanel;
+
+/**
+ * An abstract class that provides no-op implementations of various
+ * IngestModuleFactory methods. Provided for the convenience of ingest module
+ * developers.
+ */
+public abstract class AbstractIngestModuleFactory implements IngestModuleFactory {
+
+    @Override
+    public abstract String getModuleDisplayName();
+
+    @Override
+    public abstract String getModuleDescription();
+
+    @Override
+    public abstract String getModuleVersionNumber();
+
+    @Override
+    public Serializable getDefaultIngestOptions() {
+        return new EmptyIngestOptions();
+    }
+
+    @Override
+    public boolean providesIngestOptionsPanels() {
+        return false;
+    }
+
+    @Override
+    public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws InvalidOptionsException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean providesGlobalOptionsPanels() {
+        return false;
+    }
+
+    @Override
+    public JPanel getGlobalOptionsPanel() {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws InvalidOptionsException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean isDataSourceIngestModuleFactory() {
+        return false;
+    }
+
+    @Override
+    public DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws InvalidOptionsException {
+        throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return false;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws InvalidOptionsException {
+        throw new UnsupportedOperationException();
+    }
+
+    public static class EmptyIngestOptions implements Serializable {
+    }
+}
From d2e2ee5cc5a0b99619152cac67aec3dc0a25f084 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Wed, 19 Feb 2014 18:21:29 -0500
Subject: [PATCH 04/48] Add skeleton of HashLookupModuleFactory to HashDatabase nbm

---
 .../hashdatabase/HashLookupModuleFactory.java | 49 +++++++++++++++++++
 1 file changed, 49 insertions(+)
 create mode 100755 HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java

diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
new file mode 100755
index 0000000000..157b4ade5e
--- /dev/null
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
@@ -0,0 +1,49 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.hashdatabase;
+
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates file ingest modules that do hash database lookups.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class HashLookupModuleFactory extends AbstractIngestModuleFactory {
+    private final static String MODULE_NAME = "Hash Lookup";
+    private final static String MODULE_DESCRIPTION = "Identifies known and notable files using supplied hash databases, such as a standard NSRL database.";
+
+    @Override
+    public String getModuleDisplayName() {
+        return MODULE_NAME;
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return MODULE_DESCRIPTION;
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+}
From e317129722e822333ca6b73c09131119034427fa Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Thu, 20 Feb 2014 12:30:16 -0500
Subject: [PATCH 05/48] Complete method stubs for HashLookupModuleFactory

---
 .../hashdatabase/HashLookupModuleFactory.java | 80 +++++++++++++++++--
 1 file changed, 74 insertions(+), 6 deletions(-)

diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
index 157b4ade5e..bea51cc485 100755
--- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
@@ -19,31 +19,99 @@
 
 package org.sleuthkit.autopsy.hashdatabase;
 
+import java.io.Serializable;
+import java.util.ArrayList;
+import javax.swing.JPanel;
+import org.openide.util.NbBundle;
 import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.coreutils.Version;
 import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
 
 /**
 * A factory that creates file ingest modules that do hash database lookups.
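+ * It also supplies the module's ingest options panel and global options
+ * panel (a brief added doc line; the panel methods implemented below are
+ * what it summarizes).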
 */
 @ServiceProvider(service=IngestModuleFactory.class)
-public class HashLookupModuleFactory extends AbstractIngestModuleFactory {
-    private final static String MODULE_NAME = "Hash Lookup";
-    private final static String MODULE_DESCRIPTION = "Identifies known and notable files using supplied hash databases, such as a standard NSRL database.";
-
+public class HashLookupModuleFactory extends AbstractIngestModuleFactory {
    @Override
    public String getModuleDisplayName() {
-        return MODULE_NAME;
+        return NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName");
    }

    @Override
    public String getModuleDescription() {
-        return MODULE_DESCRIPTION;
+        return NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleDescription");
    }

    @Override
    public String getModuleVersionNumber() {
        return Version.getVersion();
    }
+
+    @Override
+    public Serializable getDefaultIngestOptions() {
+        return new IngestOptions();
+    }
+
+    @Override
+    public boolean providesIngestOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
+        HashDbSimpleConfigPanel ingestOptionsPanel = new HashDbSimpleConfigPanel();
+        ingestOptionsPanel.load();
+        return ingestOptionsPanel;
+    }
+
+    @Override
+    public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws InvalidOptionsException {
+        if (!(ingestOptionsPanel instanceof HashDbSimpleConfigPanel)) {
+            throw new InvalidOptionsException(""); // RJCTODO
+        }
+
+        HashDbSimpleConfigPanel panel = (HashDbSimpleConfigPanel)ingestOptionsPanel;
+        panel.store();
+
+        return new IngestOptions(); // RJCTODO
+    }
+
+    @Override
+    public boolean providesGlobalOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getGlobalOptionsPanel() {
+        HashDbConfigPanel globalOptionsPanel = new HashDbConfigPanel();
+        globalOptionsPanel.load();
+        return globalOptionsPanel;
+    }
+
+    @Override
+    public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws InvalidOptionsException {
+        if (!(globalOptionsPanel instanceof HashDbConfigPanel)) {
+            throw new InvalidOptionsException(""); // RJCTODO
+        }
+
+        HashDbConfigPanel panel = (HashDbConfigPanel)globalOptionsPanel;
+        panel.store();
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws InvalidOptionsException {
+        return null; // RJCTODO
+    }
+
+    private static class IngestOptions implements Serializable {
+        boolean alwaysCalcHashes = true;
+        ArrayList<String> hashSetNames = new ArrayList<>();
+    }
}
From eb2a08f4c6ead736e6a42f01425eb9115d01ea38 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Thu, 20 Feb 2014 12:41:42 -0500
Subject: [PATCH 06/48] Make hash lookup ingest module implement FileIngestModule
 interface

---
 .../autopsy/ingest/FileIngestModule.java      |  2 +-
 .../hashdatabase/HashDbIngestModule.java      | 43 ++++++++++++++++++-
 2 files changed, 42 insertions(+), 3 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
index c77eb61db0..1caf184d67 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
@@ -24,7 +24,7 @@ import org.sleuthkit.datamodel.AbstractFile;
 
 /**
  * Interface that must be implemented by all file ingest modules.
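+ * Extending IngestModule gives file ingest modules the shared
+ * init/complete/stop lifecycle.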
*/ -public interface FileIngestModule { +public interface FileIngestModule extends IngestModule { /** * Process a file. diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 412b67d4b2..8910292525 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -45,9 +45,10 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskException; import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb; +import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.datamodel.HashInfo; -public class HashDbIngestModule extends IngestModuleAbstractFile { +public class HashDbIngestModule extends IngestModuleAbstractFile implements FileIngestModule { private static HashDbIngestModule instance = null; public final static String MODULE_NAME = NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName"); @@ -69,7 +70,7 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { static long lookuptime = 0; private final Hash hasher = new Hash(); - private HashDbIngestModule() { + HashDbIngestModule() { } public static synchronized HashDbIngestModule getDefault() { @@ -144,6 +145,34 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { } } + @Override + public void init(long dataSourceTaskId) { + services = IngestServices.getDefault(); + skCase = Case.getCurrentCase().getSleuthkitCase(); + + HashDbManager hashDbManager = HashDbManager.getInstance(); + getHashSetsUsableForIngest(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); + getHashSetsUsableForIngest(hashDbManager.getKnownFileHashSets(), knownHashSets); + calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes(); + + if (knownHashSets.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage(++messageId, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); + } + if (knownBadHashSets.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage(++messageId, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownBadHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); + } + } + @Override public void init(IngestModuleInit initContext) { services = IngestServices.getDefault(); @@ -195,6 +224,16 @@ public class HashDbIngestModule extends IngestModuleAbstractFile { return false; } + @Override + public void process(AbstractFile file) { + // Skip unallocated space files. 
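+        // Unallocated space is ingested as large carved blocks; hashing those
+        // blocks is costly, and known-file hash lookups on them are not
+        // meaningful.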
+        if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
+            return;
+        }
+
+        processFile(file);
+    }
+
    @Override
    public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile file) {
        //skip unalloc
From 8977aa97a48b01c4b389bae3242849cdb3ec1a56 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Thu, 20 Feb 2014 13:01:34 -0500
Subject: [PATCH 07/48] Made HashLookupModuleFactory able to create a hash lookup
 module instance

---
 .../sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
index bea51cc485..cd02323ee1 100755
--- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java
@@ -107,7 +107,7 @@ public class HashLookupModuleFactory extends AbstractIngestModuleFactory {
 
    @Override
    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws InvalidOptionsException {
-        return null; // RJCTODO
+        return new HashDbIngestModule();
    }
From fef91bca201502dcdfdb4e97e8ddfcba52600122 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Thu, 20 Feb 2014 13:14:30 -0500
Subject: [PATCH 08/48] Make HashDbIngestModule implement FileIngestModule
 instead of extend IngestModuleAbstractFile

---
 .../hashdatabase/HashDbIngestModule.java      | 289 +++++------------
 1 file changed, 82 insertions(+), 207 deletions(-)

diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
index 8910292525..c18916b250 100644
--- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
+++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java
@@ -23,15 +23,10 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.logging.Level;
-
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.Version;
-import org.sleuthkit.autopsy.ingest.PipelineContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
-import org.sleuthkit.autopsy.ingest.IngestModuleInit;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.datamodel.AbstractFile;
@@ -48,17 +43,9 @@ import org.sleuthkit.datamodel.TskData;
 import org.sleuthkit.datamodel.TskException;
 import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.datamodel.HashInfo;
 
-public class HashDbIngestModule extends IngestModuleAbstractFile implements FileIngestModule {
-    private static HashDbIngestModule instance = null;
-    public final static String MODULE_NAME = NbBundle.getMessage(HashDbIngestModule.class,
-            "HashDbIngestModule.moduleName");
-    public final static String MODULE_DESCRIPTION = NbBundle.getMessage(HashDbIngestModule.class,
-            "HashDbIngestModule.moduleDescription");
-    final public static String MODULE_VERSION = Version.getVersion();
+public class HashDbIngestModule implements FileIngestModule {
    private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName());
    private static 
final int MAX_COMMENT_SIZE = 500; - private HashDbSimpleConfigPanel simpleConfigPanel; - private HashDbConfigPanel advancedConfigPanel; private IngestServices services; private SleuthkitCase skCase; private static int messageId = 0; @@ -72,78 +59,6 @@ public class HashDbIngestModule extends IngestModuleAbstractFile implements File HashDbIngestModule() { } - - public static synchronized HashDbIngestModule getDefault() { - if (instance == null) { - instance = new HashDbIngestModule(); - } - return instance; - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public boolean hasSimpleConfiguration() { - return true; - } - - @Override - public javax.swing.JPanel getSimpleConfiguration(String context) { - if (null == simpleConfigPanel) { - simpleConfigPanel = new HashDbSimpleConfigPanel(); - } - else { - simpleConfigPanel.load(); - } - - return simpleConfigPanel; - } - - @Override - public void saveSimpleConfiguration() { - if (simpleConfigPanel != null) { - simpleConfigPanel.store(); - } - } - - @Override - public boolean hasAdvancedConfiguration() { - return true; - } - - @Override - public javax.swing.JPanel getAdvancedConfiguration(String context) { - if (advancedConfigPanel == null) { - advancedConfigPanel = new HashDbConfigPanel(); - } - - advancedConfigPanel.load(); - return advancedConfigPanel; - } - - @Override - public void saveAdvancedConfiguration() { - if (advancedConfigPanel != null) { - advancedConfigPanel.store(); - } - - if (simpleConfigPanel != null) { - simpleConfigPanel.load(); - } - } @Override public void init(long dataSourceTaskId) { @@ -156,51 +71,25 @@ public class HashDbIngestModule extends IngestModuleAbstractFile implements File calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes(); if (knownHashSets.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage(++messageId, - this, - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.noKnownHashDbSetMsg"), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); + // RJCTODO +// services.postMessage(IngestMessage.createWarningMessage(++messageId, +// this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.noKnownHashDbSetMsg"), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); } if (knownBadHashSets.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage(++messageId, - this, - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.noKnownBadHashDbSetMsg"), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); + // RJCTODO +// services.postMessage(IngestMessage.createWarningMessage(++messageId, +// this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.noKnownBadHashDbSetMsg"), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); } } - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - skCase = Case.getCurrentCase().getSleuthkitCase(); - - HashDbManager hashDbManager = HashDbManager.getInstance(); - getHashSetsUsableForIngest(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); - getHashSetsUsableForIngest(hashDbManager.getKnownFileHashSets(), knownHashSets); - calcHashesIsSet = 
hashDbManager.getAlwaysCalculateHashes();
 
        if (knownHashSets.isEmpty()) {
-            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-                    this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.noKnownHashDbSetMsg"),
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
+            // RJCTODO
+//            services.postMessage(IngestMessage.createWarningMessage(++messageId,
+//                    this,
+//                    NbBundle.getMessage(this.getClass(),
+//                            "HashDbIngestModule.noKnownHashDbSetMsg"),
+//                    NbBundle.getMessage(this.getClass(),
+//                            "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
        }
        if (knownBadHashSets.isEmpty()) {
-            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-                    this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.noKnownBadHashDbSetMsg"),
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
+            // RJCTODO
+//            services.postMessage(IngestMessage.createWarningMessage(++messageId,
+//                    this,
+//                    NbBundle.getMessage(this.getClass(),
+//                            "HashDbIngestModule.noKnownBadHashDbSetMsg"),
+//                    NbBundle.getMessage(this.getClass(),
+//                            "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
        }
    }
 
-    @Override
-    public void init(IngestModuleInit initContext) {
-        services = IngestServices.getDefault();
-        skCase = Case.getCurrentCase().getSleuthkitCase();
-
-        HashDbManager hashDbManager = HashDbManager.getInstance();
-        getHashSetsUsableForIngest(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets);
-        getHashSetsUsableForIngest(hashDbManager.getKnownFileHashSets(), knownHashSets);
-        calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes();
-
-        if (knownHashSets.isEmpty()) {
-            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-                    this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.noKnownHashDbSetMsg"),
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
-        }
-        if (knownBadHashSets.isEmpty()) {
-            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-                    this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.noKnownBadHashDbSetMsg"),
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
-        }
-    }
-
    private void getHashSetsUsableForIngest(List<HashDb> hashDbs, List<HashDb> hashDbsForIngest) {
        assert hashDbs != null;
        assert hashDbsForIngest != null;
@@ ...
        }
    }
 
-    @Override
-    public boolean hasBackgroundJobsRunning() {
-        return false;
-    }
-
    @Override
    public void process(AbstractFile file) {
        // Skip unallocated space files.
        if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
            return;
        }
 
-        processFile(file);
-    }
-
-    @Override
-    public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile file) {
-        //skip unalloc
-        if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
-            return IngestModuleAbstractFile.ProcessResult.OK;
-        }
-
-        return processFile(file);
-    }
-
-    private ProcessResult processFile(AbstractFile file) {
        // bail out if we have no hashes set
        if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) {
-            return ProcessResult.OK;
+//            return ProcessResult.OK;
+            return;
        }
 
        // calc hash value
        String name = file.getName();
        String md5Hash = file.getMd5Hash();
        if (md5Hash == null || md5Hash.isEmpty()) {
            try {
                long calcstart = System.currentTimeMillis();
                md5Hash = hasher.calculateMd5(file);
                calctime += (System.currentTimeMillis() - calcstart);
            } catch (IOException ex) {
                logger.log(Level.WARNING, "Error calculating hash of file " + name, ex);
-            services.postMessage(IngestMessage.createErrorMessage(++messageId,
-                    HashDbIngestModule.this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.fileReadErrorMsg",
-                            name),
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.calcHashValueErr",
-                            name)));
-            return ProcessResult.ERROR;
+//            
services.postMessage(IngestMessage.createErrorMessage(++messageId, +// HashDbIngestModule.this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.hashLookupErrorMsg", +// name), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.settingKnownBadStateErr", +// name))); +// ret = ProcessResult.ERROR; } String hashSetName = db.getHashSetName(); @@ -317,15 +189,15 @@ public class HashDbIngestModule extends IngestModuleAbstractFile implements File lookuptime += (System.currentTimeMillis() - lookupstart); } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't lookup known bad hash for file " + name + " - see sleuthkit log for details", ex); - services.postMessage(IngestMessage.createErrorMessage(++messageId, - HashDbIngestModule.this, - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.hashLookupErrorMsg", - name), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.lookingUpKnownBadHashValueErr", - name))); - ret = ProcessResult.ERROR; +// services.postMessage(IngestMessage.createErrorMessage(++messageId, +// HashDbIngestModule.this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.hashLookupErrorMsg", +// name), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.lookingUpKnownBadHashValueErr", +// name))); +// ret = ProcessResult.ERROR; } } @@ -342,38 +214,41 @@ public class HashDbIngestModule extends IngestModuleAbstractFile implements File break; } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex); - services.postMessage(IngestMessage.createErrorMessage(++messageId, - HashDbIngestModule.this, - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.hashLookupErrorMsg", - name), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.settingsKnownStateErr", - name))); - ret = ProcessResult.ERROR; +// services.postMessage(IngestMessage.createErrorMessage(++messageId, +// HashDbIngestModule.this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.hashLookupErrorMsg", +// name), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.settingsKnownStateErr", +// name))); +// ret = ProcessResult.ERROR; } } lookuptime += (System.currentTimeMillis() - lookupstart); } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't lookup known hash for file " + name + " - see sleuthkit log for details", ex); - services.postMessage(IngestMessage.createErrorMessage(++messageId, - HashDbIngestModule.this, - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.hashLookupErrorMsg", - name), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.lookingUpKnownHashValueErr", - name))); - ret = ProcessResult.ERROR; +// services.postMessage(IngestMessage.createErrorMessage(++messageId, +// HashDbIngestModule.this, +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.hashLookupErrorMsg", +// name), +// NbBundle.getMessage(this.getClass(), +// "HashDbIngestModule.lookingUpKnownHashValueErr", +// name))); +// ret = ProcessResult.ERROR; } } } - return ret; +// return ret; } - + private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, String hashSetName, String comment, boolean showInboxMessage) { try { + // RJCTODO + String MODULE_NAME = NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName"); + BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT); //TODO Revisit usage of deprecated constructor as per TSK-583 
//BlackboardAttribute att2 = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), MODULE_NAME, "Known Bad", hashSetName);
 
@@ ...
            detailsSb.append("</table>");
 
-            services.postMessage(IngestMessage.createDataMessage(++messageId, this,
-                    NbBundle.getMessage(this.getClass(),
-                            "HashDbIngestModule.postToBB.knownBadMsg",
-                            abstractFile.getName()),
-                    detailsSb.toString(),
-                    abstractFile.getName() + md5Hash,
-                    badFile));
+//            services.postMessage(IngestMessage.createDataMessage(++messageId, this,
+//                    NbBundle.getMessage(this.getClass(),
+//                            "HashDbIngestModule.postToBB.knownBadMsg",
+//                            abstractFile.getName()),
+//                    detailsSb.toString(),
+//                    abstractFile.getName() + md5Hash,
+//                    badFile));
        }
        services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
    } catch (TskException ex) {
@@ ...
        }
        detailsSb.append("</table>");
 
-        services.postMessage(IngestMessage.createMessage(++messageId,
-                IngestMessage.MessageType.INFO,
-                this,
-                NbBundle.getMessage(this.getClass(),
-                        "HashDbIngestModule.complete.hashLookupResults"),
-                detailsSb.toString()));
+//        services.postMessage(IngestMessage.createMessage(++messageId,
+//                IngestMessage.MessageType.INFO,
+//                this,
+//                NbBundle.getMessage(this.getClass(),
+//                        "HashDbIngestModule.complete.hashLookupResults"),
+//                detailsSb.toString()));
    }
}
From b4d5fc1e45ff0cf4ff136b6bd7cdc5ebcebbd9f8 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Thu, 20 Feb 2014 17:46:32 -0500
Subject: [PATCH 09/48] Back up work completed on parallel file ingest

---
 .../ingest/GeneralIngestConfigurator.java     | 167 ++++++------
 .../autopsy/ingest/IngestDialogPanel.java     | 244 +++++++-----------
 .../autopsy/ingest/IngestManager.java         |   4 +
 .../autopsy/ingest/IngestModuleLoader.java    |  11 +-
 .../autopsy/ingest/IngestModuleTemplate.java  |  61 +++++
 5 files changed, 252 insertions(+), 235 deletions(-)
 create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
index 203e98cbe8..8ab04fdbcc 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
@@ -20,7 +20,9 @@ package org.sleuthkit.autopsy.ingest;
 
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import javax.swing.JPanel;
 import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.coreutils.ModuleSettings;
@@ -51,92 +53,99 @@ public class GeneralIngestConfigurator implements IngestConfigurator {
        return loadSettingsForContext();
    }
 
-    private List<String> loadSettingsForContext() {
-        List<String> messages = new ArrayList<>();
-        List<IngestModuleAbstract> allModules = IngestManager.getDefault().enumerateAllModules();
+    private List<String> loadSettingsForContext() {
+        List<IngestModuleFactory> moduleFactories = IngestManager.getDefault().getIngestModuleFactories();
+        
+        // Get the enabled and disabled ingest modules settings from the user's
+        // config file. The default settings make all ingest modules enabled.
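+        // Each setting is stored as a comma-separated list of module display
+        // names.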
+        HashSet<String> enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, moduleListToCsv(moduleFactories));
+        HashSet<String> disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, "");
 
-        // If there is no enabled ingest modules setting for this user, default to enabling all
-        // of the ingest modules the IngestManager has loaded.
-        if (ModuleSettings.settingExists(moduleContext, ENABLED_INGEST_MODULES_KEY) == false) {
-            String defaultSetting = moduleListToCsv(allModules);
-            ModuleSettings.setConfigSetting(moduleContext, ENABLED_INGEST_MODULES_KEY, defaultSetting);
-        }
-
-        String[] enabledModuleNames = ModuleSettings.getConfigSetting(moduleContext, ENABLED_INGEST_MODULES_KEY).split(", ");
-        ArrayList<String> enabledList = new ArrayList<>(Arrays.asList(enabledModuleNames));
-
-        // Check for modules that are missing from the config file
-
-        String[] disabledModuleNames = null;
-        // Older config files won't have the disabled list, so don't assume it exists
-        if (ModuleSettings.settingExists(moduleContext, DISABLED_INGEST_MODULES_KEY)) {
-            disabledModuleNames = ModuleSettings.getConfigSetting(moduleContext, DISABLED_INGEST_MODULES_KEY).split(", ");
+        // Set up a collection of module templates for the view.
+        List<IngestModuleTemplate> moduleTemplates = new ArrayList<>();
+        HashSet<String> foundModules = new HashSet<>();
+        for (IngestModuleFactory moduleFactory : moduleFactories) {
+            String moduleName = moduleFactory.getModuleDisplayName();
+            IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, null, enabledModuleNames.contains(moduleName));
+            if (!enabledModuleNames.contains(moduleName) && !disabledModuleNames.contains(moduleName)) {
+                // The module factory was loaded, but the module name does not
+                // appear in the enabled/disabled module settings. Treat the
+                // module as a new module and enable it by default.
+                moduleTemplate.setEnabled(true);
+                enabledModuleNames.add(moduleName);
+            }
+            moduleTemplates.add(moduleTemplate);
+            foundModules.add(moduleName);
        }
 
-        for (IngestModuleAbstract module : allModules) {
-            boolean found = false;
+        // Check for missing modules and update the enabled/disabled ingest
+        // module settings. This way the settings will be up to date, even if
+        // save() is never called.
+        List<String> errorMessages = new ArrayList<>();
+        for (String moduleName : new ArrayList<>(enabledModuleNames)) {
+            if (!foundModules.contains(moduleName)) {
+                errorMessages.add(moduleName + " was previously enabled, but could not be found");
+                enabledModuleNames.remove(moduleName);
+                disabledModuleNames.add(moduleName);
+            }
+        }
+        ModuleSettings.setConfigSetting(moduleContext, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(moduleContext, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
 
-            // Check enabled first
-            for (String moduleName : enabledModuleNames) {
-                if (module.getName().equals(moduleName)) {
-                    found = true;
-                    break;
-                }
-            }
-
-            // Then check disabled
-            if (!found && (disabledModuleNames != null)) {
-                for (String moduleName : disabledModuleNames) {
-                    if (module.getName().equals(moduleName)) {
-                        found = true;
-                        break;
-                    }
-                }
-            }
-
-            if (!found) {
-                enabledList.add(module.getName());
-                // It will get saved to file later
-            }
-        }
-
-        // Get the enabled ingest modules setting, check for missing modules, and pass the setting to
-        // the UI component.
-        List<IngestModuleAbstract> enabledModules = new ArrayList<>();
-        for (String moduleName : enabledList) {
-            if (moduleName.equals("Thunderbird Parser")
-                    || moduleName.equals("MBox Parser")) {
-                moduleName = "Email Parser";
-            }
-
-            IngestModuleAbstract moduleFound = null;
-            for (IngestModuleAbstract module : allModules) {
-                if (moduleName.equals(module.getName())) {
-                    moduleFound = module;
-                    break;
-                }
-            }
-            if (moduleFound != null) {
-                enabledModules.add(moduleFound);
-            }
-            else {
-                messages.add(moduleName + " was previously enabled, but could not be found");
-            }
-        }
-        ingestDialogPanel.setEnabledIngestModules(enabledModules);
-
-        // If there is no process unallocated space flag setting, default it to false.
+        // Get the process unallocated space flag setting. If the setting does
+        // not exist yet, default it to false.
        if (ModuleSettings.settingExists(moduleContext, PARSE_UNALLOC_SPACE_KEY) == false) {
            ModuleSettings.setConfigSetting(moduleContext, PARSE_UNALLOC_SPACE_KEY, "false");
-        }
-
-        // Get the process unallocated space flag setting and pass it to the UI component.
+        }
        boolean processUnalloc = Boolean.parseBoolean(ModuleSettings.getConfigSetting(moduleContext, PARSE_UNALLOC_SPACE_KEY));
+
+        // Pass the settings to the ingest dialog panel.
+        ingestDialogPanel.setEnabledIngestModules(enabledModules);
        ingestDialogPanel.setProcessUnallocSpaceEnabled(processUnalloc);
 
-        return messages;
+        return errorMessages;
    }
-
+
+    private HashSet<String> getModulesNamesFromSetting(String key, String defaultSetting) {
+        // Get the ingest modules setting from the user's config file.
+        // If there is no such setting yet, create the default setting.
+        if (ModuleSettings.settingExists(moduleContext, key) == false) {
+            ModuleSettings.setConfigSetting(moduleContext, key, defaultSetting);
+        }
+        HashSet<String> moduleNames = new HashSet<>();
+        String modulesSetting = ModuleSettings.getConfigSetting(moduleContext, key);
+        if (!modulesSetting.isEmpty()) {
+            String[] settingNames = modulesSetting.split(", ");
+            for (String name : settingNames) {
+                // Map some old core module names to the current core module names.
+                if (name.equals("Thunderbird Parser") || name.equals("MBox Parser")) {
+                    moduleNames.add("Email Parser");
+                }
+                else if (name.equals("File Extension Mismatch Detection") || name.equals("Extension Mismatch Detector")) {
+                    moduleNames.add("File Extension Mismatch Detector");
+                }
+                else {
+                    moduleNames.add(name);
+                }
+            }
+        }
+        return moduleNames;
+    }
+
+    private static String makeCommaSeparatedList(HashSet<String> input) {
+        if (input == null || input.isEmpty()) {
+            return "";
+        }
+
+        ArrayList<String> list = new ArrayList<>();
+        list.addAll(input);
+        StringBuilder csvList = new StringBuilder();
+        for (int i = 0; i < list.size() - 1; ++i) {
+            csvList.append(list.get(i)).append(", ");
+        }
+        csvList.append(list.get(list.size() - 1));
+        return csvList.toString();
+    }
+
    @Override
    public JPanel getIngestConfigPanel() {
        // Note that this panel allows for selecting modules for the ingest process,
@@ -166,18 +175,18 @@ public class GeneralIngestConfigurator implements IngestConfigurator {
        }
    }
 
-    private static String moduleListToCsv(List<IngestModuleAbstract> lst) {
+    private static String moduleListToCsv(List<IngestModuleFactory> lst) {
        if (lst == null || lst.isEmpty()) {
            return "";
        }
 
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < lst.size() - 1; ++i) {
-            sb.append(lst.get(i).getName()).append(", ");
+            sb.append(lst.get(i).getModuleDisplayName()).append(", ");
        }
 
        // and the last one
-        sb.append(lst.get(lst.size() - 1).getName());
+        sb.append(lst.get(lst.size() - 1).getModuleDisplayName());
 
        return sb.toString();
    }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java
index ff6ef874e9..b810e51fff 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java
@@ -23,10 +23,12 @@ import java.awt.event.ActionEvent;
 import java.awt.event.ActionListener;
 import java.awt.event.WindowAdapter;
 import java.awt.event.WindowEvent;
+import java.io.Serializable;
 import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import javax.swing.JPanel;
 import javax.swing.JTable;
 import javax.swing.ListSelectionModel;
 import javax.swing.event.ListSelectionEvent;
 import org.sleuthkit.autopsy.coreutils.ModuleSettings;
 
 * main configuration panel for all ingest modules, reusable JPanel component
 */
 class IngestDialogPanel extends javax.swing.JPanel {
+    private List<IngestModuleModel> moduleModels = null;
+    private IngestModuleModel selectedModuleModel = null;
+    private boolean processUnallocatedSpace = false;
+    private ModulesTableModel tableModel = new ModulesTableModel();
 
-    private IngestModuleAbstract currentModule;
-    private ModulesTableModel tableModel;
-    private String context;
-
-    /**
-     * Creates new form IngestDialogPanel
-     */
-    public IngestDialogPanel() {
-        tableModel = new ModulesTableModel();
-        context = ModuleSettings.DEFAULT_CONTEXT;
+    IngestDialogPanel(List<IngestModuleModel> moduleModels, boolean processUnallocatedSpace) {
+        this.moduleModels = moduleModels;
+        this.processUnallocatedSpace = processUnallocatedSpace;
        initComponents();
        customizeComponents();
    }
-
-    public void setContext(String context) {
-        this.context = context;
-    }
-
-
-    public IngestModuleAbstract getCurrentIngestModule() {
-        return currentModule;
-    }
-
-    public List<IngestModuleAbstract> getModulesToStart() {
-        return tableModel.getSelectedModules();
-    }
-
-    public List<IngestModuleAbstract> getDisabledModules() {
-        return tableModel.getUnSelectedModules();
-    }
-
-    public boolean processUnallocSpaceEnabled() {
+
+    boolean getProcessUnallocSpace() {
        return 
processUnallocCheckbox.isSelected();
    }
 
    private void customizeComponents() {
        modulesTable.setModel(tableModel);
        modulesTable.setTableHeader(null);
        modulesTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
 
-        //custom renderer for tooltips
-        ModulesTableRenderer renderer = new ModulesTableRenderer();
-
-        //customize column witdhs
-        final int width = modulesScrollPane.getPreferredSize().width;
-        TableColumn column = null;
-        for (int i = 0; i < modulesTable.getColumnCount(); i++) {
-            column = modulesTable.getColumnModel().getColumn(i);
-            if (i == 0) {
-                column.setPreferredWidth(((int) (width * 0.15)));
+        // Set the column widths in the table model and add a custom cell
+        // renderer that will display module descriptions from the module models
+        // as tooltips.
+        ModulesTableRenderer renderer = new ModulesTableRenderer();
+        int width = modulesScrollPane.getPreferredSize().width;
+        for (int i = 0; i < modulesTable.getColumnCount(); ++i) {
+            TableColumn column = modulesTable.getColumnModel().getColumn(i);
+            if (0 == i) {
+                column.setPreferredWidth(((int)(width * 0.15)));
            } else {
                column.setCellRenderer(renderer);
-                column.setPreferredWidth(((int) (width * 0.84)));
+                column.setPreferredWidth(((int)(width * 0.84)));
            }
        }
 
+        // Add a selection listener to the table model that will display the
+        // ingest options panel of the currently selected module model and
+        // enable or disable the global options panel invocation button.
        modulesTable.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
            @Override
            public void valueChanged(ListSelectionEvent e) {
-                ListSelectionModel listSelectionModel = (ListSelectionModel) e.getSource();
+                ListSelectionModel listSelectionModel = (ListSelectionModel)e.getSource();
                if (!listSelectionModel.isSelectionEmpty()) {
                    int index = listSelectionModel.getMinSelectionIndex();
-                    currentModule = tableModel.getModule(index);
-
-                    // add the module-specific configuration panel, if there is one
+                    selectedModuleModel = moduleModels.get(index);
                    simplePanel.removeAll();
-                    if (currentModule.hasSimpleConfiguration()) {
-                        simplePanel.add(currentModule.getSimpleConfiguration(context));
+                    if (null != selectedModuleModel.getIngestOptionsPanel()) {
+                        simplePanel.add(selectedModuleModel.getIngestOptionsPanel());
                    }
                    simplePanel.revalidate();
                    simplePanel.repaint();
-                    advancedButton.setEnabled(currentModule.hasAdvancedConfiguration());
-                } else {
-                    currentModule = null;
+                    advancedButton.setEnabled(null != selectedModuleModel.getGlobalOptionsPanel());
                }
            }
        });
-    }
-
-    public void setProcessUnallocSpaceEnabled(final boolean enabled) {
-        processUnallocCheckbox.setSelected(enabled);
-    }
-
-    public void setEnabledIngestModules(List<IngestModuleAbstract> enabledModules) {
-        tableModel.setSelectedModules(enabledModules);
+
+        processUnallocCheckbox.setSelected(processUnallocatedSpace);
    }
 
    /**
@@ ...
            dialog.close();
        }
    });
-        dialog.display(currentModule.getAdvancedConfiguration(context));
+        dialog.display(selectedModuleModel.getGlobalOptionsPanel());
    }//GEN-LAST:event_advancedButtonActionPerformed
 
    private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed
-        // nothing to do here
+        processUnallocatedSpace = processUnallocCheckbox.isSelected();
    }//GEN-LAST:event_processUnallocCheckboxActionPerformed
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton advancedButton;
@@ ... @@ +import 
org.sleuthkit.autopsy.coreutils.ModuleSettings;
     private javax.swing.ButtonGroup timeGroup;
     // End of variables declaration//GEN-END:variables
 
-    private class ModulesTableModel extends AbstractTableModel {
-
-        private List<Map.Entry<IngestModuleAbstract, Boolean>> moduleData = new ArrayList<>();
+    static class IngestModuleModel {
+        private final IngestModuleFactory moduleFactory;
+        private final JPanel ingestOptionsPanel;
+        private final JPanel globalOptionsPanel;
+        private boolean enabled = true;
 
-        public ModulesTableModel() {
-            List<IngestModuleAbstract> modules = IngestManager.getDefault().enumerateAllModules();
-            for (IngestModuleAbstract ingestModuleAbstract : modules) {
-                moduleData.add(new AbstractMap.SimpleEntry<>(ingestModuleAbstract, Boolean.TRUE));
+        IngestModuleModel(IngestModuleFactory moduleFactory, Serializable ingestOptions, boolean enabled) {
+            this.moduleFactory = moduleFactory;
+            this.enabled = enabled;
+            if (moduleFactory.providesIngestOptionsPanels()) {
+                ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(ingestOptions);
+            }
+            else {
+                ingestOptionsPanel = null;
+            }
+            if (moduleFactory.providesGlobalOptionsPanels()) {
+                globalOptionsPanel = moduleFactory.getGlobalOptionsPanel();
+            }
+            else {
+                globalOptionsPanel = null;
             }
         }
+        
+        String getModuleDisplayName() {
+            return moduleFactory.getModuleDisplayName();
+        }
 
+        String getModuleDescription() {
+            return moduleFactory.getModuleDescription();
+        }
+        
+        void setEnabled(boolean enabled) {
+            this.enabled = enabled;
+        }
+        
+        boolean isEnabled() {
+            return enabled;
+        }
+        
+        JPanel getIngestOptionsPanel() {
+            return ingestOptionsPanel;
+        }
+        
+        JPanel getGlobalOptionsPanel() {
+            return globalOptionsPanel;
+        }
+    }
+    
+    private class ModulesTableModel extends AbstractTableModel {
         @Override
         public int getRowCount() {
-            return moduleData.size();
+            return moduleModels.size();
        }
 
         @Override
@@ -319,11 +330,12 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings;
 
         @Override
         public Object getValueAt(int rowIndex, int columnIndex) {
-            Map.Entry<IngestModuleAbstract, Boolean> entry = moduleData.get(rowIndex);
+            IngestModuleModel moduleModel = moduleModels.get(rowIndex);
             if (columnIndex == 0) {
-                return entry.getValue();
-            } else {
-                return entry.getKey().getName();
+                return moduleModel.isEnabled();
+            }
+            else {
+                return moduleModel.getModuleDisplayName();
             }
         }
 
@@ -335,87 +347,14 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings;
         @Override
         public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
             if (columnIndex == 0) {
-                moduleData.get(rowIndex).setValue((Boolean)aValue);
+                moduleModels.get(rowIndex).setEnabled((boolean)aValue);
             }
         }
 
         @Override
         public Class getColumnClass(int c) {
             return getValueAt(0, c).getClass();
-        }
-
-        public List<IngestModuleAbstract> getSelectedModules() {
-            List<IngestModuleAbstract> selectedModules = new ArrayList<>();
-            for (Map.Entry<IngestModuleAbstract, Boolean> entry : moduleData) {
-                if (entry.getValue().booleanValue()) {
-                    selectedModules.add(entry.getKey());
-                }
-            }
-            return selectedModules;
-        }
-
-        public List<IngestModuleAbstract> getUnSelectedModules() {
-            List<IngestModuleAbstract> unselectedModules = new ArrayList<>();
-            for (Map.Entry<IngestModuleAbstract, Boolean> entry : moduleData) {
-                if (!entry.getValue().booleanValue()) {
-                    unselectedModules.add(entry.getKey());
-                }
-            }
-            return unselectedModules;
         }
-
-        /**
-         * Sets the given modules as selected in the modules table
-         * @param selectedModules
-         */
-        public void setSelectedModules(List<IngestModuleAbstract> selectedModules) {
-            // unselect all modules
-            for (Map.Entry<IngestModuleAbstract, Boolean> entry : moduleData) {
-                entry.setValue(Boolean.FALSE);
-            }
-
-            // select only the given modules
-            for (IngestModuleAbstract selectedModule : selectedModules) {
-                getEntryForModule(selectedModule).setValue(Boolean.TRUE);
-            }
-
-            // tell everyone about it
-            fireTableDataChanged();
-        }
-
-        /**
-         * Sets the given modules as NOT selected in the modules table
-         * @param selectedModules
-         */
-        public void setUnselectedModules(List<IngestModuleAbstract> unselectedModules) {
-            // select all modules
-            for (Map.Entry<IngestModuleAbstract, Boolean> entry : moduleData) {
-                entry.setValue(Boolean.TRUE);
-            }
-
-            // unselect only the given modules
-            for (IngestModuleAbstract unselectedModule : unselectedModules) {
-                getEntryForModule(unselectedModule).setValue(Boolean.FALSE);
-            }
-
-            // tell everyone about it
-            fireTableDataChanged();
-        }
-
-        public IngestModuleAbstract getModule(int row) {
-            return moduleData.get(row).getKey();
-        }
-
-        private Map.Entry<IngestModuleAbstract, Boolean> getEntryForModule(IngestModuleAbstract module) {
-            Map.Entry<IngestModuleAbstract, Boolean> entry = null;
-            for (Map.Entry<IngestModuleAbstract, Boolean> anEntry : moduleData) {
-                if (anEntry.getKey().equals(module)) {
-                    entry = anEntry;
-                    break;
-                }
-            }
-            return entry;
-        }
     }
 
     /**
@@ -426,20 +365,15 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings;
         List<String> tooltips = new ArrayList<>();
 
         public ModulesTableRenderer() {
-            List<IngestModuleAbstract> modules = IngestManager.getDefault().enumerateAllModules();
-            for (IngestModuleAbstract ingestModuleAbstract : modules) {
-                tooltips.add(ingestModuleAbstract.getDescription());
+            for (IngestModuleModel moduleModel : moduleModels) {
+                tooltips.add(moduleModel.getModuleDescription());
             }
         }
 
         @Override
-        public Component getTableCellRendererComponent(
-                JTable table, Object value,
-                boolean isSelected, boolean hasFocus,
-                int row, int column) {
+        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
             super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
-
-            if (column == 1) {
+            if (1 == column) {
                 setToolTipText(tooltips.get(row));
             }
             return this;
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
index 6e079a2d53..8c4250901b 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
@@ -792,6 +792,10 @@ public class IngestManager {
         modules.addAll(enumerateAbstractFileModules());
         return modules;
     }
+    
+    List<IngestModuleFactory> getIngestModuleFactories() {
+        return moduleLoader.getIngestModuleFactories();
+    }
 
     //data source worker to remove itself when complete or interrupted
     void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
index c3f406c89e..c3ea5433a2 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
@@ -91,7 +91,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 * code refactored
 */
final class IngestModuleLoader {
-
+    private ArrayList<IngestModuleFactory> moduleFactories = new ArrayList<>();
     private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml";
     private static final String XSDFILE = "PipelineConfigSchema.xsd";
     private String absFilePath;
@@ -456,6 +456,10 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
         return urls;
     }
 
+    List<IngestModuleFactory> getIngestModuleFactories() {
+        return moduleFactories;
+    }
+    
    /**
     * Auto-discover ingest modules in all platform modules that are "enabled"
     * If discovered ingest module is not already in XML config, add it do
@@ -466,6 +470,11 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
    @SuppressWarnings("unchecked")
    private void autodiscover() throws IngestModuleLoaderException {

+        Collection<? extends IngestModuleFactory> factories = 
Lookup.getDefault().lookupAll(IngestModuleFactory.class); + moduleFactories.addAll(factories); + +// moduleFactories + // Use Lookup to find the other NBM modules. We'll later search them for ingest modules Collection moduleInfos = Lookup.getDefault().lookupAll(ModuleInfo.class); logger.log(Level.INFO, "Autodiscovery, found #platform modules: " + moduleInfos.size()); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java new file mode 100755 index 0000000000..5580dfa171 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java @@ -0,0 +1,61 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.sleuthkit.autopsy.ingest; + +import java.io.Serializable; + +/** + * RJCTODO + */ +public class IngestModuleTemplate { + private final IngestModuleFactory moduleFactory; + private Serializable ingestOptions = null; + private boolean enabled = true; + + IngestModuleTemplate(IngestModuleFactory moduleFactory, Serializable ingestOptions, boolean enabled) { + this.moduleFactory = moduleFactory; + this.ingestOptions = ingestOptions; + this.enabled = enabled; + } + + String getModuleDisplayName() { + return moduleFactory.getModuleDisplayName(); + } + + String getModuleDescription() { + return moduleFactory.getModuleDescription(); + } + + Serializable getIngestOptions() { + return ingestOptions; + } + + void setIngestOptions(Serializable ingestOptions) { + this.ingestOptions = ingestOptions; + } + + void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + boolean isEnabled() { + return enabled; + } +} From 44b155df4cab9e87d192d368c6ad9dcddda0d963 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 21 Feb 2014 12:15:56 -0500 Subject: [PATCH 10/48] Complete first phase of first draft of changes to ingest config --- .../autopsy/ingest/Bundle.properties | 6 +- .../ingest/GeneralIngestConfigurator.java | 348 ++++++++++++------ ...nel.form => IngestConfigurationPanel.form} | 6 +- ...nel.java => IngestConfigurationPanel.java} | 132 +++---- .../autopsy/ingest/IngestModuleTemplate.java | 25 +- 5 files changed, 289 insertions(+), 228 deletions(-) rename Core/src/org/sleuthkit/autopsy/ingest/{IngestDialogPanel.form => IngestConfigurationPanel.form} (95%) rename Core/src/org/sleuthkit/autopsy/ingest/{IngestDialogPanel.java => IngestConfigurationPanel.java} (80%) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties index 0674b59bf7..49944ff5e4 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties @@ -21,7 +21,6 @@ IngestMessageDetailsPanel.viewArtifactButton.text=Go to Result IngestMessageDetailsPanel.viewContentButton.text=Go to Directory IngestMessagePanel.sortByLabel.text=Sort by: 
IngestMessagePanel.sortByComboBox.toolTipText=Sort messages by time (chronological order) or message priority
-IngestDialogPanel.advancedButton.text=Advanced
 IngestMessageDetailsPanel.messageDetailsPane.contentType=text/html
 IngestMessageDetailsPanel.messageDetailsPane.toolTipText=
 IngestMessagesToolbar.toolTipText=
@@ -32,5 +31,6 @@
 IngestMessagePanel.totalMessagesNameLabel.text=Total:
 IngestMessagePanel.totalMessagesNameVal.text=-
 IngestMessagePanel.totalUniqueMessagesNameLabel.text=Unique:
 IngestMessagePanel.totalUniqueMessagesNameVal.text=-
-IngestDialogPanel.processUnallocCheckbox.text=Process Unallocated Space
-IngestDialogPanel.processUnallocCheckbox.toolTipText=Processes unallocated space, such as deleted files. Produces more complete results, but it may take longer to process on large images.
+IngestConfigurationPanel.advancedButton.text=Advanced
+IngestConfigurationPanel.processUnallocCheckbox.toolTipText=Processes unallocated space, such as deleted files. Produces more complete results, but it may take longer to process on large images.
+IngestConfigurationPanel.processUnallocCheckbox.text=Process Unallocated Space
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
index 8ab04fdbcc..dd32a1d1cd 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  * 
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
  * Contact: carrier sleuthkit org
  * 
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,121 +16,117 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.sleuthkit.autopsy.ingest;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Map;
 import javax.swing.JPanel;
-import org.openide.util.lookup.ServiceProvider;
 import org.sleuthkit.autopsy.coreutils.ModuleSettings;
 import org.sleuthkit.datamodel.Content;
 
-@ServiceProvider(service = IngestConfigurator.class)
-public class GeneralIngestConfigurator implements IngestConfigurator {
-    
-    public static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules";
-    public static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules";
-    public static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space";
-    private List<Content> contentToIngest;
-    private IngestManager manager;
-    private IngestDialogPanel ingestDialogPanel;
-    private String moduleContext;
+/**
+ * Controller to allow a user to set context-sensitive ingest module options,
+ * enable/disable ingest modules, and set general ingest options. Provides an
+ * ingest module model class and instances of a UI component to its
+ * clients (Model-View-Controller design pattern).
+ */
+public class GeneralIngestConfigurator {
+    private static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules";
+    private static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules";
+    private static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space";
+    private final IngestManager ingestManager = IngestManager.getDefault();
+    private String context = null;
+    private boolean processUnallocatedSpace = false;
+    private IngestConfigurationPanel ingestConfigPanel = null;
+    private List<Content> contentToIngest = null; // RJCTODO: Remove if start() method removed
 
+    public class IngestConfigurationException extends Exception {
+        IngestConfigurationException(String message) {
+            super(message);
+        }
+    }
+    
+    /**
+     * RJCTODO
+     */
     public GeneralIngestConfigurator() {
-        this.moduleContext = IngestManager.MODULE_PROPERTIES;
-        ingestDialogPanel = new IngestDialogPanel();
-        ingestDialogPanel.setContext(moduleContext);
-        manager = IngestManager.getDefault();
     }
 
-    @Override
+    /**
+     * RJCTODO
+     * @param context
+     * @return
+     */
     public List<String> setContext(String context) {
-        moduleContext = context;
-        ingestDialogPanel.setContext(moduleContext);
-        return loadSettingsForContext();
-    }
-    
-    private List<String> loadSettingsForContext() {
-        List<IngestModuleFactory> moduleFactories = IngestManager.getDefault().getIngestModuleFactories();
-        
-        // Get the enabled and disabled ingest modules settings from the user's
-        // config file. The default settings make all ingest modules enabled.
-        HashSet<String> enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, moduleListToCsv(moduleFactories));
+        this.context = context;
+        return initializeForContext();
+    }
+    
+    private List<String> initializeForContext() {
+        // Get the enabled and disabled ingest modules settings for the current
+        // context. The default settings make all ingest modules enabled.
+        List<IngestModuleFactory> moduleFactories = IngestManager.getDefault().getIngestModuleFactories(); // RJCTODO: Put in uniqueness test in loader!
+        HashSet<String> loadedModuleNames = new HashSet<>();
+        for (IngestModuleFactory moduleFactory : moduleFactories) {
+            loadedModuleNames.add(moduleFactory.getModuleDisplayName());
+        }
+        HashSet<String> enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(loadedModuleNames));
         HashSet<String> disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, "");
         
-        // Set up a collection of module templates for the view.
-        List<IngestModuleTemplate> moduleTemplates = new ArrayList<>();
-        HashSet<String> foundModules = new HashSet<>();
-        for (IngestModuleFactory moduleFactory : moduleFactories) {
+        // Create ingest module templates for the ingest module pipelines and
+        // wrap them in ingest module models to pass to the ingest configuration
+        // panel (view). The initial enabled/disabled state of the module models
+        // comes from the context-sensitive settings.
+        HashSet<String> knownModuleNames = new HashSet<>();
+        List<IngestModuleModel> modules = new ArrayList<>();
+        for (IngestModuleFactory moduleFactory : moduleFactories) {
+            // NOTE: In the future, this code will be modified to get the ingest
+            // options for each module for the current context; for now just
+            // get the default ingest options.
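+            // For illustration (editor's sketch, using only this patch's own API):
+            // a template pairs a factory with an options payload, e.g.
+            //     new IngestModuleTemplate(moduleFactory, moduleFactory.getDefaultIngestOptions())
+            // while the wrapping IngestModuleModel tracks the enabled flag separately.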
+            IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, moduleFactory.getDefaultIngestOptions());
             String moduleName = moduleFactory.getModuleDisplayName();
-            IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, null, enabledModuleNames.contains(moduleName));
+            IngestModuleModel module = new IngestModuleModel(moduleTemplate, enabledModuleNames.contains(moduleName));
             if (!enabledModuleNames.contains(moduleName) && !disabledModuleNames.contains(moduleName)) {
                 // The module factory was loaded, but the module name does not
                 // appear in the enabled/disabled module settings. Treat the
                 // module as a new module and enable it by default.
-                moduleTemplate.setEnabled(true);
-                enabledModuleNames.add(moduleName);
+                module.setEnabled(true);
+                enabledModuleNames.add(moduleName); // RJCTODO: Put in uniqueness test, i.e., check return value!
             }
-            foundModules.add(moduleName);
+            modules.add(module);
+            knownModuleNames.add(moduleName);
         }
         
         // Check for missing modules and update the enabled/disabled ingest
-        // module settings. This way the settings will be up to date, even if
-        // save() is never called.
+        // module settings. This way the settings for the context will be
+        // up-to-date, even if save() is never called.
         List<String> errorMessages = new ArrayList<>();
         for (String moduleName : enabledModuleNames) {
-            if (!foundModules.contains(moduleName)) {
+            if (!knownModuleNames.contains(moduleName)) {
                 errorMessages.add(moduleName + " was previously enabled, but could not be found");
                 enabledModuleNames.remove(moduleName);
                 disabledModuleNames.add(moduleName);
             }
         }
-        ModuleSettings.setConfigSetting(moduleContext, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
-        ModuleSettings.setConfigSetting(moduleContext, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
+        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
 
         // Get the process unallocated space flag setting. If the setting does
         // not exist yet, default it to false.
-        if (ModuleSettings.settingExists(moduleContext, PARSE_UNALLOC_SPACE_KEY) == false) {
-            ModuleSettings.setConfigSetting(moduleContext, PARSE_UNALLOC_SPACE_KEY, "false");
+        if (ModuleSettings.settingExists(context, PARSE_UNALLOC_SPACE_KEY) == false) {
+            ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, "false");
         }
-        boolean processUnalloc = Boolean.parseBoolean(ModuleSettings.getConfigSetting(moduleContext, PARSE_UNALLOC_SPACE_KEY));
+        processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY));
 
-        // Pass the settings to the nigest dialog panel.
-        ingestDialogPanel.setEnabledIngestModules(enabledModules);
-        ingestDialogPanel.setProcessUnallocSpaceEnabled(processUnalloc);
+        // Make the configuration panel for the current context (view).
+        ingestConfigPanel = new IngestConfigurationPanel(modules, processUnallocatedSpace);
 
         return errorMessages;
     }
     
-    private HashSet<String> getModulesNamesFromSetting(String key, String defaultSetting) {
-        // Get the ingest modules setting from the user's config file.
-        // If there is no such setting yet, create the default setting.
- if (ModuleSettings.settingExists(moduleContext, key) == false) { - ModuleSettings.setConfigSetting(moduleContext, key, defaultSetting); - } - HashSet moduleNames = new HashSet<>(); - String modulesSetting = ModuleSettings.getConfigSetting(moduleContext, key); - if (!modulesSetting.isEmpty()) { - String[] settingNames = modulesSetting.split(", "); - for (String name : settingNames) { - // Map some old core module names to the current core module names. - if (name.equals("Thunderbird Parser") || name.equals("MBox Parser")) { - moduleNames.add("Email Parser"); - } - else if (name.equals("File Extension Mismatch Detection") || name.equals("Extension Mismatch Detector")) { - moduleNames.add("File Extension Mismatch Detector"); - } - else { - moduleNames.add(name); - } - } - } - return moduleNames; - } - private static String makeCommaSeparatedList(HashSet input) { if (input == null || input.isEmpty()) { return ""; @@ -145,73 +141,181 @@ public class GeneralIngestConfigurator implements IngestConfigurator { csvList.append(list.get(list.size() - 1)); return csvList.toString(); } - - @Override - public JPanel getIngestConfigPanel() { - // Note that this panel allows for selecting modules for the ingest process, - // specifying the process unallocated space flag, and also specifying settings - // for a selected ingest module. - return ingestDialogPanel; + + private HashSet getModulesNamesFromSetting(String key, String defaultSetting) { + // Get the ingest modules setting from the user's config file. + // If there is no such setting yet, create the default setting. + if (ModuleSettings.settingExists(context, key) == false) { + ModuleSettings.setConfigSetting(context, key, defaultSetting); + } + HashSet moduleNames = new HashSet<>(); + String modulesSetting = ModuleSettings.getConfigSetting(context, key); + if (!modulesSetting.isEmpty()) { + String[] settingNames = modulesSetting.split(", "); + for (String name : settingNames) { + // Map some old core module names to the current core module names. + switch (name) { + case "Thunderbird Parser": + case "MBox Parser": + moduleNames.add("Email Parser"); + break; + case "File Extension Mismatch Detection": + case "Extension Mismatch Detector": + moduleNames.add("File Extension Mismatch Detector"); + break; + default: + moduleNames.add(name); + } + } + } + return moduleNames; + } + + public JPanel getIngestConfigPanel() throws IngestConfigurationException { + if (null == context || null == ingestConfigPanel) { + throw new IngestConfigurationException("Ingest context not set"); + } + + return ingestConfigPanel; } - @Override - public void save() { - // Save the user's configuration of the set of enabled ingest modules. - String enabledModulesCsvList = moduleListToCsv(ingestDialogPanel.getModulesToStart()); - ModuleSettings.setConfigSetting(moduleContext, ENABLED_INGEST_MODULES_KEY, enabledModulesCsvList); - - // Save the user's configuration of the set of disabled ingest modules. - String disabledModulesCsvList = moduleListToCsv(ingestDialogPanel.getDisabledModules()); - ModuleSettings.setConfigSetting(moduleContext, DISABLED_INGEST_MODULES_KEY, disabledModulesCsvList); - - // Save the user's setting for the process unallocated space flag. - String processUnalloc = Boolean.toString(ingestDialogPanel.processUnallocSpaceEnabled()); - ModuleSettings.setConfigSetting(moduleContext, PARSE_UNALLOC_SPACE_KEY, processUnalloc); - - // Save the user's configuration of the currently selected ingest module. 
-        IngestModuleAbstract currentModule = ingestDialogPanel.getCurrentIngestModule();
-        if (currentModule != null && currentModule.hasSimpleConfiguration()) {
-            currentModule.saveSimpleConfiguration();
-        }
-    }
-
-    private static String moduleListToCsv(List<IngestModuleFactory> lst) {
-        if (lst == null || lst.isEmpty()) {
-            return "";
+    public void save() throws IngestConfigurationException {
+        if (null == context || null == ingestConfigPanel) {
+            throw new IngestConfigurationException("Ingest context not set");
         }
-        
-        StringBuilder sb = new StringBuilder();
-        for (int i = 0; i < lst.size() - 1; ++i) {
-            sb.append(lst.get(i).getModuleDisplayName()).append(", ");
-        }
+        List<IngestModuleModel> modules = ingestConfigPanel.getIngestModules();
         
-        // and the last one
-        sb.append(lst.get(lst.size() - 1).getModuleDisplayName());
+        // Save the enabled/disabled ingest module settings for the current context.
+        HashSet<String> enabledModuleNames = new HashSet<>();
+        HashSet<String> disabledModuleNames = new HashSet<>();
+        for (IngestModuleModel module : modules) {
+            if (module.isEnabled()) {
+                enabledModuleNames.add(module.getModuleName());
+            }
+            else {
+                disabledModuleNames.add(module.getModuleName());
+            }
+        }
+        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
         
-        return sb.toString();
+        // Save the process unallocated space setting for this context.
+        String processUnalloc = Boolean.toString(ingestConfigPanel.getProcessUnallocSpace());
+        ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, processUnalloc);
+        
+        // Get the ingest module options for each ingest module.
+        // NOTE: In the future, this code will be modified to persist the ingest
+        // options for each ingest module for the current context.
+        // RJCTODO: Decide whether to set the ingest options here or in the dialog; in the dialog allows corrections by user
+//        if (currentModule != null && currentModule.hasSimpleConfiguration()) {
+//            currentModule.saveSimpleConfiguration();
+//        }
     }
     
-    @Override
+    // RJCTODO: If time permits, make it so that this class is not responsible for
+    // starting and running the ingest - probably need to do this anyway, at
+    // least if the IngestConfigurator interface goes away and this becomes the
+    // IngestConfigurator class.
     public void setContent(List<Content> inputContent) {
         this.contentToIngest = inputContent;
     }
 
-    @Override
+    // RJCTODO: If time permits, make it so that this class is not responsible for
+    // starting and running the ingest - probably need to do this anyway, at
+    // least if the IngestConfigurator interface goes away and this becomes the
+    // IngestConfigurator class.
    public void start() {
        // Get the list of ingest modules selected by the user.
-        List<IngestModuleAbstract> modulesToStart = ingestDialogPanel.getModulesToStart();
+        // RJCTODO:
+//        List<IngestModuleAbstract> modulesToStart = ingestConfigPanel.getModulesToStart();
+        List<IngestModuleAbstract> modulesToStart = new ArrayList<>();
 
        // Get the user's selection of whether or not to process unallocated space.
-        manager.setProcessUnallocSpace(ingestDialogPanel.processUnallocSpaceEnabled());
+        ingestManager.setProcessUnallocSpace(processUnallocatedSpace);
 
        if (!modulesToStart.isEmpty() && contentToIngest != null) {
            // Queue the ingest process.
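+            // Illustrative caller flow for this class (editor's sketch; the ingest
+            // dialog and the add-image wizard are the real call sites):
+            //     configurator.setContent(dataSources);
+            //     configurator.start();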
- manager.scheduleDataSource(modulesToStart, contentToIngest); + ingestManager.scheduleDataSource(modulesToStart, contentToIngest); } } - @Override + // RJCTODO: If time permits, make it so that this class is not responsible + // starting and running the ingest - probably need to do this anyway, at + // least if the IngestConfigurator interface goes away and this becomes the + // IngestConfigurator class. public boolean isIngestRunning() { - return manager.isIngestRunning(); - } + return ingestManager.isIngestRunning(); + } + + /** + * A model of an ingest module tailored for the view used to configure + * ingest modules. + */ + static class IngestModuleModel { + private final IngestModuleTemplate moduleTemplate; + private final IngestModuleFactory moduleFactory; + private final JPanel ingestOptionsPanel; + private final JPanel globalOptionsPanel; + private boolean enabled = true; + + IngestModuleModel(IngestModuleTemplate moduleTemplate, boolean enabled) { + this.moduleTemplate = moduleTemplate; + moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.providesIngestOptionsPanels()) { + ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(moduleTemplate.getIngestOptions()); + } + else { + ingestOptionsPanel = null; + } + if (moduleFactory.providesGlobalOptionsPanels()) { + globalOptionsPanel = moduleFactory.getGlobalOptionsPanel(); + } + else { + globalOptionsPanel = null; + } + this.enabled = enabled; + } + + String getModuleName() { + return moduleFactory.getModuleDisplayName(); + } + + String getModuleDescription() { + return moduleFactory.getModuleDescription(); + } + + void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + boolean isEnabled() { + return enabled; + } + + boolean hasIngestOptionsPanel() { + return moduleFactory.providesIngestOptionsPanels(); + } + + JPanel getIngestOptionsPanel() { + return ingestOptionsPanel; + } + + boolean hasGlobalOptionsPanel() { + return moduleFactory.providesGlobalOptionsPanels(); + } + + JPanel getGlobalOptionsPanel() { + return globalOptionsPanel; + } + + void saveGlobalOptions() throws IngestModuleFactory.InvalidOptionsException { + // RJCTODO: Check for null. 
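+            // An illustrative guard for the TODO above (editor's sketch, not part
+            // of this patch):
+            //     if (null == globalOptionsPanel) {
+            //         return;
+            //     }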
+            moduleFactory.saveGlobalOptionsFromPanel(globalOptionsPanel);
+        }
+        
+        private IngestModuleTemplate getIngestModuleTemplate() {
+            return moduleTemplate;
+        }
+    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.form b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.form
similarity index 95%
rename from Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.form
rename to Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.form
index ad7862b24a..204ee54c11 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.form
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.form
@@ -134,7 +134,7 @@
(form XML not recoverable from the extraction)
@@ -201,10 +201,10 @@
(form XML not recoverable from the extraction)
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java
similarity index 80%
rename from Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java
rename to Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java
index b810e51fff..abe805df15 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialogPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java
@@ -23,12 +23,8 @@ import java.awt.event.ActionEvent;
 import java.awt.event.ActionListener;
 import java.awt.event.WindowAdapter;
 import java.awt.event.WindowEvent;
-import java.io.Serializable;
-import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
-import javax.swing.JPanel;
 import javax.swing.JTable;
 import javax.swing.ListSelectionModel;
 import javax.swing.event.ListSelectionEvent;
@@ -37,24 +33,30 @@ import javax.swing.table.AbstractTableModel;
 import javax.swing.table.DefaultTableCellRenderer;
 import javax.swing.table.TableColumn;
 import org.sleuthkit.autopsy.corecomponents.AdvancedConfigurationDialog;
-import org.sleuthkit.autopsy.coreutils.ModuleSettings;
+import org.sleuthkit.autopsy.ingest.GeneralIngestConfigurator.IngestModuleModel;
 
 /**
- * main configuration panel for all ingest modules, reusable JPanel component
+ * User interface component to allow a user to set ingest module options and
+ * enable/disable the modules. Designed as a view of an ingest module model
+ * class provided by a controller (Model-View-Controller design pattern).
 */
- class IngestDialogPanel extends javax.swing.JPanel {
-    private List<IngestModuleModel> moduleModels = null;
-    private IngestModuleModel selectedModuleModel = null;
-    private boolean processUnallocatedSpace = false;
-    private ModulesTableModel tableModel = null;
+ class IngestConfigurationPanel extends javax.swing.JPanel {
+    private final List<IngestModuleModel> modules;
+    private IngestModuleModel selectedModule = null;
+    private boolean processUnallocatedSpace = false;
+    private IngestModulesTableModel tableModel = null;
 
-    IngestDialogPanel(List<IngestModuleModel> moduleModels, boolean processUnallocatedSpace) {
-        this.moduleModels = moduleModels;
+    IngestConfigurationPanel(List<IngestModuleModel> modules, boolean processUnallocatedSpace) {
+        this.modules = modules;
         this.processUnallocatedSpace = processUnallocatedSpace;
         initComponents();
         customizeComponents();
     }
+    
+    List<IngestModuleModel> getIngestModules() {
+        return modules;
+    }
 
     boolean getProcessUnallocSpace() {
         return processUnallocCheckbox.isSelected();
     }
 
     private void customizeComponents() {
         modulesTable.setModel(tableModel);
         modulesTable.setTableHeader(null);
         modulesTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
 
         // Set the column widths in the table model and add a custom cell
         // renderer that will display module descriptions from the module models
         // as tooltips.
- ModulesTableRenderer renderer = new ModulesTableRenderer(); + IngestModulesTableRenderer renderer = new IngestModulesTableRenderer(); int width = modulesScrollPane.getPreferredSize().width; for (int i = 0; i < modulesTable.getColumnCount(); ++i) { TableColumn column = modulesTable.getColumnModel().getColumn(i); @@ -88,14 +90,14 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; ListSelectionModel listSelectionModel = (ListSelectionModel)e.getSource(); if (!listSelectionModel.isSelectionEmpty()) { int index = listSelectionModel.getMinSelectionIndex(); - selectedModuleModel = moduleModels.get(index); + selectedModule = modules.get(index); simplePanel.removeAll(); - if (null != selectedModuleModel.getIngestOptionsPanel()) { - simplePanel.add(selectedModuleModel.getIngestOptionsPanel()); + if (null != selectedModule.getIngestOptionsPanel()) { + simplePanel.add(selectedModule.getIngestOptionsPanel()); } simplePanel.revalidate(); simplePanel.repaint(); - advancedButton.setEnabled(null != selectedModuleModel.getGlobalOptionsPanel()); + advancedButton.setEnabled(null != selectedModule.getGlobalOptionsPanel()); } } }); @@ -146,7 +148,7 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; jPanel1.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(160, 160, 160))); jPanel1.setPreferredSize(new java.awt.Dimension(338, 257)); - advancedButton.setText(org.openide.util.NbBundle.getMessage(IngestDialogPanel.class, "IngestDialogPanel.advancedButton.text")); // NOI18N + advancedButton.setText(org.openide.util.NbBundle.getMessage(IngestConfigurationPanel.class, "IngestConfigurationPanel.advancedButton.text")); // NOI18N advancedButton.setEnabled(false); advancedButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { @@ -184,8 +186,8 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; processUnallocPanel.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(160, 160, 160))); - processUnallocCheckbox.setText(org.openide.util.NbBundle.getMessage(IngestDialogPanel.class, "IngestDialogPanel.processUnallocCheckbox.text")); // NOI18N - processUnallocCheckbox.setToolTipText(org.openide.util.NbBundle.getMessage(IngestDialogPanel.class, "IngestDialogPanel.processUnallocCheckbox.toolTipText")); // NOI18N + processUnallocCheckbox.setText(org.openide.util.NbBundle.getMessage(IngestConfigurationPanel.class, "IngestConfigurationPanel.processUnallocCheckbox.text")); // NOI18N + processUnallocCheckbox.setToolTipText(org.openide.util.NbBundle.getMessage(IngestConfigurationPanel.class, "IngestConfigurationPanel.processUnallocCheckbox.toolTipText")); // NOI18N processUnallocCheckbox.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { processUnallocCheckboxActionPerformed(evt); @@ -238,20 +240,31 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; private void advancedButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_advancedButtonActionPerformed final AdvancedConfigurationDialog dialog = new AdvancedConfigurationDialog(); + dialog.addApplyButtonListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { + try { + if (selectedModule.hasGlobalOptionsPanel()) + selectedModule.saveGlobalOptions(); + } + catch (IngestModuleFactory.InvalidOptionsException ex) { + // RJCTODO: Error message box + // Return without closing to allow user to correct error. 
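+                        // e.g., an editor's sketch of the error notification the TODO
+                        // above calls for (assumes the MessageNotifyUtil helper from
+                        // org.sleuthkit.autopsy.coreutils is appropriate here):
+                        //     MessageNotifyUtil.Message.error(ex.getMessage());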
+ return; + } dialog.close(); - currentModule.saveAdvancedConfiguration(); } }); + dialog.addWindowListener(new WindowAdapter() { @Override public void windowClosing(WindowEvent e) { dialog.close(); } }); - dialog.display(selectedModuleModel.getGlobalOptionsPanel()); + + dialog.display(selectedModule.getGlobalOptionsPanel()); }//GEN-LAST:event_advancedButtonActionPerformed private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed @@ -269,58 +282,15 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; private javax.swing.JPanel simplePanel; private javax.swing.ButtonGroup timeGroup; // End of variables declaration//GEN-END:variables - - static class IngestModuleModel { - private final IngestModuleFactory moduleFactory; - private final JPanel ingestOptionsPanel; - private final JPanel globalOptionsPanel; - private boolean enabled = true; - - IngestModuleModel(IngestModuleFactory moduleFactory, Serializable ingestOptions, boolean enabled) { - this.moduleFactory = moduleFactory; - this.enabled = enabled; - if (moduleFactory.providesIngestOptionsPanels()) { - ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(ingestOptions); - } - else { - ingestOptionsPanel = null; - } - if (moduleFactory.providesGlobalOptionsPanels()) { - - } - else { - } - } - - String getModuleDisplayName() { - return moduleFactory.getModuleDisplayName(); - } - - String getModuleDescription() { - return moduleFactory.getModuleDescription(); - } - - void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - boolean isEnabled() { - return enabled; - } - - JPanel getIngestOptionsPanel() { - return ingestOptionsPanel; - } - JPanel getGlobalOptionsPanel() { - return globalOptionsPanel; - } - } - - private class ModulesTableModel extends AbstractTableModel { + /** + * Custom table model to display ingest module names and enable/disable + * ingest modules. + */ + private class IngestModulesTableModel extends AbstractTableModel { @Override public int getRowCount() { - return moduleModels.size(); + return modules.size(); } @Override @@ -330,12 +300,12 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; @Override public Object getValueAt(int rowIndex, int columnIndex) { - IngestModuleTemplate moduleTemplate = moduleModels.get(rowIndex); + IngestModuleModel module = modules.get(rowIndex); if (columnIndex == 0) { - return moduleTemplate.isEnabled(); + return module.isEnabled(); } else { - return moduleTemplate.getModuleDisplayName(); + return module.getModuleName(); } } @@ -347,7 +317,7 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; @Override public void setValueAt(Object aValue, int rowIndex, int columnIndex) { if (columnIndex == 0) { - moduleModels.get(rowIndex).setEnabled((boolean)aValue); + modules.get(rowIndex).setEnabled((boolean)aValue); } } @@ -358,14 +328,14 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; } /** - * Custom cell renderer for tool tips with module description + * Custom cell renderer to create tool tips displaying ingest module + * descriptions. 
*/ - private class ModulesTableRenderer extends DefaultTableCellRenderer { - + private class IngestModulesTableRenderer extends DefaultTableCellRenderer { List tooltips = new ArrayList<>(); - public ModulesTableRenderer() { - for (IngestModuleTemplate moduleTemplate : moduleModels) { + public IngestModulesTableRenderer() { + for (IngestModuleModel moduleTemplate : modules) { tooltips.add(moduleTemplate.getModuleDescription()); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java index 5580dfa171..821d5b0f0b 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java @@ -22,40 +22,27 @@ package org.sleuthkit.autopsy.ingest; import java.io.Serializable; /** - * RJCTODO + * Combines an ingest module factory with ingest options and an enabled flag to + * create a template for creating fully configured ingest modules. */ public class IngestModuleTemplate { private final IngestModuleFactory moduleFactory; private Serializable ingestOptions = null; - private boolean enabled = true; - IngestModuleTemplate(IngestModuleFactory moduleFactory, Serializable ingestOptions, boolean enabled) { + IngestModuleTemplate(IngestModuleFactory moduleFactory, Serializable ingestOptions) { this.moduleFactory = moduleFactory; this.ingestOptions = ingestOptions; - this.enabled = enabled; } - String getModuleDisplayName() { - return moduleFactory.getModuleDisplayName(); + IngestModuleFactory getIngestModuleFactory() { + return moduleFactory; } - String getModuleDescription() { - return moduleFactory.getModuleDescription(); - } - Serializable getIngestOptions() { return ingestOptions; } void setIngestOptions(Serializable ingestOptions) { this.ingestOptions = ingestOptions; - } - - void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - boolean isEnabled() { - return enabled; - } + } } From 2155d2567c720ea6281573614098c9ee0db4473f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 21 Feb 2014 13:45:12 -0500 Subject: [PATCH 11/48] Completed integration of first draft of new ingest config code --- .../AddImageWizardIngestConfigPanel.java | 12 +- .../ingest/GeneralIngestConfigurator.java | 321 ----------------- .../ingest/IngestConfigurationPanel.java | 2 +- .../autopsy/ingest/IngestConfigurator.java | 337 +++++++++++++++--- .../autopsy/ingest/IngestDialog.java | 25 +- 5 files changed, 313 insertions(+), 384 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/GeneralIngestConfigurator.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java index c4c2f6d4dd..783b6d8ed7 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java @@ -73,8 +73,8 @@ class AddImageWizardIngestConfigPanel implements WizardDescriptor.Panel messages = ingestConfig.setContext(AddImageWizardIngestConfigPanel.class.getCanonicalName()); + ingestConfig = new IngestConfigurator(AddImageWizardIngestConfigPanel.class.getCanonicalName()); + List messages = ingestConfig.getMissingIngestModuleErrorMessages(); if (messages.isEmpty() == false) { StringBuilder warning = new StringBuilder(); for (String message : messages) { @@ -187,8 +187,12 @@ class AddImageWizardIngestConfigPanel implements 
WizardDescriptor.Panel sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.sleuthkit.autopsy.ingest; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import javax.swing.JPanel; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; -import org.sleuthkit.datamodel.Content; - -/** - * Controller to allow a user to set context-sensitive ingest module options, - * enable/disable ingest modules, and set general ingest options. Provides an - * ingest module module model class and instances of a UI component to its - * clients (Model-View-Controller design pattern). - */ -public class GeneralIngestConfigurator { - private static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules"; - private static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules"; - private static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space"; - private final IngestManager ingestManager = IngestManager.getDefault(); - private String context = null; - private boolean processUnallocatedSpace = false; - private IngestConfigurationPanel ingestConfigPanel = null; - private List contentToIngest = null; // RJCTODO: Remove if start() method removed - - public class IngestConfigurationException extends Exception { - IngestConfigurationException(String message) { - super(message); - } - } - - /** - * RJCTODO - * @param context - */ - public GeneralIngestConfigurator() { - } - - /** - * RJCTODO - * @param context - * @return - */ - public List setContext(String context) { - this.context = context; - return initializeForContext(); - } - - private List initializeForContext() { - // Get the enabled and disabled ingest modules settings for the current - // context. The default settings make all ingest modules enabled. - List moduleFactories = IngestManager.getDefault().getIngestModuleFactories(); // RJCTODO: Put in uniqueness test in loader! - HashSet loadedModuleNames = new HashSet<>(); - for (IngestModuleFactory moduleFactory : moduleFactories) { - loadedModuleNames.add(moduleFactory.getModuleDisplayName()); - } - HashSet enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(loadedModuleNames)); - HashSet disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, ""); - - // Create ingest module templates for the ingest module pipelines and - // wrap them in ingest module models to pass to the ingest configuration - // panel (view). The initial enabled/disabled state of the module models - // comes from the context-sensitive settings. - HashSet knownModuleNames = new HashSet<>(); - List modules = new ArrayList<>(); - for (IngestModuleFactory moduleFactory : moduleFactories) { - // NOTE: In the future, this code will be modified to get the ingest - // options for each modules for the current context; for now just - // get the default ingest options. 
- IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, moduleFactory.getDefaultIngestOptions()); - String moduleName = moduleFactory.getModuleDisplayName(); - IngestModuleModel module = new IngestModuleModel(moduleTemplate, enabledModuleNames.contains(moduleName)); - if (!enabledModuleNames.contains(moduleName) && !enabledModuleNames.contains(moduleName)) { - // The module factory was loaded, but the module name does not - // appear in the enabled/disabled module settings. Treat the - // module as a new module and enable it by default. - module.setEnabled(true); - enabledModuleNames.add(moduleName); // RJCTODO: Put in uniqueness test, i.e., check return value! - } - modules.add(module); - knownModuleNames.add(moduleName); - } - - // Check for missing modules and update the enabled/disabled ingest - // module settings. This way the settings for the context will be - // up-to-date, even if save() is never called. - List errorMessages = new ArrayList<>(); - for (String moduleName : enabledModuleNames) { - if (!knownModuleNames.contains(moduleName)) { - errorMessages.add(moduleName + " was previously enabled, but could not be found"); - enabledModuleNames.remove(moduleName); - disabledModuleNames.add(moduleName); - } - } - ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames)); - ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames)); - - // Get the process unallocated space flag setting. If the setting does - // not exist yet, default it to false. - if (ModuleSettings.settingExists(context, PARSE_UNALLOC_SPACE_KEY) == false) { - ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, "false"); - } - processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY)); - - // Make the configuration panel for the current context (view). - ingestConfigPanel = new IngestConfigurationPanel(modules, processUnallocatedSpace); - - return errorMessages; - } - - private static String makeCommaSeparatedList(HashSet input) { - if (input == null || input.isEmpty()) { - return ""; - } - - ArrayList list = new ArrayList<>(); - list.addAll(input); - StringBuilder csvList = new StringBuilder(); - for (int i = 0; i < list.size() - 1; ++i) { - csvList.append(list.get(i)).append(", "); - } - csvList.append(list.get(list.size() - 1)); - return csvList.toString(); - } - - private HashSet getModulesNamesFromSetting(String key, String defaultSetting) { - // Get the ingest modules setting from the user's config file. - // If there is no such setting yet, create the default setting. - if (ModuleSettings.settingExists(context, key) == false) { - ModuleSettings.setConfigSetting(context, key, defaultSetting); - } - HashSet moduleNames = new HashSet<>(); - String modulesSetting = ModuleSettings.getConfigSetting(context, key); - if (!modulesSetting.isEmpty()) { - String[] settingNames = modulesSetting.split(", "); - for (String name : settingNames) { - // Map some old core module names to the current core module names. 
- switch (name) { - case "Thunderbird Parser": - case "MBox Parser": - moduleNames.add("Email Parser"); - break; - case "File Extension Mismatch Detection": - case "Extension Mismatch Detector": - moduleNames.add("File Extension Mismatch Detector"); - break; - default: - moduleNames.add(name); - } - } - } - return moduleNames; - } - - public JPanel getIngestConfigPanel() throws IngestConfigurationException { - if (null == context || null == ingestConfigPanel) { - throw new IngestConfigurationException("Ingest context not set"); - } - - return ingestConfigPanel; - } - - public void save() throws IngestConfigurationException { - if (null == context || null == ingestConfigPanel) { - throw new IngestConfigurationException("Ingest context not set"); - } - - List modules = ingestConfigPanel.getIngestModules(); - - // Save the enbaled/disabled ingest module settings for the current context. - HashSet enabledModuleNames = new HashSet<>(); - HashSet disabledModuleNames = new HashSet<>(); - for (IngestModuleModel module : modules) { - if (module.isEnabled()) { - enabledModuleNames.add(module.getModuleName()); - } - else { - disabledModuleNames.add(module.getModuleName()); - } - } - ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames)); - ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames)); - - // Save the process unallocated space setting for this context. - String processUnalloc = Boolean.toString(ingestConfigPanel.getProcessUnallocSpace()); - ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, processUnalloc); - - // Get the ingest module options for each ingest module. - // NOTE: In the future, this code will be modified to persist the ingest - // options for each ingest module for the current context. - // RJCTODO: Decide whether to set the ingest options here or in the dialog; in the dialog allows corrections by user -// if (currentModule != null && currentModule.hasSimpleConfiguration()) { -// currentModule.saveSimpleConfiguration(); -// } - } - - // RJCTODO: If time permits, make it so that this class is not responsible - // starting and running the ingest - probably need to do this anyway, at - // least if the IngestConfigurator interface goes away and this becomes the - // IngestConfigurator class. - public void setContent(List inputContent) { - this.contentToIngest = inputContent; - } - - // RJCTODO: If time permits, make it so that this class is not responsible - // starting and running the ingest - probably need to do this anyway, at - // least if the IngestConfigurator interface goes away and this becomes the - // IngestConfigurator class. - public void start() { - // Get the list of ingest modules selected by the user. - // RJCTODO: -// List modulesToStart = ingestConfigPanel.getModulesToStart(); - List modulesToStart = new ArrayList<>(); - - // Get the user's selection of whether or not to process unallocated space. - ingestManager.setProcessUnallocSpace(processUnallocatedSpace); - - if (!modulesToStart.isEmpty() && contentToIngest != null) { - // Queue the ingest process. - ingestManager.scheduleDataSource(modulesToStart, contentToIngest); - } - } - - // RJCTODO: If time permits, make it so that this class is not responsible - // starting and running the ingest - probably need to do this anyway, at - // least if the IngestConfigurator interface goes away and this becomes the - // IngestConfigurator class. 
- public boolean isIngestRunning() { - return ingestManager.isIngestRunning(); - } - - /** - * A model of an ingest module tailored for the view used to configure - * ingest modules. - */ - static class IngestModuleModel { - private final IngestModuleTemplate moduleTemplate; - private final IngestModuleFactory moduleFactory; - private final JPanel ingestOptionsPanel; - private final JPanel globalOptionsPanel; - private boolean enabled = true; - - IngestModuleModel(IngestModuleTemplate moduleTemplate, boolean enabled) { - this.moduleTemplate = moduleTemplate; - moduleFactory = moduleTemplate.getIngestModuleFactory(); - if (moduleFactory.providesIngestOptionsPanels()) { - ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(moduleTemplate.getIngestOptions()); - } - else { - ingestOptionsPanel = null; - } - if (moduleFactory.providesGlobalOptionsPanels()) { - globalOptionsPanel = moduleFactory.getGlobalOptionsPanel(); - } - else { - globalOptionsPanel = null; - } - this.enabled = enabled; - } - - String getModuleName() { - return moduleFactory.getModuleDisplayName(); - } - - String getModuleDescription() { - return moduleFactory.getModuleDescription(); - } - - void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - boolean isEnabled() { - return enabled; - } - - boolean hasIngestOptionsPanel() { - return moduleFactory.providesIngestOptionsPanels(); - } - - JPanel getIngestOptionsPanel() { - return ingestOptionsPanel; - } - - boolean hasGlobalOptionsPanel() { - return moduleFactory.providesGlobalOptionsPanels(); - } - - JPanel getGlobalOptionsPanel() { - return globalOptionsPanel; - } - - void saveGlobalOptions() throws IngestModuleFactory.InvalidOptionsException { - // RJCTODO: Check for null. - moduleFactory.saveGlobalOptionsFromPanel(globalOptionsPanel); - } - - private IngestModuleTemplate getIngestModuleTemplate() { - return moduleTemplate; - } - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java index abe805df15..c2677c6061 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java @@ -33,7 +33,7 @@ import javax.swing.table.AbstractTableModel; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableColumn; import org.sleuthkit.autopsy.corecomponents.AdvancedConfigurationDialog; -import org.sleuthkit.autopsy.ingest.GeneralIngestConfigurator.IngestModuleModel; +import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; /** * User interface component to allow a user to set ingest module options and diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java index 94ef6924b9..e11360a09c 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java @@ -1,84 +1,315 @@ /* * Autopsy Forensic Browser - * - * Copyright 2011-2013 Basis Technology Corp. + * + * Copyright 2013-2014 Basis Technology Corp. * Contact: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ + package org.sleuthkit.autopsy.ingest; +import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import javax.swing.JPanel; +import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.datamodel.Content; /** - * Instances of this class provide the following services: - * 1. A way to save and load the ingest process configuration settings for a - * given ingest process context. - * 2. A UI component for configuring ingest process settings. - * 3. A way to specify input content and start the ingest process for a - * given ingest process context. + * Controller to allow a user to set context-sensitive ingest module options, + * enable/disable ingest modules, and set general ingest options. Provides an + * ingest module module model class and instances of a UI component to its + * clients (Model-View-Controller design pattern). */ -// @@@ This interface needs to be re-designed. An interface for allowing the -// authors of ingest modules to expose context sensitive module configuration -// settings needs to be provided; there also needs to be a way for users to -// configure the ingest process that uses those modules. These are separate -// concerns; likewise, kicking off an ingest process for particular content in -// a particular context is a separate concern. -public interface IngestConfigurator { - /** - * Specifies the ingest process context for the purpose of choosing, saving, - * and loading ingest process configuration settings; also determines what - * configuration settings will be in effect if the setContent() and start() - * methods are called to start the ingest process for some content specified - * using the setContent() method. - * @return A list, possibly empty, of messages describing errors that - * occurred when loading the configuration settings. - */ - public List setContext(String contextName); +public class IngestConfigurator { + private static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules"; + private static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules"; + private static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space"; + private final IngestManager ingestManager = IngestManager.getDefault(); + private final String context; + private List missingIngestModuleErrorMessages = new ArrayList<>(); + private boolean processUnallocatedSpace = false; + private IngestConfigurationPanel ingestConfigPanel = null; + private List contentToIngest = null; // RJCTODO: Remove if start() method removed + + public class IngestConfigurationException extends Exception { + IngestConfigurationException(String message) { + super(message); + } + } /** - * Provides a UI component for choosing ingest process configuration - * settings for the ingest process context specified using the setContext() - * method. 
+     * RJCTODO
+     * @param context
      */
-    JPanel getIngestConfigPanel();
+    public IngestConfigurator(String context) {
+        this.context = context;
+        initializeForContext();
+    }
+
+    /**
+     * RJCTODO
+     * @return
+     */
+    public List getMissingIngestModuleErrorMessages() {
+        return missingIngestModuleErrorMessages;
+    }
+
+    private void initializeForContext() {
+        // Get the enabled and disabled ingest modules settings for the current
+        // context. The default settings make all ingest modules enabled.
+        List moduleFactories = IngestManager.getDefault().getIngestModuleFactories(); // RJCTODO: Put in uniqueness test in loader!
+        HashSet loadedModuleNames = new HashSet<>();
+        for (IngestModuleFactory moduleFactory : moduleFactories) {
+            loadedModuleNames.add(moduleFactory.getModuleDisplayName());
+        }
+        HashSet enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(loadedModuleNames));
+        HashSet disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, "");
+
+        // Create ingest module templates for the ingest module pipelines and
+        // wrap them in ingest module models to pass to the ingest configuration
+        // panel (view). The initial enabled/disabled state of the module models
+        // comes from the context-sensitive settings.
+        HashSet knownModuleNames = new HashSet<>();
+        List modules = new ArrayList<>();
+        for (IngestModuleFactory moduleFactory : moduleFactories) {
+            // NOTE: In the future, this code will be modified to get the ingest
+            // options for each modules for the current context; for now just
+            // get the default ingest options.
+            IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, moduleFactory.getDefaultIngestOptions());
+            String moduleName = moduleFactory.getModuleDisplayName();
+            IngestConfigurator.IngestModuleModel module = new IngestConfigurator.IngestModuleModel(moduleTemplate, enabledModuleNames.contains(moduleName));
+            if (!enabledModuleNames.contains(moduleName) && !disabledModuleNames.contains(moduleName)) {
+                // The module factory was loaded, but the module name does not
+                // appear in the enabled/disabled module settings. Treat the
+                // module as a new module and enable it by default.
+                module.setEnabled(true);
+                enabledModuleNames.add(moduleName); // RJCTODO: Put in uniqueness test, i.e., check return value!
+            }
+            modules.add(module);
+            knownModuleNames.add(moduleName);
+        }
+
+        // Check for missing modules and update the enabled/disabled ingest
+        // module settings. This way the settings for the context will be
+        // up-to-date, even if save() is never called.
+        for (String moduleName : new HashSet<>(enabledModuleNames)) { // iterate over a copy; the set is modified below
+            if (!knownModuleNames.contains(moduleName)) {
+                missingIngestModuleErrorMessages.add(moduleName + " was previously enabled, but could not be found");
+                enabledModuleNames.remove(moduleName);
+                disabledModuleNames.add(moduleName);
+            }
+        }
+        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));

-    /**
-     * Saves the ingest process configuration settings for the ingest process
-     * context specified using the setContext() method.
-     */
-    void save();
+
+        // Get the process unallocated space flag setting. If the setting does
+        // not exist yet, default it to false.
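+        // As an illustration only (the values shown here are hypothetical),
+        // the persisted settings for a context could end up looking like:
+        //
+        //   Enabled_Ingest_Modules=Email Parser, File Extension Mismatch Detector
+        //   Disabled_Ingest_Modules=
+        //   Process_Unallocated_Space=false
+        //
+        // where the module lists are the comma-separated strings built by
+        // makeCommaSeparatedList() and parsed by getModulesNamesFromSetting().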
+        if (ModuleSettings.settingExists(context, PARSE_UNALLOC_SPACE_KEY) == false) {
+            ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, "false");
+        }
+        processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY));
+
+        // Make the configuration panel for the current context (view).
+        ingestConfigPanel = new IngestConfigurationPanel(modules, processUnallocatedSpace);
     }

-    /**
-     * Sets the input content for an ingest process prior to calling start() to
-     * run the process using the process configuration settings for the context
-     * specified using setContext().
-     */
-    void setContent(List inputContent);
+    private static String makeCommaSeparatedList(HashSet input) {
+        if (input == null || input.isEmpty()) {
+            return "";
+        }
+
+        ArrayList list = new ArrayList<>();
+        list.addAll(input);
+        StringBuilder csvList = new StringBuilder();
+        for (int i = 0; i < list.size() - 1; ++i) {
+            csvList.append(list.get(i)).append(", ");
+        }
+        csvList.append(list.get(list.size() - 1));
+        return csvList.toString();
+    }
+
+    private HashSet getModulesNamesFromSetting(String key, String defaultSetting) {
+        // Get the ingest modules setting from the user's config file.
+        // If there is no such setting yet, create the default setting.
+        if (ModuleSettings.settingExists(context, key) == false) {
+            ModuleSettings.setConfigSetting(context, key, defaultSetting);
+        }
+        HashSet moduleNames = new HashSet<>();
+        String modulesSetting = ModuleSettings.getConfigSetting(context, key);
+        if (!modulesSetting.isEmpty()) {
+            String[] settingNames = modulesSetting.split(", ");
+            for (String name : settingNames) {
+                // Map some old core module names to the current core module names.
+                switch (name) {
+                    case "Thunderbird Parser":
+                    case "MBox Parser":
+                        moduleNames.add("Email Parser");
+                        break;
+                    case "File Extension Mismatch Detection":
+                    case "Extension Mismatch Detector":
+                        moduleNames.add("File Extension Mismatch Detector");
+                        break;
+                    default:
+                        moduleNames.add(name);
+                }
+            }
+        }
+        return moduleNames;
+    }
+
+    public JPanel getIngestConfigPanel() {
+        return ingestConfigPanel;
+    }
+
+    public void save() throws IngestConfigurationException {
+        if (null == context || null == ingestConfigPanel) {
+            throw new IngestConfigurationException("Ingest context not set");
+        }
+
+        List modules = ingestConfigPanel.getIngestModules();
+
+        // Save the enabled/disabled ingest module settings for the current context.
+        HashSet enabledModuleNames = new HashSet<>();
+        HashSet disabledModuleNames = new HashSet<>();
+        for (IngestModuleModel module : modules) {
+            if (module.isEnabled()) {
+                enabledModuleNames.add(module.getModuleName());
+            }
+            else {
+                disabledModuleNames.add(module.getModuleName());
+            }
+        }
+        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
+
+        // Save the process unallocated space setting for this context.
+        String processUnalloc = Boolean.toString(ingestConfigPanel.getProcessUnallocSpace());
+        ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, processUnalloc);
+
+        // Get the ingest module options for each ingest module.
+        // NOTE: In the future, this code will be modified to persist the ingest
+        // options for each ingest module for the current context.
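+        // A sketch of one way that per-module persistence could work; the
+        // Base64 serialization helper named here is hypothetical, not part of
+        // the current API:
+        //
+        //   for (IngestModuleModel module : modules) {
+        //       Serializable options = module.getIngestModuleTemplate().getIngestOptions();
+        //       ModuleSettings.setConfigSetting(context,
+        //               module.getModuleName() + "_Ingest_Options",
+        //               serializeToBase64String(options)); // hypothetical helper
+        //   }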
+ // RJCTODO: Decide whether to set the ingest options here or in the dialog; in the dialog allows corrections by user +// if (currentModule != null && currentModule.hasSimpleConfiguration()) { +// currentModule.saveSimpleConfiguration(); +// } + } + + // RJCTODO: If time permits, make it so that this class is not responsible + // starting and running the ingest - probably need to do this anyway, at + // least if the IngestConfigurator interface goes away and this becomes the + // IngestConfigurator class. + public void setContent(List inputContent) { + this.contentToIngest = inputContent; + } + + // RJCTODO: If time permits, make it so that this class is not responsible + // starting and running the ingest - probably need to do this anyway, at + // least if the IngestConfigurator interface goes away and this becomes the + // IngestConfigurator class. + public void start() { + // Get the list of ingest modules selected by the user. + // RJCTODO: +// List modulesToStart = ingestConfigPanel.getModulesToStart(); + List modulesToStart = new ArrayList<>(); + + // Get the user's selection of whether or not to process unallocated space. + ingestManager.setProcessUnallocSpace(processUnallocatedSpace); + + if (!modulesToStart.isEmpty() && contentToIngest != null) { + // Queue the ingest process. + ingestManager.scheduleDataSource(modulesToStart, contentToIngest); + } + } + + // RJCTODO: If time permits, make it so that this class is not responsible + // starting and running the ingest - probably need to do this anyway, at + // least if the IngestConfigurator interface goes away and this becomes the + // IngestConfigurator class. + public boolean isIngestRunning() { + return ingestManager.isIngestRunning(); + } /** - * Starts (queues) the ingest process for the content specified using the - * setContent() method, using the configuration settings corresponding to - * the ingest process context specified using the setContext() method. + * A model of an ingest module tailored for the view used to configure + * ingest modules. */ - void start(); + static class IngestModuleModel { + private final IngestModuleTemplate moduleTemplate; + private final IngestModuleFactory moduleFactory; + private final JPanel ingestOptionsPanel; + private final JPanel globalOptionsPanel; + private boolean enabled = true; + + IngestModuleModel(IngestModuleTemplate moduleTemplate, boolean enabled) { + this.moduleTemplate = moduleTemplate; + moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.providesIngestOptionsPanels()) { + ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(moduleTemplate.getIngestOptions()); + } + else { + ingestOptionsPanel = null; + } + if (moduleFactory.providesGlobalOptionsPanels()) { + globalOptionsPanel = moduleFactory.getGlobalOptionsPanel(); + } + else { + globalOptionsPanel = null; + } + this.enabled = enabled; + } + + String getModuleName() { + return moduleFactory.getModuleDisplayName(); + } - /** - * Returns true if any ingest process is running, false otherwise. - * Note that the running process may or may not be the process started - * (queued) by an invocation of the start() method. 
- */ - boolean isIngestRunning(); + String getModuleDescription() { + return moduleFactory.getModuleDescription(); + } + + void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + boolean isEnabled() { + return enabled; + } + + boolean hasIngestOptionsPanel() { + return moduleFactory.providesIngestOptionsPanels(); + } + + JPanel getIngestOptionsPanel() { + return ingestOptionsPanel; + } + + boolean hasGlobalOptionsPanel() { + return moduleFactory.providesGlobalOptionsPanels(); + } + + JPanel getGlobalOptionsPanel() { + return globalOptionsPanel; + } + + void saveGlobalOptions() throws IngestModuleFactory.InvalidOptionsException { + // RJCTODO: Check for null. + moduleFactory.saveGlobalOptionsFromPanel(globalOptionsPanel); + } + + private IngestModuleTemplate getIngestModuleTemplate() { + return moduleTemplate; + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java index fb68c15a5e..88315d80e5 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java @@ -46,8 +46,8 @@ public class IngestDialog extends JDialog { public IngestDialog(JFrame frame, String title, boolean modal) { super(frame, title, modal); - ingestConfigurator = new GeneralIngestConfigurator(); - List messages = ingestConfigurator.setContext(IngestDialog.class.getCanonicalName()); + ingestConfigurator = new IngestConfigurator(IngestDialog.class.getCanonicalName()); + List messages = ingestConfigurator.getMissingIngestModuleErrorMessages(); if (messages.isEmpty() == false) { StringBuilder warning = new StringBuilder(); for (String message : messages) { @@ -83,7 +83,12 @@ public class IngestDialog extends JDialog { @Override public void actionPerformed(ActionEvent e) { - ingestConfigurator.save(); + try { + ingestConfigurator.save(); + } + catch (IngestConfigurator.IngestConfigurationException ex) { + // RJCTODO: Decide what to do here. + } ingestConfigurator.start(); close(); } @@ -92,7 +97,12 @@ public class IngestDialog extends JDialog { @Override public void actionPerformed(ActionEvent e) { - ingestConfigurator.save(); + try { + ingestConfigurator.save(); + } + catch (IngestConfigurator.IngestConfigurationException ex) { + // RJCTODO: Decide what to do here. + } close(); } }); @@ -100,7 +110,12 @@ public class IngestDialog extends JDialog { @Override public void windowClosing(WindowEvent e) { - ingestConfigurator.save(); + try { + ingestConfigurator.save(); + } + catch (IngestConfigurator.IngestConfigurationException ex) { + // RJCTODO: Decide what to do here. 
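+                    // One candidate behavior, sketched here only (MessageNotifyUtil
+                    // is the notification utility used elsewhere in Autopsy):
+                    //
+                    //   MessageNotifyUtil.Notify.show("Ingest Settings",
+                    //           "Failed to save ingest settings: " + ex.getMessage(),
+                    //           MessageNotifyUtil.MessageType.ERROR);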
+ } close(); } }); From 751982bfa158b735d1ad1d252379427f87af30ba Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Fri, 21 Feb 2014 17:46:11 -0500 Subject: [PATCH 12/48] Start adding ingest pipeline infrastructure --- .../autopsy/ingest/IngestManager.java | 15 + .../autopsy/ingest/IngestModuleLoader.java | 483 +++++++++--------- .../autopsy/ingest/IngestPipelines.java | 43 ++ 3 files changed, 295 insertions(+), 246 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 8c4250901b..a5f3d22302 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -1303,4 +1303,19 @@ public class IngestManager { scheduler.getDataSourceScheduler().empty(); } } + + private class FileIngester implements Runnable { + @Override + public void run() { + final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); + while (fileScheduler.hasNext()) { + final FileTask fileTask = fileScheduler.next(); + final DataSourceTask dataSourceTask = fileTask.getDataSourceTask(); + final AbstractFile fileToProcess = fileTask.getFile(); + + fileToProcess.close(); + } + logger.log(Level.INFO, "IngestManager: Finished processing files"); + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java index c3ea5433a2..e1c140e4ec 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java @@ -471,48 +471,49 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; private void autodiscover() throws IngestModuleLoaderException { Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); - moduleFactories.addAll(factories); - -// moduleFactories - - // Use Lookup to find the other NBM modules. We'll later search them for ingest modules - Collection moduleInfos = Lookup.getDefault().lookupAll(ModuleInfo.class); - logger.log(Level.INFO, "Autodiscovery, found #platform modules: " + moduleInfos.size()); - - Set urls = getJarPaths(moduleInfos); - ArrayList reflectionsSet = new ArrayList<>(); - - for (final ModuleInfo moduleInfo : moduleInfos) { - if (moduleInfo.isEnabled()) { - /* NOTE: We have an assumption here that the modules in an NBM will - * have the same package name as the NBM name. This means that - * an NBM can have only one package with modules in it. 
*/ - String basePackageName = moduleInfo.getCodeNameBase(); - - // skip the standard ones - if (basePackageName.startsWith("org.netbeans") - || basePackageName.startsWith("org.openide")) { - continue; - } - - logger.log(Level.INFO, "Found module: " + moduleInfo.getDisplayName() + " " + basePackageName - + " Build version: " + moduleInfo.getBuildVersion() - + " Spec version: " + moduleInfo.getSpecificationVersion() - + " Impl version: " + moduleInfo.getImplementationVersion()); - - ConfigurationBuilder cb = new ConfigurationBuilder(); - cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(basePackageName))); - cb.setUrls(urls); - cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); - reflectionsSet.add(new Reflections(cb)); - } - else { - // log if we have our own modules disabled - if (moduleInfo.getCodeNameBase().startsWith("org.sleuthkit")) { - logger.log(Level.WARNING, "Sleuth Kit Module not enabled: " + moduleInfo.getDisplayName()); - } - } + for (IngestModuleFactory factory : factories) { + logger.log(Level.INFO, "Loaded ingest module factory: name = " + factory.getModuleDisplayName() + ", version = " + factory.getModuleVersionNumber()); + moduleFactories.add(factory); } + +// // Use Lookup to find the other NBM modules. We'll later search them for ingest modules +// Collection moduleInfos = Lookup.getDefault().lookupAll(ModuleInfo.class); +// logger.log(Level.INFO, "Autodiscovery, found #platform modules: " + moduleInfos.size()); +// +// Set urls = getJarPaths(moduleInfos); +// ArrayList reflectionsSet = new ArrayList<>(); +// +// for (final ModuleInfo moduleInfo : moduleInfos) { +// if (moduleInfo.isEnabled()) { +// /* NOTE: We have an assumption here that the modules in an NBM will +// * have the same package name as the NBM name. This means that +// * an NBM can have only one package with modules in it. */ +// String basePackageName = moduleInfo.getCodeNameBase(); +// +// // skip the standard ones +// if (basePackageName.startsWith("org.netbeans") +// || basePackageName.startsWith("org.openide")) { +// continue; +// } +// +// logger.log(Level.INFO, "Found module: " + moduleInfo.getDisplayName() + " " + basePackageName +// + " Build version: " + moduleInfo.getBuildVersion() +// + " Spec version: " + moduleInfo.getSpecificationVersion() +// + " Impl version: " + moduleInfo.getImplementationVersion()); +// +// ConfigurationBuilder cb = new ConfigurationBuilder(); +// cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(basePackageName))); +// cb.setUrls(urls); +// cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); +// reflectionsSet.add(new Reflections(cb)); +// } +// else { +// // log if we have our own modules disabled +// if (moduleInfo.getCodeNameBase().startsWith("org.sleuthkit")) { +// logger.log(Level.WARNING, "Sleuth Kit Module not enabled: " + moduleInfo.getDisplayName()); +// } +// } +// } /* This area is used to load the example modules. They are not found via lookup since they * are in this NBM module. 
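     * (The factories gathered above, by contrast, are found via the Lookup
     * because they register for it. As a sketch, with a hypothetical factory
     * name, the registration looks like:
     *
     *   @ServiceProvider(service = IngestModuleFactory.class)
     *   public class SampleIngestModuleFactory implements IngestModuleFactory {
     *       ...
     *   }
     *
     * which is what makes Lookup.getDefault().lookupAll(IngestModuleFactory.class)
     * return an instance of the factory.)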
@@ -526,111 +527,104 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; reflectionsSet.add(new Reflections(cb)); */ - for (Reflections reflections : reflectionsSet) { - - Set fileModules = reflections.getSubTypesOf(IngestModuleAbstractFile.class); - Iterator it = fileModules.iterator(); - while (it.hasNext()) { - logger.log(Level.INFO, "Found file ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); - } - - Set dataSourceModules = reflections.getSubTypesOf(IngestModuleDataSource.class); - it = dataSourceModules.iterator(); - while (it.hasNext()) { - logger.log(Level.INFO, "Found DataSource ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); - } - - if ((fileModules.isEmpty()) && (dataSourceModules.isEmpty())) { - logger.log(Level.INFO, "Module has no ingest modules: " + reflections.getClass().getSimpleName()); - continue; - } - - //find out which modules to add - //TODO check which modules to remove (which modules were uninstalled) - boolean modulesChanged = false; - - it = fileModules.iterator(); - while (it.hasNext()) { - boolean exists = false; - Class foundClass = (Class) it.next(); - - for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { - if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) { - continue; //skip - } - - for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { - //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); - if (foundClass.getName().equals(rawM.location)) { - exists = true; - break; - } - } - if (exists == true) { - break; - } - } - - if (exists == false) { - logger.log(Level.INFO, "Discovered a new file module to load: " + foundClass.getName()); - //ADD MODULE - addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS); - modulesChanged = true; - } - - } - - it = dataSourceModules.iterator(); - while (it.hasNext()) { - boolean exists = false; - Class foundClass = (Class) it.next(); - - for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { - if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS.toString())) { - continue; //skip - } - - - for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { - //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); - if (foundClass.getName().equals(rawM.location)) { - exists = true; - break; - } - } - if (exists == true) { - break; - } - } - - if (exists == false) { - logger.log(Level.INFO, "Discovered a new DataSource module to load: " + foundClass.getName()); - //ADD MODULE - addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS); - modulesChanged = true; - } - - } - - if (modulesChanged) { - save(); - - try { - pcs.firePropertyChange(IngestModuleLoader.Event.ModulesReloaded.toString(), 0, 1); - } - catch (Exception e) { - logger.log(Level.SEVERE, "IngestModuleLoader listener threw exception", e); - MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to IngestModuleLoader updates. See log to determine which module. 
Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); - } - } - - /* - //Enumeration resources = moduleClassLoader.getResources(basePackageName); - Enumeration resources = classLoader.getResources(basePackageName); - while (resources.hasMoreElements()) { - System.out.println(resources.nextElement()); - } */ - } +// for (Reflections reflections : reflectionsSet) { +// +// Set fileModules = reflections.getSubTypesOf(IngestModuleAbstractFile.class); +// Iterator it = fileModules.iterator(); +// while (it.hasNext()) { +// logger.log(Level.INFO, "Found file ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); +// } +// +// Set dataSourceModules = reflections.getSubTypesOf(IngestModuleDataSource.class); +// it = dataSourceModules.iterator(); +// while (it.hasNext()) { +// logger.log(Level.INFO, "Found DataSource ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); +// } +// +// if ((fileModules.isEmpty()) && (dataSourceModules.isEmpty())) { +// logger.log(Level.INFO, "Module has no ingest modules: " + reflections.getClass().getSimpleName()); +// continue; +// } +// +// //find out which modules to add +// //TODO check which modules to remove (which modules were uninstalled) +// boolean modulesChanged = false; +// +// it = fileModules.iterator(); +// while (it.hasNext()) { +// boolean exists = false; +// Class foundClass = (Class) it.next(); +// +// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { +// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) { +// continue; //skip +// } +// +// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { +// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); +// if (foundClass.getName().equals(rawM.location)) { +// exists = true; +// break; +// } +// } +// if (exists == true) { +// break; +// } +// } +// +// if (exists == false) { +// logger.log(Level.INFO, "Discovered a new file module to load: " + foundClass.getName()); +// //ADD MODULE +// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS); +// modulesChanged = true; +// } +// +// } +// +// it = dataSourceModules.iterator(); +// while (it.hasNext()) { +// boolean exists = false; +// Class foundClass = (Class) it.next(); +// +// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { +// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS.toString())) { +// continue; //skip +// } +// +// +// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { +// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); +// if (foundClass.getName().equals(rawM.location)) { +// exists = true; +// break; +// } +// } +// if (exists == true) { +// break; +// } +// } +// +// if (exists == false) { +// logger.log(Level.INFO, "Discovered a new DataSource module to load: " + foundClass.getName()); +// //ADD MODULE +// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS); +// modulesChanged = true; +// } +// +// } +// +// if (modulesChanged) { +// save(); +// +// try { +// pcs.firePropertyChange(IngestModuleLoader.Event.ModulesReloaded.toString(), 0, 1); +// } +// catch (Exception e) { +// logger.log(Level.SEVERE, "IngestModuleLoader listener threw exception", e); +// MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to IngestModuleLoader updates. 
See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); +// } +// } +// } } /** @@ -756,100 +750,97 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; private void instantiate() throws IngestModuleLoaderException { //clear current - filePipeline.clear(); - dataSourcePipeline.clear(); +// filePipeline.clear(); +// dataSourcePipeline.clear(); //add autodiscovered modules to pipelinesXML autodiscover(); //validate all modules: from XML + just autodiscovered - validate(); - - for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) { - if (pRaw.valid == false) { - //skip invalid pipelines - continue; - } - - //sort modules by order parameter, in case XML order is different - Collections.sort(pRaw.modules, new Comparator() { - @Override - public int compare(IngestModuleLoader.XmlModuleRaw o1, IngestModuleLoader.XmlModuleRaw o2) { - return Integer.valueOf(o1.order).compareTo(Integer.valueOf(o2.order)); - } - }); - - //check pipelineType, add to right pipeline collection - IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pRaw.type); - - for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) { - try { - if (pMod.valid == false) { - //skip invalid modules - continue; - } - - //add to right pipeline - switch (pType) { - case FILE_ANALYSIS: - IngestModuleAbstractFile fileModuleInstance = null; - final Class fileModuleClass = - (Class) Class.forName(pMod.location, true, classLoader); - try { - Method getDefaultMethod = fileModuleClass.getMethod("getDefault"); - if (getDefaultMethod != null) { - fileModuleInstance = (IngestModuleAbstractFile) getDefaultMethod.invoke(null); - } - } catch (NoSuchMethodException ex) { - logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); - pMod.valid = false; //prevent from trying to load again - } catch (SecurityException ex) { - logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); - pMod.valid = false; //prevent from trying to load again - } catch (IllegalAccessException ex) { - logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); - pMod.valid = false; //prevent from trying to load again - } catch (InvocationTargetException ex) { - logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); - pMod.valid = false; //prevent from trying to load again - } - - filePipeline.add(fileModuleInstance); - break; - case DATA_SOURCE_ANALYSIS: - final Class dataSourceModuleClass = - (Class) Class.forName(pMod.location, true, classLoader); - - try { - Constructor constr = dataSourceModuleClass.getConstructor(); - IngestModuleDataSource dataSourceModuleInstance = constr.newInstance(); - - if (dataSourceModuleInstance != null) { - dataSourcePipeline.add(dataSourceModuleInstance); - } - - } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { - logger.log(Level.WARNING, "Validated module, could not initialize, check for bugs in the module: " + pMod.location, ex); - pMod.valid = false; - } - - - break; - default: - logger.log(Level.SEVERE, "Unexpected pipeline type to add module to: " + pType); - } - - - } catch (ClassNotFoundException ex) { - logger.log(Level.SEVERE, "Validated module, but could not load (shouldn't happen): " + pMod.location); - } - } - - } //end instantiating 
modules in XML - - - +// validate(); +// +// for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) { +// if (pRaw.valid == false) { +// //skip invalid pipelines +// continue; +// } +// +// //sort modules by order parameter, in case XML order is different +// Collections.sort(pRaw.modules, new Comparator() { +// @Override +// public int compare(IngestModuleLoader.XmlModuleRaw o1, IngestModuleLoader.XmlModuleRaw o2) { +// return Integer.valueOf(o1.order).compareTo(Integer.valueOf(o2.order)); +// } +// }); +// +// //check pipelineType, add to right pipeline collection +// IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pRaw.type); +// +// for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) { +// try { +// if (pMod.valid == false) { +// //skip invalid modules +// continue; +// } +// +// //add to right pipeline +// switch (pType) { +// case FILE_ANALYSIS: +// IngestModuleAbstractFile fileModuleInstance = null; +// final Class fileModuleClass = +// (Class) Class.forName(pMod.location, true, classLoader); +// try { +// Method getDefaultMethod = fileModuleClass.getMethod("getDefault"); +// if (getDefaultMethod != null) { +// fileModuleInstance = (IngestModuleAbstractFile) getDefaultMethod.invoke(null); +// } +// } catch (NoSuchMethodException ex) { +// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); +// pMod.valid = false; //prevent from trying to load again +// } catch (SecurityException ex) { +// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); +// pMod.valid = false; //prevent from trying to load again +// } catch (IllegalAccessException ex) { +// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); +// pMod.valid = false; //prevent from trying to load again +// } catch (InvocationTargetException ex) { +// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); +// pMod.valid = false; //prevent from trying to load again +// } +// +// filePipeline.add(fileModuleInstance); +// break; +// case DATA_SOURCE_ANALYSIS: +// final Class dataSourceModuleClass = +// (Class) Class.forName(pMod.location, true, classLoader); +// +// try { +// Constructor constr = dataSourceModuleClass.getConstructor(); +// IngestModuleDataSource dataSourceModuleInstance = constr.newInstance(); +// +// if (dataSourceModuleInstance != null) { +// dataSourcePipeline.add(dataSourceModuleInstance); +// } +// +// } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { +// logger.log(Level.WARNING, "Validated module, could not initialize, check for bugs in the module: " + pMod.location, ex); +// pMod.valid = false; +// } +// +// +// break; +// default: +// logger.log(Level.SEVERE, "Unexpected pipeline type to add module to: " + pType); +// } +// +// +// } catch (ClassNotFoundException ex) { +// logger.log(Level.SEVERE, "Validated module, but could not load (shouldn't happen): " + pMod.location); +// } +// } +// +// } //end instantiating modules in XML } /** @@ -957,18 +948,18 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * @throws IngestModuleLoaderException */ public synchronized void init() throws IngestModuleLoaderException { - absFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINE_CONFIG_XML; - ClassLoader parentClassLoader = 
Lookup.getDefault().lookup(ClassLoader.class); - classLoader = new CustomClassLoader(parentClassLoader); - - try { - boolean extracted = PlatformUtil.extractResourceToUserConfigDir(IngestModuleLoader.class, PIPELINE_CONFIG_XML); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir ", ex); - } - - //load the pipeline config - loadRawPipeline(); +// absFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINE_CONFIG_XML; +// ClassLoader parentClassLoader = Lookup.getDefault().lookup(ClassLoader.class); +// classLoader = new CustomClassLoader(parentClassLoader); +// +// try { +// boolean extracted = PlatformUtil.extractResourceToUserConfigDir(IngestModuleLoader.class, PIPELINE_CONFIG_XML); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir ", ex); +// } +// +// //load the pipeline config +// loadRawPipeline(); instantiate(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java new file mode 100755 index 0000000000..bc3e786faf --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java @@ -0,0 +1,43 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.sleuthkit.autopsy.ingest; + +import java.util.ArrayList; +import java.util.List; + +/** + * + * @author rcordovano + */ +public class IngestPipelines { + private List fileIngestPipeline = new ArrayList<>(); + private List dataSourceIngestPipeline = new ArrayList<>(); + + IngestPipelines(List moduleTemplates) { + for (IngestModuleTemplate moduleTemplate : moduleTemplates) { + if (moduleTemplate.getIngestModuleFactory().isFileIngestModuleFactory()) { + fileIngestPipeline.add(moduleTemplate); + } + else { + dataSourceIngestPipeline.add(moduleTemplate); + } + } + } +} From 287b8d08a376887ce04e0971c9ad4f4a94bfdc79 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 24 Feb 2014 16:15:48 -0500 Subject: [PATCH 13/48] Continue adding ingest pipeline infrastructure --- .../ingest/IngestConfigurationPanel.java | 112 ++++++- .../autopsy/ingest/IngestConfigurator.java | 285 +++++++----------- .../autopsy/ingest/IngestManager.java | 14 +- .../autopsy/ingest/IngestModuleTemplate.java | 11 +- .../autopsy/ingest/IngestPipelines.java | 71 ++++- 5 files changed, 292 insertions(+), 201 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java index c2677c6061..9a8468af3d 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java @@ -25,6 +25,7 @@ import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.util.ArrayList; import java.util.List; +import javax.swing.JPanel; import javax.swing.JTable; import javax.swing.ListSelectionModel; import javax.swing.event.ListSelectionEvent; @@ -32,8 +33,8 @@ import javax.swing.event.ListSelectionListener; import javax.swing.table.AbstractTableModel; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.TableColumn; +import java.io.Serializable; import org.sleuthkit.autopsy.corecomponents.AdvancedConfigurationDialog; -import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; /** * User interface component to allow a user to set ingest module options and @@ -41,20 +42,37 @@ import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; * provided by a controller (Model-View-Controller design pattern). 
*/ class IngestConfigurationPanel extends javax.swing.JPanel { - private final List modules; - private IngestModuleModel selectedModule = null; + private List modules = new ArrayList<>(); private boolean processUnallocatedSpace = false; + private IngestModuleModel selectedModule = null; private IngestModulesTableModel tableModel = null; - IngestConfigurationPanel(List modules, boolean processUnallocatedSpace) { - this.modules = modules; + IngestConfigurationPanel(List moduleTemplates, boolean processUnallocatedSpace) { + for (IngestModuleTemplate moduleTemplate : moduleTemplates) { + modules.add(new IngestModuleModel(moduleTemplate)); + } this.processUnallocatedSpace = processUnallocatedSpace; initComponents(); customizeComponents(); } - List getIngestModules() { - return modules; + List getIngestModuleTemplates() { + List moduleTemplates = new ArrayList<>(); + for (IngestModuleModel module : modules) { + IngestModuleTemplate moduleTemplate = module.getIngestModuleTemplate(); + if (module.hasIngestOptionsPanel()) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + try { + Serializable options = moduleFactory.getIngestOptionsFromPanel(module.getIngestOptionsPanel()); + moduleTemplate.setIngestOptions(options); + } + catch (IngestModuleFactory.InvalidOptionsException ex) { + // RJCTODO + } + } + moduleTemplates.add(moduleTemplate); + } + return moduleTemplates; } boolean getProcessUnallocSpace() { @@ -245,8 +263,9 @@ import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; @Override public void actionPerformed(ActionEvent e) { try { - if (selectedModule.hasGlobalOptionsPanel()) - selectedModule.saveGlobalOptions(); + if (selectedModule.hasGlobalOptionsPanel()) { + selectedModule.saveGlobalOptions(); + } } catch (IngestModuleFactory.InvalidOptionsException ex) { // RJCTODO: Error message box @@ -283,6 +302,77 @@ import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; private javax.swing.ButtonGroup timeGroup; // End of variables declaration//GEN-END:variables + /** + * A decorator for an ingest module template that adds ingest and global + * options panels with lifetimes equal to that of the ingest configuration + * panel. 
+ */ + static private class IngestModuleModel { + private final IngestModuleTemplate moduleTemplate; + private final JPanel ingestOptionsPanel; + private final JPanel globalOptionsPanel; + + IngestModuleModel(IngestModuleTemplate moduleTemplate) { + this.moduleTemplate = moduleTemplate; + + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.providesIngestOptionsPanels()) { + ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(moduleTemplate.getIngestOptions()); + } + else { + ingestOptionsPanel = null; + } + + if (moduleFactory.providesGlobalOptionsPanels()) { + globalOptionsPanel = moduleFactory.getGlobalOptionsPanel(); + } + else { + globalOptionsPanel = null; + } + } + + IngestModuleTemplate getIngestModuleTemplate() { + return moduleTemplate; + } + + String getName() { + return moduleTemplate.getIngestModuleFactory().getModuleDisplayName(); + } + + String getDescription() { + return moduleTemplate.getIngestModuleFactory().getModuleDescription(); + } + + void setEnabled(boolean enabled) { + moduleTemplate.setEnabled(enabled); + } + + boolean isEnabled() { + return moduleTemplate.isEnabled(); + } + + boolean hasIngestOptionsPanel() { + return moduleTemplate.getIngestModuleFactory().providesIngestOptionsPanels(); + } + + JPanel getIngestOptionsPanel() { + return ingestOptionsPanel; + } + + boolean hasGlobalOptionsPanel() { + return moduleTemplate.getIngestModuleFactory().providesGlobalOptionsPanels(); + } + + JPanel getGlobalOptionsPanel() { + return globalOptionsPanel; + } + + void saveGlobalOptions() throws IngestModuleFactory.InvalidOptionsException { + // RJCTODO: Check for null. + moduleTemplate.getIngestModuleFactory().saveGlobalOptionsFromPanel(globalOptionsPanel); + } + } + /** * Custom table model to display ingest module names and enable/disable * ingest modules. @@ -305,7 +395,7 @@ import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; return module.isEnabled(); } else { - return module.getModuleName(); + return module.getName(); } } @@ -336,7 +426,7 @@ import org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestModuleModel; public IngestModulesTableRenderer() { for (IngestModuleModel moduleTemplate : modules) { - tooltips.add(moduleTemplate.getModuleDescription()); + tooltips.add(moduleTemplate.getDescription()); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java index e11360a09c..61639a4dd4 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java @@ -27,9 +27,10 @@ import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.datamodel.Content; /** + * RJCTODO: Improve comment * Controller to allow a user to set context-sensitive ingest module options, * enable/disable ingest modules, and set general ingest options. Provides an - * ingest module module model class and instances of a UI component to its + * ingest module model class and instances of a UI component to its * clients (Model-View-Controller design pattern). 
 */
public class IngestConfigurator {
@@ -55,60 +56,54 @@ public class IngestConfigurator {
      */
     public IngestConfigurator(String context) {
         this.context = context;
-        initializeForContext();
-    }
-
-    /**
-     * RJCTODO
-     * @return
-     */
-    public List getMissingIngestModuleErrorMessages() {
-        return missingIngestModuleErrorMessages;
-    }
-
-    private void initializeForContext() {
-        // Get the enabled and disabled ingest modules settings for the current
-        // context. The default settings make all ingest modules enabled.
-        List moduleFactories = IngestManager.getDefault().getIngestModuleFactories(); // RJCTODO: Put in uniqueness test in loader!
+
+        // Get the ingest module factories discovered by the ingest module
+        // loader.
+        // RJCTODO: Put in name uniqueness test/solution in loader!
+        List moduleFactories = IngestManager.getDefault().getIngestModuleFactories();
         HashSet loadedModuleNames = new HashSet<>();
         for (IngestModuleFactory moduleFactory : moduleFactories) {
             loadedModuleNames.add(moduleFactory.getModuleDisplayName());
-        }
+        }
+
+        // Get the enabled and disabled ingest modules settings for the current
+        // context. The default settings make all ingest modules enabled.
         HashSet enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(loadedModuleNames));
         HashSet disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, "");

-        // Create ingest module templates for the ingest module pipelines and
-        // wrap them in ingest module models to pass to the ingest configuration
-        // panel (view). The initial enabled/disabled state of the module models
-        // comes from the context-sensitive settings.
+        // Create ingest module templates for the current context.
         HashSet knownModuleNames = new HashSet<>();
-        List modules = new ArrayList<>();
+        List moduleTemplates = new ArrayList<>();
         for (IngestModuleFactory moduleFactory : moduleFactories) {
             // NOTE: In the future, this code will be modified to get the ingest
-            // options for each modules for the current context; for now just
+            // options for each module for the current context; for now just
             // get the default ingest options.
             IngestModuleTemplate moduleTemplate = new IngestModuleTemplate(moduleFactory, moduleFactory.getDefaultIngestOptions());
-            String moduleName = moduleFactory.getModuleDisplayName();
-            IngestConfigurator.IngestModuleModel module = new IngestConfigurator.IngestModuleModel(moduleTemplate, enabledModuleNames.contains(moduleName));
-            if (!enabledModuleNames.contains(moduleName) && !disabledModuleNames.contains(moduleName)) {
+            String moduleName = moduleTemplate.getIngestModuleFactory().getModuleDisplayName();
+            if (enabledModuleNames.contains(moduleName)) {
+                moduleTemplate.setEnabled(true);
+            }
+            else if (disabledModuleNames.contains(moduleName)) {
+                moduleTemplate.setEnabled(false);
+            }
+            else {
                 // The module factory was loaded, but the module name does not
                 // appear in the enabled/disabled module settings. Treat the
                 // module as a new module and enable it by default.
-                module.setEnabled(true);
+                moduleTemplate.setEnabled(true);
                 enabledModuleNames.add(moduleName);
             }
-            modules.add(module);
+            moduleTemplates.add(moduleTemplate);
             knownModuleNames.add(moduleName);
         }

         // Check for missing modules and update the enabled/disabled ingest
-        // module settings. This way the settings for the context will be
-        // up-to-date, even if save() is never called.
+        // module settings for any missing modules.
        for (String moduleName : new HashSet<>(enabledModuleNames)) { // iterate over a copy; the set is modified below
            if (!knownModuleNames.contains(moduleName)) {
                missingIngestModuleErrorMessages.add(moduleName + " was previously enabled, but could not be found");
                enabledModuleNames.remove(moduleName);
                disabledModuleNames.add(moduleName); // RJCTODO: Is this the right behavior?
            }
        }
        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
@@ -122,9 +117,87 @@ public class IngestConfigurator {
        processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY));

        // Make the configuration panel for the current context (view).
-        ingestConfigPanel = new IngestConfigurationPanel(modules, processUnallocatedSpace);
+        ingestConfigPanel = new IngestConfigurationPanel(moduleTemplates, processUnallocatedSpace);
    }
+
+    /**
+     * RJCTODO
+     * @return
+     */
+    public List getMissingIngestModuleErrorMessages() {
+        return missingIngestModuleErrorMessages;
+    }
+
+    /**
+     * RJCTODO
+     * @return
+     */
+    public JPanel getIngestConfigPanel() {
+        return ingestConfigPanel;
+    }
+
+    /**
+     * RJCTODO
+     * @throws org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestConfigurationException
+     */
+    public void save() throws IngestConfigurationException {
+        List moduleTemplates = ingestConfigPanel.getIngestModuleTemplates();
+        // Save the enabled/disabled ingest module settings for the current context.
+        HashSet enabledModuleNames = new HashSet<>();
+        HashSet disabledModuleNames = new HashSet<>();
+        for (IngestModuleTemplate moduleTemplate : moduleTemplates) {
+            String moduleName = moduleTemplate.getIngestModuleFactory().getModuleDisplayName();
+            if (moduleTemplate.isEnabled()) {
+                enabledModuleNames.add(moduleName);
+            }
+            else {
+                disabledModuleNames.add(moduleName);
+            }
+        }
+        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
+        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
+
+        // Save the process unallocated space setting for the current context.
+        String processUnalloc = Boolean.toString(ingestConfigPanel.getProcessUnallocSpace());
+        ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, processUnalloc);
+
+        // NOTE: In the future, this code will be modified to persist the ingest
+        // options for each ingest module for the current context.
+    }
+
+    // RJCTODO: If time permits, make it so that this class is not responsible
+    // starting and running the ingest - probably need to do this anyway, at
+    // least if the IngestConfigurator interface goes away and this becomes the
+    // IngestConfigurator class.
+    public void setContent(List inputContent) {
+        this.contentToIngest = inputContent;
+    }
+
+    // RJCTODO: If time permits, make it so that this class is not responsible
+    // starting and running the ingest - probably need to do this anyway, at
+    // least if the IngestConfigurator interface goes away and this becomes the
+    // IngestConfigurator class.
+    public void start() {
+        List moduleTemplates = ingestConfigPanel.getIngestModuleTemplates();
+
+        // Get the user's selection of whether or not to process unallocated space.
+        ingestManager.setProcessUnallocSpace(processUnallocatedSpace);
+
+        if (!moduleTemplates.isEmpty() && contentToIngest != null) {
+            // Queue the ingest process.
+            ingestManager.scheduleDataSource(moduleTemplates, contentToIngest);
+        }
+    }
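+
+    // Typical client usage, as a sketch (the client class name here is
+    // hypothetical; IngestDialog follows essentially this pattern):
+    //
+    //   IngestConfigurator configurator = new IngestConfigurator(MyClient.class.getCanonicalName());
+    //   somePanel.add(configurator.getIngestConfigPanel());
+    //   ... // user edits the settings in the panel
+    //   configurator.save();
+    //   configurator.setContent(dataSources);
+    //   configurator.start();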
+
+    // RJCTODO: If time permits, make it so that this class is not responsible
+    // starting and running the ingest - probably need to do this anyway, at
+    // least if the IngestConfigurator interface goes away and this becomes the
+    // IngestConfigurator class.
+    public boolean isIngestRunning() {
+        return ingestManager.isIngestRunning();
+    }
+
+    private static String makeCommaSeparatedList(HashSet input) {
+        if (input == null || input.isEmpty()) {
+            return "";
+        }
+
+        ArrayList list = new ArrayList<>();
+        list.addAll(input);
+        StringBuilder csvList = new StringBuilder();
+        for (int i = 0; i < list.size() - 1; ++i) {
+            csvList.append(list.get(i)).append(", ");
+        }
+        csvList.append(list.get(list.size() - 1));
+        return csvList.toString();
+    }
+
+    private HashSet getModulesNamesFromSetting(String key, String defaultSetting) {
+        // Get the ingest modules setting from the user's config file.
+        // If there is no such setting yet, create the default setting.
+        if (ModuleSettings.settingExists(context, key) == false) {
+            ModuleSettings.setConfigSetting(context, key, defaultSetting);
+        }
+        HashSet moduleNames = new HashSet<>();
+        String modulesSetting = ModuleSettings.getConfigSetting(context, key);
+        if (!modulesSetting.isEmpty()) {
+            String[] settingNames = modulesSetting.split(", ");
+            for (String name : settingNames) {
+                // Map some old core module names to the current core module names.
+                switch (name) {
+                    case "Thunderbird Parser":
+                    case "MBox Parser":
+                        moduleNames.add("Email Parser");
+                        break;
+                    case "File Extension Mismatch Detection":
+                    case "Extension Mismatch Detector":
+                        moduleNames.add("File Extension Mismatch Detector");
+                        break;
+                    default:
+                        moduleNames.add(name);
+                }
+            }
+        }
+        return moduleNames;
+    }
-    }
-
-    public JPanel getIngestConfigPanel() {
-        return ingestConfigPanel;
-    }
-
-    public void save() throws IngestConfigurationException {
-        if (null == context || null == ingestConfigPanel) {
-            throw new IngestConfigurationException("Ingest context not set");
-        }
-
-        List modules = ingestConfigPanel.getIngestModules();
-
-        // Save the enabled/disabled ingest module settings for the current context.
-        HashSet enabledModuleNames = new HashSet<>();
-        HashSet disabledModuleNames = new HashSet<>();
-        for (IngestModuleModel module : modules) {
-            if (module.isEnabled()) {
-                enabledModuleNames.add(module.getModuleName());
-            }
-            else {
-                disabledModuleNames.add(module.getModuleName());
-            }
-        }
-        ModuleSettings.setConfigSetting(context, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
-        ModuleSettings.setConfigSetting(context, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
-
-        // Save the process unallocated space setting for this context.
-        String processUnalloc = Boolean.toString(ingestConfigPanel.getProcessUnallocSpace());
-        ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, processUnalloc);
-
-        // Get the ingest module options for each ingest module.
-        // NOTE: In the future, this code will be modified to persist the ingest
-        // options for each ingest module for the current context.
-        // RJCTODO: Decide whether to set the ingest options here or in the dialog; in the dialog allows corrections by user
-//        if (currentModule != null && currentModule.hasSimpleConfiguration()) {
-//            currentModule.saveSimpleConfiguration();
-//        }
-    }
-
-    // RJCTODO: If time permits, make it so that this class is not responsible
-    // starting and running the ingest - probably need to do this anyway, at
-    // least if the IngestConfigurator interface goes away and this becomes the
-    // IngestConfigurator class.
-    public void setContent(List inputContent) {
-        this.contentToIngest = inputContent;
-    }
-
-    // RJCTODO: If time permits, make it so that this class is not responsible
-    // starting and running the ingest - probably need to do this anyway, at
-    // least if the IngestConfigurator interface goes away and this becomes the
-    // IngestConfigurator class.
-    public void start() {
-        // Get the list of ingest modules selected by the user.
-        // RJCTODO:
-//        List modulesToStart = ingestConfigPanel.getModulesToStart();
-        List modulesToStart = new ArrayList<>();
-
-        // Get the user's selection of whether or not to process unallocated space.
-        ingestManager.setProcessUnallocSpace(processUnallocatedSpace);
-
-        if (!modulesToStart.isEmpty() && contentToIngest != null) {
-            // Queue the ingest process.
-            ingestManager.scheduleDataSource(modulesToStart, contentToIngest);
-        }
-    }
-
-    // RJCTODO: If time permits, make it so that this class is not responsible
-    // starting and running the ingest - probably need to do this anyway, at
-    // least if the IngestConfigurator interface goes away and this becomes the
-    // IngestConfigurator class.
-    public boolean isIngestRunning() {
-        return ingestManager.isIngestRunning();
-    }
-
-    /**
-     * A model of an ingest module tailored for the view used to configure
-     * ingest modules.
-     */
-    static class IngestModuleModel {
-        private final IngestModuleTemplate moduleTemplate;
-        private final IngestModuleFactory moduleFactory;
-        private final JPanel ingestOptionsPanel;
-        private final JPanel globalOptionsPanel;
-        private boolean enabled = true;
-
-        IngestModuleModel(IngestModuleTemplate moduleTemplate, boolean enabled) {
-            this.moduleTemplate = moduleTemplate;
-            moduleFactory = moduleTemplate.getIngestModuleFactory();
-            if (moduleFactory.providesIngestOptionsPanels()) {
-                ingestOptionsPanel = moduleFactory.getIngestOptionsPanel(moduleTemplate.getIngestOptions());
-            }
-            else {
-                ingestOptionsPanel = null;
-            }
-            if (moduleFactory.providesGlobalOptionsPanels()) {
-                globalOptionsPanel = moduleFactory.getGlobalOptionsPanel();
-            }
-            else {
-                globalOptionsPanel = null;
-            }
-            this.enabled = enabled;
-        }
-
-        String getModuleName() {
-            return moduleFactory.getModuleDisplayName();
-        }
-
-        String getModuleDescription() {
-            return moduleFactory.getModuleDescription();
-        }
-
-        void setEnabled(boolean enabled) {
-            this.enabled = enabled;
-        }
-
-        boolean isEnabled() {
-            return enabled;
-        }
-
-        boolean hasIngestOptionsPanel() {
-            return moduleFactory.providesIngestOptionsPanels();
-        }
-
-        JPanel getIngestOptionsPanel() {
-            return ingestOptionsPanel;
-        }
-
-        boolean hasGlobalOptionsPanel() {
-            return moduleFactory.providesGlobalOptionsPanels();
-        }
-
-        JPanel getGlobalOptionsPanel() {
-            return globalOptionsPanel;
-        }
-
-        void saveGlobalOptions() throws IngestModuleFactory.InvalidOptionsException {
-            // RJCTODO: Check for null.
-            moduleFactory.saveGlobalOptionsFromPanel(globalOptionsPanel);
-        }
-
-        private IngestModuleTemplate getIngestModuleTemplate() {
-            return moduleTemplate;
-        }
-    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
index a5f3d22302..8409cebbe8 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
@@ -1310,10 +1310,16 @@ public class IngestManager {
         final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();
         while (fileScheduler.hasNext()) {
             final FileTask fileTask = fileScheduler.next();
-            final DataSourceTask dataSourceTask = fileTask.getDataSourceTask();
-            final AbstractFile fileToProcess = fileTask.getFile();
-
-            fileToProcess.close();
+            // RJCTODO: fileTask.execute(thread id);
+            // In this method, the file task gets the IngestPipelines object for
+            // the DataSourceTask and calls process(AbstractFile, thread id).
+            // The thread id allows the IngestPipelines to select the copy of the
+            // file ingest pipeline that is to be used by this thread.
+            // When the execute method completes, the scheduler needs to be notified...
+            // it appears this is being done with an event.
+            // When the scheduler is all done with a DataSourceTask, all of the
+            // pipelines need a complete() call for their modules; stop also needs to
+            // be handled.
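+            // A sketch of that flow against the IngestPipelines class added in
+            // this patch (the getIngestPipelines() accessor on DataSourceTask
+            // is hypothetical for now):
+            //
+            //   IngestPipelines pipelines = fileTask.getDataSourceTask().getIngestPipelines();
+            //   pipelines.getFileIngestPipeline().doTask(fileTask);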
} logger.log(Level.INFO, "IngestManager: Finished processing files"); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java index 821d5b0f0b..ab624cf14d 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleTemplate.java @@ -28,6 +28,7 @@ import java.io.Serializable; public class IngestModuleTemplate { private final IngestModuleFactory moduleFactory; private Serializable ingestOptions = null; + boolean enabled = true; IngestModuleTemplate(IngestModuleFactory moduleFactory, Serializable ingestOptions) { this.moduleFactory = moduleFactory; @@ -44,5 +45,13 @@ public class IngestModuleTemplate { void setIngestOptions(Serializable ingestOptions) { this.ingestOptions = ingestOptions; - } + } + + void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + boolean isEnabled() { + return enabled; + } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java index bc3e786faf..95ed6163ae 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java @@ -21,23 +21,80 @@ package org.sleuthkit.autopsy.ingest; import java.util.ArrayList; import java.util.List; +import java.io.Serializable; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.autopsy.ingest.IngestScheduler.FileScheduler.FileTask; /** - * - * @author rcordovano + * RJCTODO: */ public class IngestPipelines { - private List fileIngestPipeline = new ArrayList<>(); - private List dataSourceIngestPipeline = new ArrayList<>(); + private final long dataSourceTaskId; + private List fileIngestPipelineTemplate = new ArrayList<>(); + private List dataSourceIngestPipelineTemplate = new ArrayList<>(); - IngestPipelines(List moduleTemplates) { + IngestPipelines(long dataSourceTaskId, List moduleTemplates) { + this.dataSourceTaskId = dataSourceTaskId; for (IngestModuleTemplate moduleTemplate : moduleTemplates) { if (moduleTemplate.getIngestModuleFactory().isFileIngestModuleFactory()) { - fileIngestPipeline.add(moduleTemplate); + fileIngestPipelineTemplate.add(moduleTemplate); } else { - dataSourceIngestPipeline.add(moduleTemplate); + dataSourceIngestPipelineTemplate.add(moduleTemplate); } } + } + + DataSourceIngestPipeline getDataSourceIngestPipeline() { + return new DataSourceIngestPipeline(); } + + FileIngestPipeline getFileIngestPipeline() { + return new FileIngestPipeline(); + } + + public class DataSourceIngestPipeline { + private List modules = new ArrayList<>(); + + private DataSourceIngestPipeline() { + try { + for (IngestModuleTemplate moduleTemplate : dataSourceIngestPipelineTemplate) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + Serializable ingestOptions = moduleTemplate.getIngestOptions(); + DataSourceIngestModule module = moduleFactory.createDataSourceIngestModule(ingestOptions); + module.init(dataSourceTaskId); + modules.add(module); + } + } + catch (IngestModuleFactory.InvalidOptionsException ex) { + // RJCTODO: Is this a stopper condition? What about init? 
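+                // One possible resolution, sketched only: treat a module that
+                // cannot be constructed as fatal for the whole pipeline, e.g.:
+                //
+                //   throw new IllegalStateException("Failed to create data source ingest module", ex);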
+ } + } + } + + public class FileIngestPipeline { + private List modules = new ArrayList<>(); + + private FileIngestPipeline() { + try { + for (IngestModuleTemplate moduleTemplate : fileIngestPipelineTemplate) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + Serializable ingestOptions = moduleTemplate.getIngestOptions(); + FileIngestModule module = moduleFactory.createFileIngestModule(ingestOptions); + module.init(dataSourceTaskId); + modules.add(module); + } + } + catch (IngestModuleFactory.InvalidOptionsException ex) { + // RJCTODO: Is this a stopper condition? What about init? + } + } + + void doTask(FileTask fileTask) { + final DataSourceTask dataSourceTask = fileTask.getDataSourceTask(); + final AbstractFile fileToProcess = fileTask.getFile(); + + fileToProcess.close(); + } + } } From 0c7b2664ab95d68c0f8bddcdec165f770b528438 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 26 Feb 2014 17:54:33 -0500 Subject: [PATCH 14/48] Make compilable first version of minimal new ingest infrastructure --- .../AddImageWizardIngestConfigPanel.java | 8 +- .../SampleDataSourceIngestModule.java | 176 +- .../examples/SampleFileIngestModule.java | 268 +- .../ingest/DataSourceIngestModule.java | 5 +- .../autopsy/ingest/DataSourceTask.java | 90 +- .../autopsy/ingest/IngestConfigurator.java | 30 +- .../ingest/IngestDataSourceThread.java | 307 +- .../IngestDataSourceWorkerController.java | 111 +- .../autopsy/ingest/IngestDialog.java | 21 +- .../autopsy/ingest/IngestManager.java | 1264 ++++----- .../autopsy/ingest/IngestMessage.java | 18 +- .../autopsy/ingest/IngestMessagePanel.java | 21 +- .../autopsy/ingest/IngestModule.java | 42 +- .../autopsy/ingest/IngestModuleAbstract.java | 182 -- .../ingest/IngestModuleAbstractFile.java | 52 - .../ingest/IngestModuleDataSource.java | 52 - .../autopsy/ingest/IngestModuleLoader.java | 1192 ++++---- .../autopsy/ingest/IngestPipelines.java | 152 +- .../autopsy/ingest/IngestScheduler.java | 580 ++-- .../autopsy/ingest/IngestServices.java | 19 +- .../autopsy/ingest/PipelineContext.java | 74 - .../sleuthkit/autopsy/report/ReportHTML.java | 2 +- .../ExifParserFileIngestModule.java | 441 ++- .../AddFileExtensionAction.java | 24 +- .../FileExtMismatchConfigPanel.java | 101 +- ...ExtMismatchContextMenuActionsProvider.java | 210 +- .../FileExtMismatchIngestModule.java | 494 ++-- ...FileExtMismatchOptionsPanelController.java | 200 +- .../FileExtMismatchSimpleConfigPanel.java | 4 +- .../fileextmismatch/FileExtMismatchXML.java | 317 ++- .../filetypeid/FileTypeIdIngestModule.java | 339 ++- .../FileTypeIdSimpleConfigPanel.java | 2 +- .../hashdatabase/HashDbIngestModule.java | 5 + .../hashdatabase/HashLookupModuleFactory.java | 6 +- .../AbstractKeywordSearchPerformer.java | 6 +- .../KeywordSearchConfigurationPanel2.java | 3 +- .../KeywordSearchConfigurationPanel3.java | 3 +- .../KeywordSearchEditListPanel.java | 2 +- .../KeywordSearchIngestModule.java | 2497 ++++++++--------- .../KeywordSearchListsViewerPanel.java | 2 +- .../autopsy/recentactivity/Chrome.java | 1025 ++++--- .../autopsy/recentactivity/Extract.java | 253 +- .../autopsy/recentactivity/ExtractIE.java | 975 ++++--- .../recentactivity/ExtractRegistry.java | 1087 ++++--- .../autopsy/recentactivity/Firefox.java | 949 ++++--- .../recentactivity/RAImageIngestModule.java | 426 +-- .../recentactivity/RecentDocumentsByLnk.java | 233 +- .../SearchEngineURLQueryAnalyzer.java | 760 +++-- .../scalpel/ScalpelCarverIngestModule.java | 523 ++-- .../sevenzip/SevenZipIngestModule.java | 1941 
+++++++------ .../ewfverify/EwfVerifyIngestModule.java | 402 ++- .../ThunderbirdMboxFileIngestModule.java | 759 +++-- 52 files changed, 9032 insertions(+), 9623 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstract.java delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstractFile.java delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/PipelineContext.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java index 783b6d8ed7..75948f9117 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIngestConfigPanel.java @@ -187,12 +187,8 @@ class AddImageWizardIngestConfigPanel implements WizardDescriptor.Panel pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - - Case case1 = Case.getCurrentCase(); - SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); - - Services services = new Services(sleuthkitCase); - FileManager fm = services.getFileManager(); - try { - /* you can use the findFiles method in FileManager (or similar ones in - * SleuthkitCase to find files based only on their name. This - * one finds files that have a .doc extension. */ - List docFiles = fm.findFiles(dataSource, "%.doc"); - for (AbstractFile file : docFiles) { - // do something with each doc file - } - - /* We can also do more general queries with findFilesWhere, which - * allows us to make our own WHERE clause in the database. - */ - long currentTime = System.currentTimeMillis()/1000; - // go back 2 weeks - long minTime = currentTime - (14 * 24 * 60 * 60); - List otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime); - // do something with these files... - - } catch (TskCoreException ex) { - Logger log = Logger.getLogger(SampleDataSourceIngestModule.class); - log.fatal("Error retrieving files from database: " + ex.getLocalizedMessage()); - return; - } - } - - @Override - public void init(IngestModuleInit initContext) { - // do nothing - } - - @Override - public void complete() { - // do nothing - } - - @Override - public void stop() { - // do nothing - } - - @Override - public String getName() { - return "SampleDataSourceIngestModule"; - } - - @Override - public String getVersion() { - return "1.0"; - } - - @Override - public String getDescription() { - return "Doesn't do much"; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +// class SampleDataSourceIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleDataSource { +// +// /* Data Source modules operate on a disk or set of logical files. They +// * are passed in teh data source refernce and query it for things they want. +// */ +// @Override +// public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// +// Case case1 = Case.getCurrentCase(); +// SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); +// +// Services services = new Services(sleuthkitCase); +// FileManager fm = services.getFileManager(); +// try { +// /* you can use the findFiles method in FileManager (or similar ones in +// * SleuthkitCase to find files based only on their name. This +// * one finds files that have a .doc extension. 
*/ +// List docFiles = fm.findFiles(dataSource, "%.doc"); +// for (AbstractFile file : docFiles) { +// // do something with each doc file +// } +// +// /* We can also do more general queries with findFilesWhere, which +// * allows us to make our own WHERE clause in the database. +// */ +// long currentTime = System.currentTimeMillis()/1000; +// // go back 2 weeks +// long minTime = currentTime - (14 * 24 * 60 * 60); +// List otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime); +// // do something with these files... +// +// } catch (TskCoreException ex) { +// Logger log = Logger.getLogger(SampleDataSourceIngestModule.class); +// log.fatal("Error retrieving files from database: " + ex.getLocalizedMessage()); +// return; +// } +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// // do nothing +// } +// +// @Override +// public void complete() { +// // do nothing +// } +// +// @Override +// public void stop() { +// // do nothing +// } +// +// @Override +// public String getName() { +// return "SampleDataSourceIngestModule"; +// } +// +// @Override +// public String getVersion() { +// return "1.0"; +// } +// +// @Override +// public String getDescription() { +// return "Doesn't do much"; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java index 464c79888c..2db5eaa7e2 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java @@ -30,19 +30,17 @@ package org.sleuthkit.autopsy.examples; -import org.apache.log4j.Logger; -import org.openide.util.Exceptions; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskData; +//import org.apache.log4j.Logger; +//import org.openide.util.Exceptions; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.SleuthkitCase; +//import org.sleuthkit.datamodel.TskData; +// RJCTODO /** * This is a sample and simple module. It is a file-level ingest module, meaning * that it will get called on each file in the disk image / logical file set. @@ -53,126 +51,126 @@ import org.sleuthkit.datamodel.TskData; * IngestModuleLoader will not load things from the org.sleuthkit.autopsy.examples package. * Either change the package or the loading code to make this module actually run. 
*/ - class SampleFileIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { - private int attrId = -1; - private static SampleFileIngestModule defaultInstance = null; - - // Private to ensure Singleton status - private SampleFileIngestModule() { - } - - // File-level ingest modules are currently singleton -- this is required - public static synchronized SampleFileIngestModule getDefault() { - //defaultInstance is a private static class variable - if (defaultInstance == null) { - defaultInstance = new SampleFileIngestModule(); - } - return defaultInstance; - } - - - @Override - public void init(IngestModuleInit initContext) { - /* For this demo, we are going to make a private attribute to post our - * results to the blackbaord with. There are many standard blackboard artifact - * and attribute types and you should first consider using one of those before - * making private ones because other modules won't know about provate ones. - * Because our demo has results that have no real value, we do not have an - * official attribute for them. - */ - Case case1 = Case.getCurrentCase(); - SleuthkitCase sleuthkitCase = case1.getSleuthkitCase(); - - // see if the type already exists in the blackboard. - try { - attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE"); - } catch (TskCoreException ex) { - // create it if not - try { - attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute"); - } catch (TskCoreException ex1) { - Logger log = Logger.getLogger(SampleFileIngestModule.class); - log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage()); - attrId = -1; - } - } - } - - @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - // skip non-files - if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || - (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { - return ProcessResult.OK; - } - - // skip NSRL / known files - if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) { - return ProcessResult.OK; - } - - - /* Do a non-sensical calculation of the number of 0x00 bytes - * in the first 1024-bytes of the file. This is for demo - * purposes only. - */ - try { - byte buffer[] = new byte[1024]; - int len = abstractFile.read(buffer, 0, 1024); - int count = 0; - for (int i = 0; i < len; i++) { - if (buffer[i] == 0x00) { - count++; - } - } - - if (attrId != -1) { - // Make an attribute using the ID for the private type that we previously created. - BlackboardAttribute attr = new BlackboardAttribute(attrId, getName(), count); - - /* add it to the general info artifact. In real modules, you would likely have - * more complex data types and be making more specific artifacts. - */ - BlackboardArtifact art = abstractFile.getGenInfoArtifact(); - art.addAttribute(attr); - } - - return ProcessResult.OK; - } catch (TskCoreException ex) { - Exceptions.printStackTrace(ex); - return ProcessResult.ERROR; - } - } - - - @Override - public void complete() { - - } - - @Override - public void stop() { - - } - - @Override - public String getVersion() { - return "1.0"; - } - - @Override - public String getName() { - return "SampleFileIngestModule"; - } - - @Override - public String getDescription() { - return "Doesn't do much"; - } - - @Override - public boolean hasBackgroundJobsRunning() { - // we're single threaded... 
-        return false;
-    }
-}
+// class SampleFileIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile {
+//    private int attrId = -1;
+//    private static SampleFileIngestModule defaultInstance = null;
+//
+//    // Private to ensure Singleton status
+//    private SampleFileIngestModule() {
+//    }
+//
+//    // File-level ingest modules are currently singleton -- this is required
+//    public static synchronized SampleFileIngestModule getDefault() {
+//        //defaultInstance is a private static class variable
+//        if (defaultInstance == null) {
+//            defaultInstance = new SampleFileIngestModule();
+//        }
+//        return defaultInstance;
+//    }
+//
+//
+//    @Override
+//    public void init(IngestModuleInit initContext) {
+//        /* For this demo, we are going to make a private attribute to post our
+//         * results to the blackboard with. There are many standard blackboard artifact
+//         * and attribute types and you should first consider using one of those before
+//         * making private ones because other modules won't know about private ones.
+//         * Because our demo has results that have no real value, we do not have an
+//         * official attribute for them.
+//         */
+//        Case case1 = Case.getCurrentCase();
+//        SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
+//
+//        // see if the type already exists in the blackboard.
+//        try {
+//            attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE");
+//        } catch (TskCoreException ex) {
+//            // create it if not
+//            try {
+//                attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute");
+//            } catch (TskCoreException ex1) {
+//                Logger log = Logger.getLogger(SampleFileIngestModule.class);
+//                log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage());
+//                attrId = -1;
+//            }
+//        }
+//    }
+//
+//    @Override
+//    public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
+//        // skip non-files
+//        if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
+//                (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
+//            return ProcessResult.OK;
+//        }
+//
+//        // skip NSRL / known files
+//        if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) {
+//            return ProcessResult.OK;
+//        }
+//
+//
+//        /* Do a non-sensical calculation of the number of 0x00 bytes
+//         * in the first 1024-bytes of the file. This is for demo
+//         * purposes only.
+//         */
+//        try {
+//            byte buffer[] = new byte[1024];
+//            int len = abstractFile.read(buffer, 0, 1024);
+//            int count = 0;
+//            for (int i = 0; i < len; i++) {
+//                if (buffer[i] == 0x00) {
+//                    count++;
+//                }
+//            }
+//
+//            if (attrId != -1) {
+//                // Make an attribute using the ID for the private type that we previously created.
+//                BlackboardAttribute attr = new BlackboardAttribute(attrId, getName(), count);
+//
+//                /* add it to the general info artifact. In real modules, you would likely have
+//                 * more complex data types and be making more specific artifacts.
+// */ +// BlackboardArtifact art = abstractFile.getGenInfoArtifact(); +// art.addAttribute(attr); +// } +// +// return ProcessResult.OK; +// } catch (TskCoreException ex) { +// Exceptions.printStackTrace(ex); +// return ProcessResult.ERROR; +// } +// } +// +// +// @Override +// public void complete() { +// +// } +// +// @Override +// public void stop() { +// +// } +// +// @Override +// public String getVersion() { +// return "1.0"; +// } +// +// @Override +// public String getName() { +// return "SampleFileIngestModule"; +// } +// +// @Override +// public String getDescription() { +// return "Doesn't do much"; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// // we're single threaded... +// return false; +// } +//} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java index 210ca983fb..2d6f7d730b 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java @@ -32,5 +32,6 @@ public interface DataSourceIngestModule extends IngestModule { * @param statusHelper A status helper to be used to report progress and * detect task cancellation. */ - void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class -} +// void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class + void process(Content dataSource); +} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java index 01bac09f19..942190844b 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2013 Basis Technology Corp. + * Copyright 2013-2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,59 +20,68 @@ package org.sleuthkit.autopsy.ingest; import java.util.List; +import java.util.Objects; import org.sleuthkit.datamodel.Content; +// RJCTODO: Update comment /** * Represents a data source-level task to schedule and analyze. * Children of the data will also be scheduled. 
 *
 * @param T type of Ingest Module / Pipeline (file or data source content) associated with this task
 */
-class DataSourceTask<T extends IngestModuleAbstract> {
-    private Content input;
-    private List<T> modules;
-    private boolean processUnallocated;
-    private PipelineContext<T> pipelineContext;
+class DataSourceTask {
+    private final long id;
+    private final Content dataSource;
+    private final IngestPipelines ingestPipelines;
+    private final boolean processUnallocatedSpace;
+    private long fileTasksCount = 0; // RJCTODO: Need additional counters
 
-    public DataSourceTask(Content input, List<T> modules, boolean processUnallocated) {
-        this.input = input;
-        this.modules = modules;
-        this.processUnallocated = processUnallocated;
-        pipelineContext = new PipelineContext<>(this);
-    }
-
-    public Content getContent() {
-        return input;
+    DataSourceTask(long id, Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) {
+        this.id = id;
+        this.dataSource = dataSource;
+        this.ingestPipelines = new IngestPipelines(id, ingestModuleTemplates);
+        this.processUnallocatedSpace = processUnallocatedSpace;
+    }
+
+    long getTaskId() {
+        return id;
     }
 
-    public PipelineContext<T> getPipelineContext() {
-        return pipelineContext;
+    Content getDataSource() {
+        return dataSource;
     }
 
-    public List<T> getModules() {
-        return modules;
+    IngestPipelines getIngestPipelines() {
+        return ingestPipelines;
     }
 
     /**
      * Returns value of if unallocated space should be analyzed (and scheduled)
      * @return True if pipeline should process unallocated space.
      */
-    boolean isProcessUnalloc() {
-        return processUnallocated;
+    boolean getProcessUnallocatedSpace() {
+        return processUnallocatedSpace;
     }
 
-    // @@@ BC: I think this should go away.
-    void addModules(List<T> newModules) {
-        for (T newModule : newModules) {
-            if (!modules.contains(newModule)) {
-                modules.add(newModule);
-            }
+    synchronized void fileTaskScheduled() {
+        // RJCTODO: Implement the counters more fully, or do list scanning
+        ++fileTasksCount;
+    }
+
+    synchronized void fileTaskCompleted() {
+        // RJCTODO: Implement the counters more fully, or do list scanning
+        --fileTasksCount;
+        if (0 == fileTasksCount) {
+            // RJCTODO
         }
     }
-
+
     @Override
     public String toString() {
-        return "ScheduledTask{" + "input=" + input + ", modules=" + modules + '}';
+        // RJCTODO: Improve? Is this useful?
+//        return "ScheduledTask{" + "input=" + dataSource + ", modules=" + modules + '}';
+        return "ScheduledTask{ id=" + id + ", dataSource=" + dataSource + '}';
     }
 
     /**
@@ -85,20 +94,31 @@
      */
     @Override
     public boolean equals(Object obj) {
+        // RJCTODO: Revisit this, probably don't need it
         if (obj == null) {
             return false;
         }
+
         if (getClass() != obj.getClass()) {
             return false;
         }
-        final DataSourceTask<T> other = (DataSourceTask<T>) obj;
-        if (this.input != other.input && (this.input == null || !this.input.equals(other.input))) {
+
+        final DataSourceTask other = (DataSourceTask)obj;
+        if (this.dataSource != other.dataSource && (this.dataSource == null || !this.dataSource.equals(other.dataSource))) {
             return false;
         }
-        if (this.modules != other.modules && (this.modules == null || !this.modules.equals(other.modules))) {
-            return false;
-        }
-
+
         return true;
     }
+
+    @Override
+    public int hashCode() {
+        // RJCTODO: Probably don't need this
+        int hash = 5;
+        hash = 61 * hash + (int) (this.id ^ (this.id >>> 32));
+        hash = 61 * hash + Objects.hashCode(this.dataSource);
+        hash = 61 * hash + Objects.hashCode(this.ingestPipelines);
+        hash = 61 * hash + (this.processUnallocatedSpace ? 1 : 0);
+        return hash;
+    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java
index 61639a4dd4..88e79f2efb 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurator.java
@@ -37,19 +37,11 @@ public class IngestConfigurator {
     private static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules";
     private static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules";
     private static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space";
-    private final IngestManager ingestManager = IngestManager.getDefault();
     private final String context;
     private List<String> missingIngestModuleErrorMessages = new ArrayList<>();
-    private boolean processUnallocatedSpace = false;
     private IngestConfigurationPanel ingestConfigPanel = null;
     private List<Content> contentToIngest = null; // RJCTODO: Remove if start() method removed
 
-    public class IngestConfigurationException extends Exception {
-        IngestConfigurationException(String message) {
-            super(message);
-        }
-    }
-
     /**
      * RJCTODO
      * @param context
@@ -60,7 +52,7 @@ public class IngestConfigurator {
         // Get the ingest module factories discovered by the ingest module
         // loader.
         // RJCTODO: Put in name uniqueness test/solution in loader!
-        List<IngestModuleFactory> moduleFactories = IngestManager.getDefault().getIngestModuleFactories();
+        List<IngestModuleFactory> moduleFactories = IngestModuleLoader.getDefault().getIngestModuleFactories();
         HashSet<String> loadedModuleNames = new HashSet<>();
         for (IngestModuleFactory moduleFactory : moduleFactories) {
             loadedModuleNames.add(moduleFactory.getModuleDisplayName());
@@ -114,7 +106,7 @@ public class IngestConfigurator {
         if (ModuleSettings.settingExists(context, PARSE_UNALLOC_SPACE_KEY) == false) {
             ModuleSettings.setConfigSetting(context, PARSE_UNALLOC_SPACE_KEY, "false");
         }
-        processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY));
+        boolean processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(context, PARSE_UNALLOC_SPACE_KEY));
 
         // Make the configuration panel for the current context (view).
         ingestConfigPanel = new IngestConfigurationPanel(moduleTemplates, processUnallocatedSpace);
@@ -140,7 +132,7 @@ public class IngestConfigurator {
      * RJCTODO
      * @throws org.sleuthkit.autopsy.ingest.IngestConfigurator.IngestConfigurationException
      */
-    public void save() throws IngestConfigurationException {
+    public void save() {
         List<IngestModuleTemplate> moduleTemplates = ingestConfigPanel.getIngestModuleTemplates();
 
         // Save the enabled/disabled ingest module settings for the current context.
@@ -179,14 +171,16 @@ public class IngestConfigurator {
     // least if the IngestConfigurator interface goes away and this becomes the
     // IngestConfigurator class.
     public void start() {
+        // Filter out the disabled module templates. (Removing entries from the
+        // list inside a for-each loop would throw ConcurrentModificationException,
+        // so the disabled templates are collected first, then removed.)
         List<IngestModuleTemplate> moduleTemplates = ingestConfigPanel.getIngestModuleTemplates();
+        List<IngestModuleTemplate> disabledTemplates = new ArrayList<>();
+        for (IngestModuleTemplate moduleTemplate : moduleTemplates) {
+            if (!moduleTemplate.isEnabled()) {
+                disabledTemplates.add(moduleTemplate);
+            }
+        }
+        moduleTemplates.removeAll(disabledTemplates);
 
-        // Get the user's selection of whether or not to process unallocated space.
-        ingestManager.setProcessUnallocSpace(processUnallocatedSpace);
-
-        if (!modulesToStart.isEmpty() && contentToIngest != null) {
-            // Queue the ingest process.
- ingestManager.scheduleDataSource(modulesToStart, contentToIngest); + if (!moduleTemplates.isEmpty() && null != contentToIngest) { + IngestManager.getDefault().scheduleDataSource(contentToIngest, moduleTemplates, ingestConfigPanel.getProcessUnallocSpace()); } } @@ -195,7 +189,7 @@ public class IngestConfigurator { // least if the IngestConfigurator interface goes away and this becomes the // IngestConfigurator class. public boolean isIngestRunning() { - return ingestManager.isIngestRunning(); + return IngestManager.getDefault().isIngestRunning(); } private static String makeCommaSeparatedList(HashSet input) { diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java index f599edf44a..48abeca7b5 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011 Basis Technology Corp. + * Copyright 2011-2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -18,164 +18,157 @@ */ package org.sleuthkit.autopsy.ingest; -//ingester worker for DataSource queue -import java.awt.EventQueue; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import javax.swing.SwingWorker; -import org.netbeans.api.progress.ProgressHandle; -import org.netbeans.api.progress.ProgressHandleFactory; -import org.openide.util.Cancellable; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.StopWatch; -import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; -import org.sleuthkit.datamodel.Content; +//import java.awt.EventQueue; +//import java.util.concurrent.locks.Lock; +//import java.util.concurrent.locks.ReentrantReadWriteLock; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import javax.swing.SwingWorker; +//import org.netbeans.api.progress.ProgressHandle; +//import org.netbeans.api.progress.ProgressHandleFactory; +//import org.openide.util.Cancellable; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.coreutils.StopWatch; +//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; +//import org.sleuthkit.datamodel.Content; /** * Worker thread that runs a data source-level ingest module (image, file set virt dir, etc). * Used to process only a single data-source and single module. 
*/ - class IngestDataSourceThread extends SwingWorker { - - private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName()); - private ProgressHandle progress; - private final PipelineContextpipelineContext; - private final Content dataSource; - private final IngestModuleDataSource module; - private IngestDataSourceWorkerController controller; - private final IngestManager manager; - private final IngestModuleInit init; - private boolean inited; - //current method of enqueuing data source ingest modules with locks and internal lock queue - //ensures that we init, run and complete a single data source ingest module at a time - //uses fairness policy to run them in order enqueued - //TODO use a real queue and manager to allow multiple different modules to run in parallel - private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock(); - - IngestDataSourceThread(IngestManager manager, PipelineContextpipelineContext, Content dataSource, IngestModuleDataSource module, IngestModuleInit init) { - this.manager = manager; - this.pipelineContext = pipelineContext; - this.dataSource = dataSource; - this.module = module; - this.init = init; - this.inited = false; - } - - PipelineContextgetContext() { - return pipelineContext; - } - - Content getContent() { - return pipelineContext.getDataSourceTask().getContent(); - } - - IngestModuleDataSource getModule() { - return module; - } - - public void init() { - - logger.log(Level.INFO, "Initializing module: " + module.getName()); - try { - module.init(init); - inited = true; - } catch (Exception e) { - logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run."); - //will not run - inited = false; - throw e; - } - } - - @Override - protected Void doInBackground() throws Exception { - - logger.log(Level.INFO, "Pending module: " + module.getName()); - - final String displayName = module.getName() + " dataSource id:" + dataSource.getId(); - progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() { - @Override - public boolean cancel() { - logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user."); - if (progress != null) { - progress.setDisplayName(displayName + " (Cancelling...)"); - } - return IngestDataSourceThread.this.cancel(true); - } - }); - progress.start(); - progress.switchToIndeterminate(); - - dataSourceIngestModuleLock.lock(); - try { - if (this.isCancelled()) { - logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName()); - return Void.TYPE.newInstance(); - } - logger.log(Level.INFO, "Starting module: " + module.getName()); - logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); - progress.setDisplayName(displayName); - - if (inited == false) { - logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName()); - return Void.TYPE.newInstance(); - } - logger.log(Level.INFO, "Starting processing of module: " + module.getName()); - - controller = new IngestDataSourceWorkerController(this, progress); - - if (isCancelled()) { - logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation."); - return Void.TYPE.newInstance(); - } - final StopWatch timer = new StopWatch(); - timer.start(); - try { - module.process(pipelineContext, dataSource, controller); - } catch (Exception e) { - logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e); - 
} finally { - timer.stop(); - logger.log(Level.INFO, "Done processing of module: " + module.getName() - + " took " + timer.getElapsedTimeSecs() + " secs. to process()"); - - - //cleanup queues (worker and DataSource/module) - manager.removeDataSourceIngestWorker(this); - - if (!this.isCancelled()) { - logger.log(Level.INFO, "Module " + module.getName() + " completed"); - try { - module.complete(); - } catch (Exception e) { - logger.log(Level.INFO, "Error completing the module " + module.getName(), e); - } - IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName()); - } else { - logger.log(Level.INFO, "Module " + module.getName() + " stopped"); - try { - module.stop(); - } catch (Exception e) { - logger.log(Level.INFO, "Error stopping the module" + module.getName(), e); - } - IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName()); - } - - } - return Void.TYPE.newInstance(); - } finally { - //release the lock so next module can run - dataSourceIngestModuleLock.unlock(); - EventQueue.invokeLater(new Runnable() { - @Override - public void run() { - progress.finish(); - } - }); - logger.log(Level.INFO, "Done running module: " + module.getName()); - logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); - } - } -} +// class IngestDataSourceThread extends SwingWorker { +// +// private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName()); +// private ProgressHandle progress; +// private final Content dataSource; +// private final DataSourceIngestModule module; +// private IngestDataSourceWorkerController controller; +// private final IngestManager manager; +// private final IngestModuleInit init; +// private boolean inited; +// //current method of enqueuing data source ingest modules with locks and internal lock queue +// //ensures that we init, run and complete a single data source ingest module at a time +// //uses fairness policy to run them in order enqueued +// private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock(); +// +// IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module, IngestModuleInit init) { +// this.manager = manager; +// this.dataSource = dataSource; +// this.module = module; +// this.init = init; +// this.inited = false; +// } +// +// Content getContent() { +// return dataSource; +// } +// +// DataSourceIngestModule getModule() { +// return module; +// } +// +// public void init() { +// +// logger.log(Level.INFO, "Initializing module: " + module.getName()); +// try { +// module.init(dataSource.getId()); +// inited = true; +// } catch (Exception e) { +// logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run."); +// //will not run +// inited = false; +// throw e; +// } +// } +// +// @Override +// protected Void doInBackground() throws Exception { +// +// logger.log(Level.INFO, "Pending module: " + module.getName()); +// +// final String displayName = module.getName() + " dataSource id:" + dataSource.getId(); +// progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() { +// @Override +// public boolean cancel() { +// logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user."); +// if (progress != null) { +// progress.setDisplayName(displayName + " (Cancelling...)"); +// } +// return IngestDataSourceThread.this.cancel(true); +// } +// }); +// progress.start(); +// progress.switchToIndeterminate(); 
+// +// dataSourceIngestModuleLock.lock(); +// try { +// if (this.isCancelled()) { +// logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName()); +// return Void.TYPE.newInstance(); +// } +// logger.log(Level.INFO, "Starting module: " + module.getName()); +// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); +// progress.setDisplayName(displayName); +// +// if (inited == false) { +// logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName()); +// return Void.TYPE.newInstance(); +// } +// logger.log(Level.INFO, "Starting processing of module: " + module.getName()); +// +// controller = new IngestDataSourceWorkerController(this, progress); +// +// if (isCancelled()) { +// logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation."); +// return Void.TYPE.newInstance(); +// } +// final StopWatch timer = new StopWatch(); +// timer.start(); +// try { +// // RJCTODO +//// module.process(pipelineContext, dataSource, controller); +// } catch (Exception e) { +// logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e); +// } finally { +// timer.stop(); +// logger.log(Level.INFO, "Done processing of module: " + module.getName() +// + " took " + timer.getElapsedTimeSecs() + " secs. to process()"); +// +// +// //cleanup queues (worker and DataSource/module) +// manager.removeDataSourceIngestWorker(this); +// +// if (!this.isCancelled()) { +// logger.log(Level.INFO, "Module " + module.getName() + " completed"); +// try { +// module.complete(); +// } catch (Exception e) { +// logger.log(Level.INFO, "Error completing the module " + module.getName(), e); +// } +// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName()); +// } else { +// logger.log(Level.INFO, "Module " + module.getName() + " stopped"); +// try { +// module.stop(); +// } catch (Exception e) { +// logger.log(Level.INFO, "Error stopping the module" + module.getName(), e); +// } +// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName()); +// } +// +// } +// return Void.TYPE.newInstance(); +// } finally { +// //release the lock so next module can run +// dataSourceIngestModuleLock.unlock(); +// EventQueue.invokeLater(new Runnable() { +// @Override +// public void run() { +// progress.finish(); +// } +// }); +// logger.log(Level.INFO, "Done running module: " + module.getName()); +// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); +// } +// } +//} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java index 052562e0f5..b7fb96c977 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java @@ -18,63 +18,64 @@ */ package org.sleuthkit.autopsy.ingest; -import org.netbeans.api.progress.ProgressHandle; +//import org.netbeans.api.progress.ProgressHandle; +// RJCTODO: Rework or replace this code /** * Controller for DataSource level ingest modules * Used by modules to check task status and to post progress to */ -public class IngestDataSourceWorkerController { - - private IngestDataSourceThread worker; - private ProgressHandle progress; - - /** - * Instantiate the controller for the worker - * @param worker underlying DataSource ingest thread - * @param progress the progress handle - */ - 
IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) { - this.worker = worker; - this.progress = progress; - } - - /** - * Check if the task has been cancelled. This should be polled by the module periodically - * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup - * - * @return true if the task has been cancelled, false otherwise - */ - public boolean isCancelled() { - return worker.isCancelled(); - } - - /** - * Update the progress bar and switch to determinate mode once number of total work units is known - * @param workUnits total number of work units for the DataSource ingest task - */ - public void switchToDeterminate(int workUnits) { - if (progress != null) { - progress.switchToDeterminate(workUnits); - } - } - - /** - * Update the progress bar and switch to non determinate mode if number of work units is not known - */ - public void switchToInDeterminate() { - if (progress != null) { - progress.switchToIndeterminate(); - } - } - - /** - * Update the progress bar with the number of work units performed, if in the determinate mode - * @param workUnits number of work units performed so far by the module - */ - public void progress(int workUnits) { - if (progress != null) { - progress.progress(worker.getContent().getName(), workUnits); - } - } -} \ No newline at end of file +//public class IngestDataSourceWorkerController { +// +// private IngestDataSourceThread worker; +// private ProgressHandle progress; +// +// /** +// * Instantiate the controller for the worker +// * @param worker underlying DataSource ingest thread +// * @param progress the progress handle +// */ +// IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) { +// this.worker = worker; +// this.progress = progress; +// } +// +// /** +// * Check if the task has been cancelled. This should be polled by the module periodically +// * And the module needs to act, i.e. 
break out of its processing loop and call its stop() to cleanup +// * +// * @return true if the task has been cancelled, false otherwise +// */ +// public boolean isCancelled() { +// return worker.isCancelled(); +// } +// +// /** +// * Update the progress bar and switch to determinate mode once number of total work units is known +// * @param workUnits total number of work units for the DataSource ingest task +// */ +// public void switchToDeterminate(int workUnits) { +// if (progress != null) { +// progress.switchToDeterminate(workUnits); +// } +// } +// +// /** +// * Update the progress bar and switch to non determinate mode if number of work units is not known +// */ +// public void switchToInDeterminate() { +// if (progress != null) { +// progress.switchToIndeterminate(); +// } +// } +// +// /** +// * Update the progress bar with the number of work units performed, if in the determinate mode +// * @param workUnits number of work units performed so far by the module +// */ +// public void progress(int workUnits) { +// if (progress != null) { +// progress.progress(worker.getContent().getName(), workUnits); +// } +// } +//} \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java index 88315d80e5..0d2d854d98 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDialog.java @@ -83,12 +83,7 @@ public class IngestDialog extends JDialog { @Override public void actionPerformed(ActionEvent e) { - try { - ingestConfigurator.save(); - } - catch (IngestConfigurator.IngestConfigurationException ex) { - // RJCTODO: Decide what to do here. - } + ingestConfigurator.save(); ingestConfigurator.start(); close(); } @@ -97,12 +92,7 @@ public class IngestDialog extends JDialog { @Override public void actionPerformed(ActionEvent e) { - try { - ingestConfigurator.save(); - } - catch (IngestConfigurator.IngestConfigurationException ex) { - // RJCTODO: Decide what to do here. - } + ingestConfigurator.save(); close(); } }); @@ -110,12 +100,7 @@ public class IngestDialog extends JDialog { @Override public void windowClosing(WindowEvent e) { - try { - ingestConfigurator.save(); - } - catch (IngestConfigurator.IngestConfigurationException ex) { - // RJCTODO: Decide what to do here. 
- } + ingestConfigurator.save(); close(); } }); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 8409cebbe8..23e04fe88d 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -54,30 +54,19 @@ import org.sleuthkit.datamodel.Content; * */ public class IngestManager { - private static final Logger logger = Logger.getLogger(IngestManager.class.getName()); - private IngestManagerStats stats; - private boolean processUnallocSpace = true; - //queues +// private IngestManagerStats stats; // RJCTODO: Decide whether to reimplement private final IngestScheduler scheduler; - //workers private IngestAbstractFileProcessor abstractFileIngester; - private List dataSourceIngesters; +// private List dataSourceIngesters; // RJCTODO: Adapt to new paradigm private SwingWorker queueWorker; - //modules - private List dataSourceModules; - private List abstractFileModules; - // module return values - private final Map abstractFileModulesRetValues = new HashMap(); - //notifications +// private final Map abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class); - //monitor private final IngestMonitor ingestMonitor = new IngestMonitor(); - //module loader - private IngestModuleLoader moduleLoader = null; - //property file name id for the module +// private IngestModuleLoader moduleLoader = null; + private DataSourceTask currentTask = null; // RJCTODO: Temp glue code, remove + private long nextDataSourceTaskId = 0; public final static String MODULE_PROPERTIES = "ingest"; - private volatile int messageID = 0; /** * Possible events about ingest modules Event listeners can get the event @@ -140,36 +129,17 @@ public class IngestManager { private static volatile IngestManager instance; private IngestManager() { - dataSourceIngesters = new ArrayList(); + // RJCTODO: Adapt to new paradigm +// dataSourceIngesters = new ArrayList(); scheduler = IngestScheduler.getInstance(); - - //setup current modules and listeners for modules changes - initModules(); - } - private void initModules() { - try { - moduleLoader = IngestModuleLoader.getDefault(); - abstractFileModules = moduleLoader.getAbstractFileIngestModules(); - - moduleLoader.addModulesReloadedListener(new PropertyChangeListener() { - @Override - public void propertyChange(PropertyChangeEvent evt) { - if (evt.getPropertyName().equals(IngestModuleLoader.Event.ModulesReloaded.toString())) { - //TODO might need to not allow to remove modules if they are running - abstractFileModules = moduleLoader.getAbstractFileIngestModules(); - dataSourceModules = moduleLoader.getDataSourceIngestModules(); - } - } - }); - dataSourceModules = moduleLoader.getDataSourceIngestModules(); - } catch (IngestModuleLoaderException ex) { - logger.log(Level.SEVERE, "Error getting module loader"); - } + // RJCTODO: May want finer-grained control than simply locking the IngestManager + synchronized long getNextDataSourceTaskId() { + return ++nextDataSourceTaskId; } - + /** * called by Installer in AWT thread once the Window System is ready */ @@ -214,8 +184,7 @@ public class IngestManager { MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to Ingest Manager updates. See log to determine which module. 
Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); } } - - + /** * Fire event when file is done with a pipeline run * @param objId ID of file that is done @@ -230,7 +199,6 @@ public class IngestManager { } } - /** * Fire event for ModuleDataEvent (when modules post data to blackboard, etc.) * @param moduleDataEvent @@ -259,6 +227,8 @@ public class IngestManager { } } + // RJCTODO: This method and the concept it supports (modules are able to query the success or failure of + // other modules in the pipeline by name) may be obsolete. /** * Returns the return value from a previously run module on the file being * currently analyzed. @@ -266,16 +236,17 @@ public class IngestManager { * @param moduleName Name of module. * @returns Return value from that module if it was previously run. */ - IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { - synchronized (abstractFileModulesRetValues) { - if (abstractFileModulesRetValues.containsKey(moduleName)) { - return abstractFileModulesRetValues.get(moduleName); - } else { - return IngestModuleAbstractFile.ProcessResult.UNKNOWN; - } - } - } +// IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { +// synchronized (abstractFileModulesRetValues) { +// if (abstractFileModulesRetValues.containsKey(moduleName)) { +// return abstractFileModulesRetValues.get(moduleName); +// } else { +// return IngestModuleAbstractFile.ProcessResult.UNKNOWN; +// } +// } +// } + // RJCTODO: Update comment /** * Multiple data-sources version of scheduleDataSource() method. Enqueues multiple sources inputs (Content objects) * and associated modules at once @@ -283,15 +254,16 @@ public class IngestManager { * @param modules modules to scheduleDataSource on every data source * @param inputs input data sources to enqueue and scheduleDataSource the ingest modules on */ - public void scheduleDataSource(final List modules, final List inputs) { - logger.log(Level.INFO, "Will enqueue number of inputs: " + inputs.size() - + " to " + modules.size() + " modules."); + public void scheduleDataSource(final List dataSources, final List moduleTemplates, boolean processUnallocatedSpace) { + // RJCTODO: If this is useful logging, reimplement +// logger.log(Level.INFO, "Will enqueue number of inputs: " + inputs.size() +// + " to " + modules.size() + " modules."); if (!isIngestRunning() && ui != null) { ui.clearMessages(); } - queueWorker = new EnqueueWorker(modules, inputs); + queueWorker = new EnqueueWorker(dataSources, moduleTemplates, processUnallocatedSpace); queueWorker.execute(); if (ui != null) { @@ -299,6 +271,7 @@ public class IngestManager { } } + // RJCTODO: Comment out of date, is this even used? /** * IngestManager entry point, enqueues data to be processed and starts new ingest * as needed, or just enqueues data to an existing pipeline. 
@@ -313,13 +286,14 @@ public class IngestManager { * @param modules modules to scheduleDataSource on the data source input * @param input input data source Content objects to scheduleDataSource the ingest modules on */ - public void scheduleDataSource(final List modules, final Content input) { - List inputs = new ArrayList(); - inputs.add(input); - logger.log(Level.INFO, "Will enqueue input: " + input.getName()); - scheduleDataSource(modules, inputs); + public void scheduleDataSource(final Content dataSource, final List moduleTemplates, boolean processUnallocatedSpace) { + List dataSources = new ArrayList<>(); + dataSources.add(dataSource); + logger.log(Level.INFO, "Will enqueue input: {0}", dataSource.getName()); + scheduleDataSource(dataSources, moduleTemplates, processUnallocatedSpace); } + // RJCTODO: Fix comment /** * Schedule a file for ingest and add it to ongoing file ingest process on the same data source. * Scheduler updates the current progress. @@ -331,8 +305,8 @@ public class IngestManager { * @param pipelineContext ingest context used to ingest parent of the file * to be scheduled */ - void scheduleFile(AbstractFile file, PipelineContext pipelineContext) { - scheduler.getFileScheduler().schedule(file, pipelineContext); + void scheduleFile(long dataSourceTaskId, AbstractFile file) { + scheduler.getFileScheduler().scheduleIngestOfDerivedFile(dataSourceTaskId, file); } /** @@ -349,82 +323,82 @@ public class IngestManager { final IngestScheduler.DataSourceScheduler dataSourceScheduler = scheduler.getDataSourceScheduler(); final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); boolean allInited = true; - IngestModuleAbstract failedModule = null; - String errorMessage = ""; - logger.log(Level.INFO, "DataSource queue: " + dataSourceScheduler.toString()); - logger.log(Level.INFO, "File queue: " + fileScheduler.toString()); +// IngestModuleAbstract failedModule = null; RJCTODO: DO not currently have an early module init scheme +// String errorMessage = ""; + logger.log(Level.INFO, "DataSource queue: {0}", dataSourceScheduler.toString()); + logger.log(Level.INFO, "File queue: {0}", fileScheduler.toString()); if (!ingestMonitor.isRunning()) { ingestMonitor.start(); } + // RJCTODO: Fix data source ingest ///////// // Start the data source-level ingest modules - List newThreads = new ArrayList<>(); +// List newThreads = new ArrayList<>(); // cycle through each data source content in the queue - while (dataSourceScheduler.hasNext()) { - if (allInited == false) { - break; - } - //dequeue - // get next data source content and set of modules - final DataSourceTask dataSourceTask = dataSourceScheduler.next(); - - // check if each module for this data source content is already running - for (IngestModuleDataSource dataSourceTaskModule : dataSourceTask.getModules()) { - boolean alreadyRunning = false; - for (IngestDataSourceThread worker : dataSourceIngesters) { - // ignore threads that are on different data sources - if (!worker.getContent().equals(dataSourceTask.getContent())) { - continue; //check next worker - } - //same data source, check module (by name, not id, since different instances) - if (worker.getModule().getName().equals(dataSourceTaskModule.getName())) { - alreadyRunning = true; - logger.log(Level.INFO, "Data Source Ingester <" + dataSourceTask.getContent() - + ", " + dataSourceTaskModule.getName() + "> is already running"); - break; - } - } - //checked all workers - if (alreadyRunning == false) { - logger.log(Level.INFO, "Starting new data source 
Ingester <" + dataSourceTask.getContent() - + ", " + dataSourceTaskModule.getName() + ">"); - //data source modules are now initialized per instance - - IngestModuleInit moduleInit = new IngestModuleInit(); - - PipelineContext dataSourcepipelineContext = - dataSourceTask.getPipelineContext(); - - final IngestDataSourceThread newDataSourceWorker = new IngestDataSourceThread(this, - dataSourcepipelineContext, dataSourceTask.getContent(), dataSourceTaskModule, moduleInit); - try { - newDataSourceWorker.init(); - } catch (Exception e) { - logger.log(Level.SEVERE, "DataSource ingest module failed init(): " + dataSourceTaskModule.getName(), e); - allInited = false; - failedModule = dataSourceTaskModule; - errorMessage = e.getMessage(); - break; - } - dataSourceIngesters.add(newDataSourceWorker); - // Add the worker to the list of new IngestThreads to be started - // if all modules initialize. - newThreads.add(newDataSourceWorker); - } - } - } +// while (dataSourceScheduler.hasNext()) { +// if (allInited == false) { +// break; +// } +// //dequeue +// // get next data source content and set of modules +// final DataSourceTask dataSourceTask = dataSourceScheduler.next(); +// +// // check if each module for this data source content is already running +// for (IngestModuleDataSource dataSourceTaskModule : dataSourceTask.getModules()) { +// boolean alreadyRunning = false; +// for (IngestDataSourceThread worker : dataSourceIngesters) { +// // ignore threads that are on different data sources +// if (!worker.getContent().equals(dataSourceTask.getDataSource())) { +// continue; //check next worker +// } +// //same data source, check module (by name, not id, since different instances) +// if (worker.getModule().getName().equals(dataSourceTaskModule.getName())) { +// alreadyRunning = true; +// logger.log(Level.INFO, "Data Source Ingester <" + dataSourceTask.getDataSource() +// + ", " + dataSourceTaskModule.getName() + "> is already running"); +// break; +// } +// } +// //checked all workers +// if (alreadyRunning == false) { +// logger.log(Level.INFO, "Starting new data source Ingester <" + dataSourceTask.getDataSource() +// + ", " + dataSourceTaskModule.getName() + ">"); +// //data source modules are now initialized per instance +// +// IngestModuleInit moduleInit = new IngestModuleInit(); +// +// PipelineContext dataSourcepipelineContext = +// dataSourceTask.getPipelineContext(); +// +// final IngestDataSourceThread newDataSourceWorker = new IngestDataSourceThread(this, +// dataSourcepipelineContext, dataSourceTask.getDataSource(), dataSourceTaskModule, moduleInit); +// try { +// newDataSourceWorker.init(); +// } catch (Exception e) { +// logger.log(Level.SEVERE, "DataSource ingest module failed init(): " + dataSourceTaskModule.getName(), e); +// allInited = false; +// failedModule = dataSourceTaskModule; +// errorMessage = e.getMessage(); +// break; +// } +// dataSourceIngesters.add(newDataSourceWorker); +// // Add the worker to the list of new IngestThreads to be started +// // if all modules initialize. 
+//                newThreads.add(newDataSourceWorker);
+//            }
+//        }
+//    }
 
         // Check to make sure all modules initialized
-        if (allInited == false) {
-            displayInitError(failedModule.getName(), errorMessage);
-            dataSourceIngesters.removeAll(newThreads);
-            return;
-        }
+//        if (allInited == false) {
+//            displayInitError(failedModule.getName(), errorMessage);
+//            dataSourceIngesters.removeAll(newThreads);
+//            return;
+//        }
 
-        //AbstractFile ingester
         boolean startAbstractFileIngester = false;
         if (fileScheduler.hasNext()) {
             if (abstractFileIngester == null) {
@@ -441,47 +415,51 @@ public class IngestManager {
         }
 
         if (startAbstractFileIngester) {
-            stats = new IngestManagerStats();
+//            stats = new IngestManagerStats(); RJCTODO: This class may or may not be reimplemented
            abstractFileIngester = new IngestAbstractFileProcessor();
            //init all fs modules, everytime new worker starts
+            // RJCTODO: Currently don't have an early module init concept quite like this, modules will be initialized
+            // when the thread gets its pipeline instance(s) from the data source task.
+//            for (IngestModuleAbstractFile s : abstractFileModules) {
+//                // This was added at one point to remove the message about non-configured HashDB even
+//                // when HashDB was not enabled. However, it adds some problems if a second ingest is
+//                // kicked off while the first is ongoing. If the 2nd ingest has a module enabled that
+//                // was not initially enabled, it will never have init called. We also need to call
+//                // complete and need a similar way of passing down data to that thread to tell it which
+//                // it should call complete on (otherwise it could call complete on a module that never
+//                // had init() called.
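+            // The per-task pipelines introduced in this patch would avoid the
+            // problem described above: each DataSourceTask builds fresh module
+            // instances, so complete() can be paired with exactly the modules
+            // that had init() called. A rough sketch (names from this patch,
+            // pairing logic hypothetical, not implemented):
+            //
+            //   IngestPipelines pipelines = dataSourceTask.getIngestPipelines();
+            //   FileIngestPipeline pipeline = pipelines.getFileIngestPipeline(); // init() runs in here
+            //   // ... doTask() for each file task ...
+            //   // complete() is then invoked only on the modules held by this
+            //   // pipeline instance, never on a stale singleton.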
+//                //if (fileScheduler.hasModuleEnqueued(s) == false) {
+//                //    continue;
+//                //}
+//                IngestModuleInit moduleInit = new IngestModuleInit();
+//                try {
+//                    s.init(moduleInit);
+//                } catch (Exception e) {
+//                    logger.log(Level.SEVERE, "File ingest module failed init(): " + s.getName(), e);
+//                    allInited = false;
+//                    failedModule = s;
+//                    errorMessage = e.getMessage();
+//                    break;
+//                }
+//            }
         }
 
         if (allInited) {
+            // RJCTODO: Data source ingest temporarily disabled
             // Start DataSourceIngestModules
-            for (IngestDataSourceThread dataSourceWorker : newThreads) {
-                dataSourceWorker.execute();
-                IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), dataSourceWorker.getModule().getName());
-            }
+//            for (IngestDataSourceThread dataSourceWorker : newThreads) {
+//                dataSourceWorker.execute();
+//                IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), dataSourceWorker.getModule().getName());
+//            }
 
             // Start AbstractFileIngestModules
             if (startAbstractFileIngester) {
                 abstractFileIngester.execute();
             }
         } else {
-            displayInitError(failedModule.getName(), errorMessage);
-            dataSourceIngesters.removeAll(newThreads);
+            // RJCTODO: Do not really have this failed module concept at this point
+//            displayInitError(failedModule.getName(), errorMessage);
+//            dataSourceIngesters.removeAll(newThreads);
             abstractFileIngester = null;
         }
     }
@@ -492,14 +470,18 @@ public class IngestManager {
      * @param moduleName The name of the module that failed to initialize.
      * @param errorMessage The message from the exception that was thrown.
      */
-    private void displayInitError(String moduleName, String errorMessage) {
-        MessageNotifyUtil.Message.error(
-                "Failed to load " + moduleName + " ingest module.\n\n"
-                + "No ingest modules will be run. Please disable the module "
-                + "or fix the error and restart ingest by right clicking on "
-                + "the data source and selecting Run Ingest Modules.\n\n"
-                + "Error: " + errorMessage);
-    }
+    // RJCTODO: Do not have an implementation of this early load concept yet
+    // Perhaps the thing to do is to create and destroy a pipeline every time
+    // a task is created...that would lead to spurious events, though. Could
+    // build the first instances of the pipelines per thread?
+//    private void displayInitError(String moduleName, String errorMessage) {
+//        MessageNotifyUtil.Message.error(
+//                "Failed to load " + moduleName + " ingest module.\n\n"
+//                + "No ingest modules will be run. Please disable the module "
+//                + "or fix the error and restart ingest by right clicking on "
+//                + "the data source and selecting Run Ingest Modules.\n\n"
+//                + "Error: " + errorMessage);
+//    }
 
     /**
     * stop currently running threads if any (e.g. 
when changing a case) @@ -517,17 +499,18 @@ public class IngestManager { //stop module workers if (abstractFileIngester != null) { + // RJCTODO: rework this //send signals to all file modules - for (IngestModuleAbstractFile s : this.abstractFileModules) { - if (isModuleRunning(s)) { - try { - s.stop(); - } catch (Exception e) { - logger.log(Level.WARNING, "Unexpected exception while stopping module: " + s.getName(), e); - } - } - - } +// for (IngestModuleAbstractFile s : this.abstractFileModules) { +// if (isModuleRunning(s)) { +// try { +// s.stop(); +// } catch (Exception e) { +// logger.log(Level.WARNING, "Unexpected exception while stopping module: " + s.getName(), e); +// } +// } +// +// } //stop fs ingester thread boolean cancelled = abstractFileIngester.cancel(true); if (!cancelled) { @@ -535,37 +518,38 @@ public class IngestManager { } abstractFileIngester = null; - } - List toStop = new ArrayList(); - toStop.addAll(dataSourceIngesters); - - for (IngestDataSourceThread dataSourceWorker : toStop) { - IngestModuleDataSource s = dataSourceWorker.getModule(); - - //stop the worker thread if thread is running - boolean cancelled = dataSourceWorker.cancel(true); - if (!cancelled) { - logger.log(Level.INFO, "Unable to cancel data source ingest worker for module: " - + dataSourceWorker.getModule().getName() + " data source: " + dataSourceWorker.getContent().getName()); - } - - //stop notification to module to cleanup resources - if (isModuleRunning(s)) { - try { - dataSourceWorker.getModule().stop(); - } catch (Exception e) { - logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e); - } - } - } + // RJCTODO: Restore or replace use of IngestDataSourceThread +// List toStop = new ArrayList<>(); +// toStop.addAll(dataSourceIngesters); +// +// for (IngestDataSourceThread dataSourceWorker : toStop) { +// IngestModuleDataSource s = dataSourceWorker.getModule(); +// +// //stop the worker thread if thread is running +// boolean cancelled = dataSourceWorker.cancel(true); +// if (!cancelled) { +// logger.log(Level.INFO, "Unable to cancel data source ingest worker for module: " +// + dataSourceWorker.getModule().getName() + " data source: " + dataSourceWorker.getContent().getName()); +// } +// +// //stop notification to module to cleanup resources +// if (isModuleRunning(s)) { +// try { +// dataSourceWorker.getModule().stop(); +// } catch (Exception e) { +// logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e); +// } +// } +// } logger.log(Level.INFO, "stopped all"); } + // RJCTODO: This comment is misleading /** - * Test if any ingester modules are running + * Test if any ingest modules are running * * @return true if any module is running, false otherwise */ @@ -579,26 +563,6 @@ public class IngestManager { } else { return false; } - - } - - /** - * Test is any file ingest modules are running. 
- * - * @return true if any ingest modules are running, false otherwise - */ - public synchronized boolean areModulesRunning() { - for (IngestModuleAbstract serv : abstractFileModules) { - if (serv.hasBackgroundJobsRunning()) { - return true; - } - } - for (IngestDataSourceThread thread : dataSourceIngesters) { - if (isModuleRunning(thread.getModule())) { - return false; - } - } - return false; } /** @@ -625,116 +589,53 @@ public class IngestManager { * check the status of the data-source-level ingest pipeline */ public synchronized boolean isDataSourceIngestRunning() { - if (dataSourceIngesters.isEmpty()) { - return false; - } + // RJCTODO: Data source ingest temporarily disabled +// if (dataSourceIngesters.isEmpty()) { +// return false; +// } //in case there are still data source ingesters in the queue but already done - boolean allDone = true; - for (IngestDataSourceThread ii : dataSourceIngesters) { - if (ii.isDone() == false) { - allDone = false; - break; - } - } - if (allDone) { - return false; - } else { - return true; - } - } - - /** - * check if the module is running (was started and not yet complete/stopped) - * give a complete answer, i.e. it's already consumed all files but it might - * have background threads running - * - */ - public boolean isModuleRunning(final IngestModuleAbstract module) { - - if (module.getType() == IngestModuleAbstract.ModuleType.AbstractFile) { - IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); - - if (fileScheduler.hasModuleEnqueued((IngestModuleAbstractFile) module)) { - //has work enqueued, so running - return true; - } else { - //not in the queue, but could still have bkg work running - return module.hasBackgroundJobsRunning(); - } - - } else { - //data source module - synchronized (this) { - if (dataSourceIngesters.isEmpty()) { - return false; - } - IngestDataSourceThread imt = null; - for (IngestDataSourceThread ii : dataSourceIngesters) { - if (ii.getModule().equals(module)) { - imt = ii; - break; - } - } - - if (imt == null) { - return false; - } - - if (imt.isDone() == false) { - return true; - } else { - return false; - } - } - } +// boolean allDone = true; +// for (IngestDataSourceThread ii : dataSourceIngesters) { +// if (ii.isDone() == false) { +// allDone = false; +// break; +// } +// } +// if (allDone) { +// return false; +// } else { +// return true; +// } + return false; } /** * Check if data source scheduler has files in queues * @return true if more sources in queues, false otherwise */ - public boolean getDataSourceSchedulerHasNext() { - return this.scheduler.getDataSourceScheduler().hasNext(); - } + // RJCTODO: What is this little wrapper about? +// public boolean getDataSourceSchedulerHasNext() { +// return this.scheduler.getDataSourceScheduler().hasNext(); +// } /** * Check if file scheduler has files in queues * @return true if more files in queues, false otherwise */ - public boolean getFileSchedulerHasNext() { - return scheduler.getFileScheduler().hasNext(); - } + // RJCTODO: What is this little wrapper about? 
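// ---[ Editor's sketch, not part of this patch ]----------------------------
// Re: the RJCTODO above -- the little wrappers appear to exist so UI code can
// ask IngestManager about queue state without reaching into IngestScheduler
// directly. If they return, a hedged shape (method name hypothetical):
//
//     boolean hasQueuedFileTasks() {
//         // delegate, so callers never hold a reference to the scheduler
//         return scheduler.getFileScheduler().hasNext();
//     }
// ---[ end sketch ]----------------------------------------------------------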
+//    public boolean getFileSchedulerHasNext() {
+//        return scheduler.getFileScheduler().hasNext();
+//    }
 
-
-    /**
-     * returns if manager is currently configured to process unalloc space
-     *
-     * @return true if process unalloc space is set
-     */
-    boolean getProcessUnallocSpace() {
-        return processUnallocSpace;
-    }
-
-    /**
-     * Sets process unalloc space setting on the manager
-     *
-     * @param processUnallocSpace
-     */
-    public void setProcessUnallocSpace(boolean processUnallocSpace) {
-        this.processUnallocSpace = processUnallocSpace;
-    }
-
+    // RJCTODO: May not be used. May or may not reimplement stats class.
    /**
     * returns ingest summary report (how many files ingested, any errors, etc)
     */
-    String getReport() {
-        return stats.toString();
-    }
+//    String getReport() {
+//        return stats.toString();
+//    }
 
-
-
-
    /**
     * Module publishes message using IngestManager handle. Does not block. The
     * message gets enqueued in the GUI thread and displayed in a widget
     * filter them out (slower)
     */
    void postMessage(final IngestMessage message) {
        // RJCTODO: May or may not reimplement stats class
//        if (stats != null) {
//            //record the error for stats, if stats are running
//            if (message.getMessageType() == MessageType.ERROR) {
//                stats.addError(message.getSource());
//            }
//        }
        if (ui != null) {
            ui.displayMessage(message);
        }
    }

    // RJCTODO: Um, what is this? Appears to be used only by PstParser, seems a bit awkward
    /**
     * Get free disk space of the drive where ingest data are written. That
     * drive is being monitored by the IngestMonitor thread when ingest is running. 
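// ---[ Editor's sketch, not part of this patch ]----------------------------
// The javadoc above describes reporting free space on the drive receiving
// ingest output. Plain java.io covers the basic query; the caseDirectory
// argument is a hypothetical stand-in for whatever path IngestMonitor watches:
//
//     long getFreeDiskSpaceBytes(java.io.File caseDirectory) {
//         return caseDirectory.getUsableSpace(); // returns 0 for an invalid path
//     }
// ---[ end sketch ]----------------------------------------------------------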
@@ -770,204 +672,179 @@ public class IngestManager { } } - /** - * helper to return all loaded data-source ingest modules managed sorted in order as - * specified in pipeline_config XML - */ - public List enumerateDataSourceModules() { - return moduleLoader.getDataSourceIngestModules(); - } - - /** - * helper to return all loaded file modules managed sorted in order as - * specified in pipeline_config XML - */ - public List enumerateAbstractFileModules() { - return moduleLoader.getAbstractFileIngestModules(); - } - - public List enumerateAllModules() { - List modules = new ArrayList<>(); - modules.addAll(enumerateDataSourceModules()); - modules.addAll(enumerateAbstractFileModules()); - return modules; - } - - List getIngestModuleFactories() { - return moduleLoader.getIngestModuleFactories(); - } - + // RJCTODO: Data source ingest is temporarily disabled //data source worker to remove itself when complete or interrupted - void removeDataSourceIngestWorker(IngestDataSourceThread worker) { - //remove worker - synchronized (this) { - dataSourceIngesters.remove(worker); - } - } +// void removeDataSourceIngestWorker(IngestDataSourceThread worker) { +// //remove worker +// synchronized (this) { +// dataSourceIngesters.remove(worker); +// } +// } +// RJCTODO: Decide whether or not to reimplement this class /** * collects IngestManager statistics during runtime */ - private class IngestManagerStats { - - private Date startTime; - private Date endTime; - private int errorsTotal; - private Map errors; - private final DateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - private final StopWatch timer = new StopWatch(); - private IngestModuleAbstract currentModuleForTimer; - //file module timing stats, datasource module timers are logged in IngestDataSourceThread class - private final Map fileModuleTimers = new HashMap(); - - IngestManagerStats() { - errors = new HashMap(); - } - - /** - * records start time of the file process for the module must be - * followed by logFileModuleEndProcess for the same module - * - * @param module to record start time for processing a file - */ - void logFileModuleStartProcess(IngestModuleAbstract module) { - timer.reset(); - timer.start(); - currentModuleForTimer = module; - } - - /** - * records stop time of the file process for the module must be preceded - * by logFileModuleStartProcess for the same module - * - * @param module to record stop time for processing a file - */ - void logFileModuleEndProcess(IngestModuleAbstract module) { - timer.stop(); - if (module != currentModuleForTimer) { - logger.log(Level.WARNING, "Invalid module passed in to record stop processing: " + module.getName() - + ", expected: " + currentModuleForTimer.getName()); - } else { - final long elapsed = timer.getElapsedTime(); - final long current = fileModuleTimers.get(module.getName()); - fileModuleTimers.put(module.getName(), elapsed + current); - } - - currentModuleForTimer = null; - } - - String getFileModuleStats() { - StringBuilder sb = new StringBuilder(); - for (final String moduleName : fileModuleTimers.keySet()) { - sb.append(moduleName).append(" took: ") - .append(fileModuleTimers.get(moduleName) / 1000) - .append(" secs. 
to process()").append('\n'); - } - return sb.toString(); - } - - @Override - public String toString() { - final String EOL = System.getProperty("line.separator"); - StringBuilder sb = new StringBuilder(); - if (startTime != null) { - sb.append("Start time: ").append(dateFormatter.format(startTime)).append(EOL); - } - if (endTime != null) { - sb.append("End time: ").append(dateFormatter.format(endTime)).append(EOL); - } - sb.append("Total ingest time: ").append(getTotalTimeString()).append(EOL); - sb.append("Total errors: ").append(errorsTotal).append(EOL); - if (errorsTotal > 0) { - sb.append("Errors per module:"); - for (String moduleName : errors.keySet()) { - sb.append("\t").append(moduleName).append(": ").append(errors.get(moduleName)).append(EOL); - } - } - return sb.toString(); - } - - public String toHtmlString() { - StringBuilder sb = new StringBuilder(); - sb.append(""); - sb.append("Ingest time: ").append(getTotalTimeString()).append("
"); - sb.append("Total errors: ").append(errorsTotal).append("
"); - sb.append("\n"); - - for (final String moduleName : fileModuleTimers.keySet()) { - sb.append("\n"); - } - sb.append("
ModuleTimeErrors
").append(moduleName).append(""); - sb.append(msToString(fileModuleTimers.get(moduleName))).append(""); - if (errors.get(moduleName) == null) { - sb.append("0"); - } else { - sb.append(errors.get(moduleName)); - } - sb.append("
"); - sb.append(""); - return sb.toString(); - } - - void start() { - startTime = new Date(); - - for (IngestModuleAbstractFile module : abstractFileModules) { - fileModuleTimers.put(module.getName(), 0L); - } - } - - void end() { - endTime = new Date(); - } - - long getTotalTime() { - if (startTime == null || endTime == null) { - return 0; - } - return endTime.getTime() - startTime.getTime(); - } - - String getStartTimeString() { - return dateFormatter.format(startTime); - } - - String getEndTimeString() { - return dateFormatter.format(endTime); - } - - /** - * convert time in miliseconds to printable string in XX:YY:ZZ format. - * @param ms - * @return - */ - private String msToString(long ms) { - long hours = TimeUnit.MILLISECONDS.toHours(ms); - ms -= TimeUnit.HOURS.toMillis(hours); - long minutes = TimeUnit.MILLISECONDS.toMinutes(ms); - ms -= TimeUnit.MINUTES.toMillis(minutes); - long seconds = TimeUnit.MILLISECONDS.toSeconds(ms); - final StringBuilder sb = new StringBuilder(); - sb.append(hours < 10 ? "0" : "").append(hours).append(':').append(minutes < 10 ? "0" : "").append(minutes).append(':').append(seconds < 10 ? "0" : "").append(seconds); - return sb.toString(); - } - - String getTotalTimeString() { - long ms = getTotalTime(); - return msToString(ms); - } - - synchronized void addError(IngestModuleAbstract source) { - ++errorsTotal; - String moduleName = source.getName(); - Integer curModuleErrorI = errors.get(moduleName); - if (curModuleErrorI == null) { - errors.put(moduleName, 1); - } else { - errors.put(moduleName, curModuleErrorI + 1); - } - } - } +// private class IngestManagerStats { +// +// private Date startTime; +// private Date endTime; +// private int errorsTotal; +// private Map errors; +// private final DateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); +// private final StopWatch timer = new StopWatch(); +// private IngestModuleAbstract currentModuleForTimer; +// //file module timing stats, datasource module timers are logged in IngestDataSourceThread class +// private final Map fileModuleTimers = new HashMap(); +// +// IngestManagerStats() { +// errors = new HashMap(); +// } +// +// /** +// * records start time of the file process for the module must be +// * followed by logFileModuleEndProcess for the same module +// * +// * @param module to record start time for processing a file +// */ +// void logFileModuleStartProcess(IngestModuleAbstract module) { +// timer.reset(); +// timer.start(); +// currentModuleForTimer = module; +// } +// +// /** +// * records stop time of the file process for the module must be preceded +// * by logFileModuleStartProcess for the same module +// * +// * @param module to record stop time for processing a file +// */ +// void logFileModuleEndProcess(IngestModuleAbstract module) { +// timer.stop(); +// if (module != currentModuleForTimer) { +// logger.log(Level.WARNING, "Invalid module passed in to record stop processing: " + module.getName() +// + ", expected: " + currentModuleForTimer.getName()); +// } else { +// final long elapsed = timer.getElapsedTime(); +// final long current = fileModuleTimers.get(module.getName()); +// fileModuleTimers.put(module.getName(), elapsed + current); +// } +// +// currentModuleForTimer = null; +// } +// +// String getFileModuleStats() { +// StringBuilder sb = new StringBuilder(); +// for (final String moduleName : fileModuleTimers.keySet()) { +// sb.append(moduleName).append(" took: ") +// .append(fileModuleTimers.get(moduleName) / 1000) +// .append(" secs. 
to process()").append('\n'); +// } +// return sb.toString(); +// } +// +// @Override +// public String toString() { +// final String EOL = System.getProperty("line.separator"); +// StringBuilder sb = new StringBuilder(); +// if (startTime != null) { +// sb.append("Start time: ").append(dateFormatter.format(startTime)).append(EOL); +// } +// if (endTime != null) { +// sb.append("End time: ").append(dateFormatter.format(endTime)).append(EOL); +// } +// sb.append("Total ingest time: ").append(getTotalTimeString()).append(EOL); +// sb.append("Total errors: ").append(errorsTotal).append(EOL); +// if (errorsTotal > 0) { +// sb.append("Errors per module:"); +// for (String moduleName : errors.keySet()) { +// sb.append("\t").append(moduleName).append(": ").append(errors.get(moduleName)).append(EOL); +// } +// } +// return sb.toString(); +// } +// +// public String toHtmlString() { +// StringBuilder sb = new StringBuilder(); +// sb.append(""); +// sb.append("Ingest time: ").append(getTotalTimeString()).append("
"); +// sb.append("Total errors: ").append(errorsTotal).append("
"); +// sb.append("\n"); +// +// for (final String moduleName : fileModuleTimers.keySet()) { +// sb.append("\n"); +// } +// sb.append("
ModuleTimeErrors
").append(moduleName).append(""); +// sb.append(msToString(fileModuleTimers.get(moduleName))).append(""); +// if (errors.get(moduleName) == null) { +// sb.append("0"); +// } else { +// sb.append(errors.get(moduleName)); +// } +// sb.append("
"); +// sb.append(""); +// return sb.toString(); +// } +// +// void start() { +// startTime = new Date(); +// +// for (IngestModuleAbstractFile module : abstractFileModules) { +// fileModuleTimers.put(module.getName(), 0L); +// } +// } +// +// void end() { +// endTime = new Date(); +// } +// +// long getTotalTime() { +// if (startTime == null || endTime == null) { +// return 0; +// } +// return endTime.getTime() - startTime.getTime(); +// } +// +// String getStartTimeString() { +// return dateFormatter.format(startTime); +// } +// +// String getEndTimeString() { +// return dateFormatter.format(endTime); +// } +// +// /** +// * convert time in miliseconds to printable string in XX:YY:ZZ format. +// * @param ms +// * @return +// */ +// private String msToString(long ms) { +// long hours = TimeUnit.MILLISECONDS.toHours(ms); +// ms -= TimeUnit.HOURS.toMillis(hours); +// long minutes = TimeUnit.MILLISECONDS.toMinutes(ms); +// ms -= TimeUnit.MINUTES.toMillis(minutes); +// long seconds = TimeUnit.MILLISECONDS.toSeconds(ms); +// final StringBuilder sb = new StringBuilder(); +// sb.append(hours < 10 ? "0" : "").append(hours).append(':').append(minutes < 10 ? "0" : "").append(minutes).append(':').append(seconds < 10 ? "0" : "").append(seconds); +// return sb.toString(); +// } +// +// String getTotalTimeString() { +// long ms = getTotalTime(); +// return msToString(ms); +// } +// +// synchronized void addError(IngestModuleAbstract source) { +// ++errorsTotal; +// String moduleName = source.getName(); +// Integer curModuleErrorI = errors.get(moduleName); +// if (curModuleErrorI == null) { +// errors.put(moduleName, 1); +// } else { +// errors.put(moduleName, curModuleErrorI + 1); +// } +// } +// } /** * File ingest pipeline processor. Worker thread that queries @@ -980,20 +857,33 @@ public class IngestManager { private class IngestAbstractFileProcessor extends SwingWorker { private Logger logger = Logger.getLogger(IngestAbstractFileProcessor.class.getName()); - //progress bar private ProgressHandle progress; - + @Override protected Object doInBackground() throws Exception { logger.log(Level.INFO, "Starting background ingest file processor"); logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); - stats.start(); + // RJCTODO: This may or may not be reimplemented +// stats.start(); + // RJCTODO: Interim version of this follows, is it really needed? + // Could replace with an ingest started event? 
//notify main thread modules started - for (IngestModuleAbstractFile s : abstractFileModules) { - IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), s.getName()); +// for (IngestModuleAbstractFile s : abstractFileModules) { +// IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), s.getName()); +// } + if (null != currentTask) { + List moduleTemplates = currentTask.getIngestPipelines().getIngestModuleTemplates(); + for (IngestModuleTemplate moduleTemplate : moduleTemplates) { + if (moduleTemplate.isEnabled()) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.isFileIngestModuleFactory()) { + fireModuleEvent(IngestModuleEvent.STARTED.toString(), moduleFactory.getModuleDisplayName()); + } + } + } } final String displayName = "File Ingest"; @@ -1020,66 +910,66 @@ public class IngestManager { //process AbstractFiles queue while (fileScheduler.hasNext()) { - final FileTask fileTask = fileScheduler.next(); - final DataSourceTask dataSourceTask = fileTask.getDataSourceTask(); - final PipelineContext filepipelineContext = dataSourceTask.getPipelineContext(); + FileTask fileTask = fileScheduler.next(); + fileTask.execute(1); // RJCTODO: Fake thread id, may not need thread ids - final AbstractFile fileToProcess = fileTask.getFile(); - + // RJCTODO: This may be obsolete //clear return values from modules for last file - synchronized (abstractFileModulesRetValues) { - abstractFileModulesRetValues.clear(); - } +// synchronized (abstractFileModulesRetValues) { +// abstractFileModulesRetValues.clear(); +// } //logger.log(Level.INFO, "IngestManager: Processing: {0}", fileToProcess.getName()); - - for (IngestModuleAbstractFile module : dataSourceTask.getModules()) { - //process the file with every file module - if (isCancelled()) { - logger.log(Level.INFO, "Terminating file ingest due to cancellation."); - return null; - } - progress.progress(fileToProcess.getName() + " (" + module.getName() + ")", processedFiles); - try { - stats.logFileModuleStartProcess(module); - IngestModuleAbstractFile.ProcessResult result = module.process(filepipelineContext, fileToProcess); - stats.logFileModuleEndProcess(module); - - //store the result for subsequent modules for this file - synchronized (abstractFileModulesRetValues) { - abstractFileModulesRetValues.put(module.getName(), result); - } - - } catch (Exception e) { - logger.log(Level.SEVERE, "Error: unexpected exception from module: " + module.getName(), e); - stats.addError(module); - } catch (OutOfMemoryError e) { - logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e); - stats.addError(module); - } - - } //end for every module - - //free the internal file resource after done with every module - fileToProcess.close(); - - // notify listeners thsi file is done - fireFileDone(fileToProcess.getId()); + // RJCTODO: Note cancellation check after each module runs +// for (IngestModuleAbstractFile module : dataSourceTask.getModules()) { +// //process the file with every file module +// if (isCancelled()) { +// logger.log(Level.INFO, "Terminating file ingest due to cancellation."); +// return null; +// } +// progress.progress(fileToProcess.getName() + " (" + module.getName() + ")", processedFiles); +// +// try { +// stats.logFileModuleStartProcess(module); +// IngestModuleAbstractFile.ProcessResult result = module.process(filepipelineContext, fileToProcess); +// stats.logFileModuleEndProcess(module); +// +// //store the result for subsequent modules for this file 
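// ---[ Editor's sketch, not part of this patch ]----------------------------
// The "return values" idea in the commented-out block below: each module's
// ProcessResult for the current file is parked in a name-keyed map so later
// modules in the pipeline can skip files an earlier module failed on. A
// hedged standalone version (field and method names hypothetical):
//
//     private final Map<String, IngestModuleAbstractFile.ProcessResult> lastFileResults =
//             new HashMap<>();
//
//     void recordResult(String moduleName, IngestModuleAbstractFile.ProcessResult result) {
//         synchronized (lastFileResults) {
//             lastFileResults.put(moduleName, result); // overwritten for every new file
//         }
//     }
// ---[ end sketch ]----------------------------------------------------------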
+// synchronized (abstractFileModulesRetValues) { +// abstractFileModulesRetValues.put(module.getName(), result); +// } +// +// } catch (Exception e) { +// logger.log(Level.SEVERE, "Error: unexpected exception from module: " + module.getName(), e); +// stats.addError(module); +// } catch (OutOfMemoryError e) { +// logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e); +// stats.addError(module); +// } +// +// } //end for every module +// +// //free the internal file resource after done with every module +// fileToProcess.close(); +// +// // notify listeners thsi file is done +// fireFileDone(fileToProcess.getId()); + // RJCTODO: Move this to the DataSourceTask -> DataSourceIngestJob, FileIngestJob (?); "task" is usually used for Runnable int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst(); if (newTotalEnqueuedFiles > totalEnqueuedFiles) { //update if new enqueued totalEnqueuedFiles = newTotalEnqueuedFiles + 1;// + processedFiles + 1; - //processedFiles = 0; - //reset + //processedFiles = 0; // RJCTODO: Previously commented out + //reset // RJCTODO: Previously commented out progress.switchToIndeterminate(); progress.switchToDeterminate(totalEnqueuedFiles); } if (processedFiles < totalEnqueuedFiles) { //fix for now to handle the same datasource Content enqueued twice ++processedFiles; } - //--totalEnqueuedFiles; + //--totalEnqueuedFiles; // RJCTODO: Previously commented out } //end of for every AbstractFile @@ -1093,27 +983,38 @@ public class IngestManager { super.get(); //block and get all exceptions thrown while doInBackground() //notify modules of completion if (!this.isCancelled()) { - for (IngestModuleAbstractFile s : abstractFileModules) { - try { - s.complete(); - IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), s.getName()); - } - catch (Exception ex) { - logger.log(Level.SEVERE, "Module " + s.getName() + " threw exception during call to complete()", ex); +// for (IngestModuleAbstractFile s : abstractFileModules) { +// try { +// s.complete(); +// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), s.getName()); +// } +// catch (Exception ex) { +// logger.log(Level.SEVERE, "Module " + s.getName() + " threw exception during call to complete()", ex); +// } +// } + if (null != currentTask) { + currentTask.getIngestPipelines().completeFileIngestPipeline(); + List moduleTemplates = currentTask.getIngestPipelines().getIngestModuleTemplates(); + for (IngestModuleTemplate moduleTemplate : moduleTemplates) { + if (moduleTemplate.isEnabled()) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.isFileIngestModuleFactory()) { + fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), moduleFactory.getModuleDisplayName()); + } + } } } } - + + // RJCTODO: Running the garbage collector? Really? For the sake of stats? 
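// ---[ Editor's sketch, not part of this patch ]----------------------------
// Re: the RJCTODO above -- heap figures can be read without forcing a
// collection, e.g. via java.lang.management, if the System.gc() below is only
// there to stabilize the logged numbers:
//
//     MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
//     logger.log(Level.INFO, "Heap used: {0} of {1} bytes",
//             new Object[]{heap.getUsed(), heap.getMax()});
// ---[ end sketch ]----------------------------------------------------------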
logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); logger.log(Level.INFO, "Freeing jvm heap resources post file pipeline run"); System.gc(); logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); - } catch (CancellationException e) { + } catch (CancellationException | InterruptedException e) { //task was cancelled handleInterruption(); - } catch (InterruptedException ex) { - handleInterruption(); } catch (ExecutionException ex) { handleInterruption(); logger.log(Level.SEVERE, "Fatal error during ingest.", ex); @@ -1121,34 +1022,51 @@ public class IngestManager { handleInterruption(); logger.log(Level.SEVERE, "Fatal error during ingest.", ex); } finally { - stats.end(); + // RJCTODO: Stats may or may not be reimplemented +// stats.end(); progress.finish(); if (!this.isCancelled()) { - logger.log(Level.INFO, "Summary Report: " + stats.toString()); - logger.log(Level.INFO, "File module timings: " + stats.getFileModuleStats()); + // RJCTODO: Stats may or may not be reimplemented +// logger.log(Level.INFO, "Summary Report: " + stats.toString()); +// logger.log(Level.INFO, "File module timings: " + stats.getFileModuleStats()); if (ui != null) { - logger.log(Level.INFO, "Ingest messages count: " + ui.getMessagesCount()); + logger.log(Level.INFO, "Ingest messages count: {0}", ui.getMessagesCount()); } - - IngestManager.this.postMessage(IngestMessage.createManagerMessage("File Ingest Complete", - stats.toHtmlString())); + // RJCTODO: Stats may or may not be reimplemented +// IngestManager.this.postMessage(IngestMessage.createManagerMessage("File Ingest Complete", +// stats.toHtmlString())); } } - } private void handleInterruption() { - for (IngestModuleAbstractFile s : abstractFileModules) { - if (isModuleRunning(s)) { - try { - s.stop(); - } catch (Exception e) { - logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e); +// for (IngestModuleAbstractFile s : abstractFileModules) { + // RJCTODO: This is going away +// if (isModuleRunning(s)) { +// try { +// s.stop(); +// } catch (Exception e) { +// logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e); +// } +// } +// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), s.getName()); +// } + if (null != currentTask) { + currentTask.getIngestPipelines().stopFileIngestPipeline(); + List moduleTemplates = currentTask.getIngestPipelines().getIngestModuleTemplates(); + for (IngestModuleTemplate moduleTemplate : moduleTemplates) { + if (moduleTemplate.isEnabled()) { + IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory(); + if (moduleFactory.isFileIngestModuleFactory()) { + fireModuleEvent(IngestModuleEvent.STOPPED.toString(), moduleFactory.getModuleDisplayName()); + } + } } } - IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), s.getName()); - } + + + //empty queues scheduler.getFileScheduler().empty(); } @@ -1158,20 +1076,20 @@ public class IngestManager { * Thread that adds content/file and module pairs to queues. * Starts pipelines when done. */ private class EnqueueWorker extends SwingWorker { - - private List modules; - private final List inputs; + private final List dataSources; + private final List moduleTemplates; + private final boolean processUnallocatedSpace; private final Logger logger = Logger.getLogger(EnqueueWorker.class.getName()); + private ProgressHandle progress; // RJCTODO: Is this useful? 
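// ---[ Editor's sketch, not part of this patch ]----------------------------
// Re: the RJCTODO -- the handle is what surfaces queueing progress in the
// NetBeans status bar. Its lifecycle as used in doInBackground() below, with
// illustrative labels and counts:
//
//     progress = ProgressHandleFactory.createHandle("Queueing Ingest");
//     progress.start(2 * dataSources.size()); // two scheduling steps per data source
//     progress.progress("image1.dd", 1);      // names the unit being queued
//     progress.finish();                      // always reached, even on cancellation
// ---[ end sketch ]----------------------------------------------------------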
- EnqueueWorker(final List modules, final List inputs) { - this.modules = modules; - this.inputs = inputs; + EnqueueWorker(final List dataSources, final List moduleTemplates, final boolean processUnallocatedSpace) { + this.dataSources = dataSources; + this.moduleTemplates = moduleTemplates; + this. processUnallocatedSpace = processUnallocatedSpace; } - private ProgressHandle progress; @Override protected Object doInBackground() throws Exception { - final String displayName = "Queueing Ingest"; progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() { @Override @@ -1184,144 +1102,72 @@ public class IngestManager { } }); - progress.start(2 * inputs.size()); - //progress.switchToIndeterminate(); - queueAll(modules, inputs); + progress.start(2 * dataSources.size()); + int processed = 0; + for (Content dataSource : dataSources) { + final String inputName = dataSource.getName(); + DataSourceTask dataSourceTask = new DataSourceTask(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace); + logger.log(Level.INFO, "Queing data source ingest task: {0}", dataSourceTask); + progress.progress("DataSource Ingest" + " " + inputName, processed); + scheduler.getDataSourceScheduler().schedule(dataSourceTask); + progress.progress("DataSource Ingest" + " " + inputName, ++processed); + + logger.log(Level.INFO, "Queing file ingest task: {0}", dataSourceTask); + progress.progress("File Ingest" + " " + inputName, processed); + scheduler.getFileScheduler().scheduleIngestOfFiles(dataSourceTask); + progress.progress("File Ingest" + " " + inputName, ++processed); + + currentTask = dataSourceTask; // RJCTODO: temporary glue code + } return null; } - /* clean up or start the worker threads */ @Override protected void done() { try { - super.get(); //block and get all exceptions thrown while doInBackground() - } catch (CancellationException e) { - //task was cancelled - handleInterruption(e); - } catch (InterruptedException ex) { + super.get(); + } + catch (CancellationException | InterruptedException | ExecutionException ex) { handleInterruption(ex); - } catch (ExecutionException ex) { + } + catch (Exception ex) { handleInterruption(ex); - - - } catch (Exception ex) { - handleInterruption(ex); - - } finally { - //queing end + } + finally { if (this.isCancelled()) { - //empty queues handleInterruption(new Exception()); - } else { - //start ingest workers + } + else { startAll(); } progress.finish(); } } - /** - * Create modules and schedule analysis - * @param modules Modules to load into pipeline - * @param inputs List of parent data sources - */ - private void queueAll(List modules, final List inputs) { - - int processed = 0; - for (Content input : inputs) { - final String inputName = input.getName(); - - // Create a new instance of the modules for each data source - final List dataSourceMods = new ArrayList(); - final List fileMods = new ArrayList(); - - for (IngestModuleAbstract module : modules) { - if (isCancelled()) { - logger.log(Level.INFO, "Terminating ingest queueing due to cancellation."); - return; - } - - final String moduleName = module.getName(); - progress.progress(moduleName + " " + inputName, processed); - - switch (module.getType()) { - case DataSource: - final IngestModuleDataSource newModuleInstance = - (IngestModuleDataSource) moduleLoader.getNewIngestModuleInstance(module); - if (newModuleInstance != null) { - dataSourceMods.add(newModuleInstance); - } else { - logger.log(Level.INFO, "Error loading module and adding input " + 
inputName - + " with module " + module.getName()); - } - break; - - case AbstractFile: - //enqueue the same singleton AbstractFile module - logger.log(Level.INFO, "Adding input " + inputName - + " for AbstractFileModule " + module.getName()); - - fileMods.add((IngestModuleAbstractFile) module); - break; - - default: - logger.log(Level.SEVERE, "Unexpected module type: " + module.getType().name()); - } - - }//for modules - - - /* Schedule the data source-level ingest modules for this data source */ - final DataSourceTask dataSourceTask = - new DataSourceTask(input, dataSourceMods, getProcessUnallocSpace()); - - - logger.log(Level.INFO, "Queing data source ingest task: " + dataSourceTask); - progress.progress("DataSource Ingest" + " " + inputName, processed); - final IngestScheduler.DataSourceScheduler dataSourceScheduler = scheduler.getDataSourceScheduler(); - dataSourceScheduler.schedule(dataSourceTask); - progress.progress("DataSource Ingest" + " " + inputName, ++processed); - - - /* Schedule the file-level ingest modules for the children of the data source */ - final DataSourceTask fTask = - new DataSourceTask(input, fileMods, getProcessUnallocSpace()); - - logger.log(Level.INFO, "Queing file ingest task: " + fTask); - progress.progress("File Ingest" + " " + inputName, processed); - final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); - fileScheduler.schedule(fTask); - progress.progress("File Ingest" + " " + inputName, ++processed); - - } //for data sources - } - private void handleInterruption(Exception ex) { - logger.log(Level.SEVERE, "Error while enqueing files. ", ex); - //empty queues + logger.log(Level.SEVERE, "Error while enqueing files. ", ex); // RJCTODO: Not really, could be routine cancellation scheduler.getFileScheduler().empty(); scheduler.getDataSourceScheduler().empty(); } } - private class FileIngester implements Runnable { + // RJCTODO: This is a work in progress, the replacement for IngestAbstractFileProcessor + private class FileIngestWorker implements Runnable { + private final int id; + + FileIngestWorker(int id) { + this.id = id; + } + @Override public void run() { - final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); + IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler(); while (fileScheduler.hasNext()) { - final FileTask fileTask = fileScheduler.next(); - // RJCTODO: fileTask.execute(thread id); - // In this method, the file task get the IngestPipelines object for - // the DataSourceTask and calls process(AbstractFile, thread id) - // The thread id allows the IngestPipelines to select the copy of the - // file ingest pipeline that is to be used by this thread. - // When the execute method completes, the scheduler needs to be notified... - // it asppears this is being done with an event. - // When the scheduler is all done with a DataSourceTask, all of the - // pipelines need a complete() call for thweir modules; stop also needs to - // be handled. 
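// ---[ Editor's sketch, not part of this patch ]----------------------------
// How the parallel file ingest this series is building toward might fan out
// over the FileIngestWorker Runnable defined here. Pool sizing and shutdown
// policy are hypothetical choices, not something this patch commits to:
//
//     int workerCount = Runtime.getRuntime().availableProcessors();
//     ExecutorService pool = Executors.newFixedThreadPool(workerCount);
//     for (int i = 0; i < workerCount; ++i) {
//         pool.execute(new FileIngestWorker(i)); // workers drain the shared file scheduler
//     }
//     pool.shutdown(); // lets workers exit once the scheduler runs dry
// ---[ end sketch ]----------------------------------------------------------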
- } - logger.log(Level.INFO, "IngestManager: Finished processing files"); + FileTask fileTask = fileScheduler.next(); + fileTask.execute(id); + } + +// logger.log(Level.INFO, "IngestManager: Finished processing files"); // RJCTODO: Not true } } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessage.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessage.java index de0da0cae3..42d18f8dc5 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessage.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessage.java @@ -40,7 +40,7 @@ public class IngestMessage { private long ID; private MessageType messageType; - private IngestModuleAbstract source; + private IngestModule source; private String subject; private String detailsHtml; private String uniqueKey; @@ -52,7 +52,7 @@ public class IngestMessage { /** * Private constructor used by factory methods */ - private IngestMessage(long ID, MessageType messageType, IngestModuleAbstract source, String subject, String detailsHtml, String uniqueKey) { + private IngestMessage(long ID, MessageType messageType, IngestModule source, String subject, String detailsHtml, String uniqueKey) { this.ID = ID; this.source = source; this.messageType = messageType; @@ -69,7 +69,7 @@ public class IngestMessage { return ID; } - public IngestModuleAbstract getSource() { + public IngestModule getSource() { return source; } @@ -104,7 +104,7 @@ public class IngestMessage { sb.append("type: ").append(messageType.name()); if (source != null) //can be null for manager messages { - sb.append(" source: ").append(source.getName()); + sb.append(" source: ").append(source.getDisplayName()); } sb.append(" date: ").append(dateFormat.format(datePosted)); sb.append(" subject: ").append(subject); @@ -174,7 +174,7 @@ public class IngestMessage { * @param detailsHtml html formatted detailed message (without leading and closing <html> tags), for instance, a human-readable representation of the data. Or null. * @return */ - public static IngestMessage createMessage(long ID, MessageType messageType, IngestModuleAbstract source, String subject, String detailsHtml) { + public static IngestMessage createMessage(long ID, MessageType messageType, IngestModule source, String subject, String detailsHtml) { if (messageType == null || source == null || subject == null) { throw new IllegalArgumentException("message type, source and subject cannot be null"); } @@ -189,7 +189,7 @@ public class IngestMessage { * @param subject message subject to be displayed * @return */ - public static IngestMessage createMessage(long ID, MessageType messageType, IngestModuleAbstract source, String subject) { + public static IngestMessage createMessage(long ID, MessageType messageType, IngestModule source, String subject) { return createMessage(ID, messageType, source, subject, null); } @@ -202,7 +202,7 @@ public class IngestMessage { * @param detailsHtml html formatted detailed message (without leading and closing <html> tags), for instance, a human-readable representation of the data. 
Or null * @return */ - public static IngestMessage createErrorMessage(long ID, IngestModuleAbstract source, String subject, String detailsHtml) { + public static IngestMessage createErrorMessage(long ID, IngestModule source, String subject, String detailsHtml) { if (source == null || subject == null) { throw new IllegalArgumentException("source and subject cannot be null"); } @@ -217,7 +217,7 @@ public class IngestMessage { * @param detailsHtml html formatted detailed message (without leading and closing <html> tags), for instance, a human-readable representation of the data. Or null * @return */ - public static IngestMessage createWarningMessage(long ID, IngestModuleAbstract source, String subject, String detailsHtml) { + public static IngestMessage createWarningMessage(long ID, IngestModule source, String subject, String detailsHtml) { if (source == null || subject == null) { throw new IllegalArgumentException("source and subject cannot be null"); } @@ -234,7 +234,7 @@ public class IngestMessage { * @param data blackboard artifact associated with the message, the same as fired in ModuleDataEvent by the module * @return */ - public static IngestMessage createDataMessage(long ID, IngestModuleAbstract source, String subject, String detailsHtml, String uniqueKey, BlackboardArtifact data) { + public static IngestMessage createDataMessage(long ID, IngestModule source, String subject, String detailsHtml, String uniqueKey, BlackboardArtifact data) { if (source == null || subject == null || detailsHtml == null || data == null) { throw new IllegalArgumentException("source, subject, details and data cannot be null"); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index 782737bb15..df48c91d80 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -366,7 +366,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { private String[] columnNames = new String[]{"Module", "Num", "New?", "Subject", "Timestamp"}; private List messageData = new ArrayList(); //for keeping track of messages to group, per module, by uniqness - private Map>> groupings = new HashMap>>(); + private Map>> groupings = new HashMap>>(); private boolean chronoSort = true; //chronological sort default private static final int MESSAGE_GROUP_THRESH = 3; //group messages after 3 messages per module with same uniqness private Logger logger = Logger.getLogger(MessageTableModel.class.getName()); @@ -378,12 +378,13 @@ class IngestMessagePanel extends JPanel implements TableModelListener { private void init() { final IngestManager manager = IngestManager.getDefault(); //initialize groupings map with modules - for (IngestModuleAbstract module : manager.enumerateAbstractFileModules()) { - groupings.put(module, new HashMap>()); - } - for (IngestModuleAbstract module : manager.enumerateDataSourceModules()) { - groupings.put(module, new HashMap>()); - } + // RJCTODO +// for (IngestModuleAbstract module : manager.enumerateAbstractFileModules()) { +// groupings.put(module, new HashMap>()); +// } +// for (IngestModuleAbstract module : manager.enumerateDataSourceModules()) { +// groupings.put(module, new HashMap>()); +// } } @@ -470,7 +471,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { switch (columnIndex) { case 0: Object module = entry.messageGroup.getSource(); - ret = module == null ? 
"" : entry.messageGroup.getSource().getName(); + ret = module == null ? "" : entry.messageGroup.getSource().getDisplayName(); break; case 1: ret = entry.messageGroup.getCount(); @@ -517,7 +518,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { synchronized public void addMessage(IngestMessage m) { //check how many messages per module with the same uniqness //and add to existing group or create a new group - IngestModuleAbstract module = m.getSource(); + IngestModule module = m.getSource(); IngestMessageGroup messageGroup = null; if (module != null && m.getMessageType() == IngestMessage.MessageType.DATA) { //not a manager message, a data message, then group @@ -781,7 +782,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { /* * return source module, should be the same for all msgs */ - IngestModuleAbstract getSource() { + IngestModule getSource() { return messages.get(0).getSource(); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java index 4a2653effc..0f851c6369 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java @@ -25,21 +25,33 @@ package org.sleuthkit.autopsy.ingest; public interface IngestModule { /** - * Invoked to allow an ingest module to set up internal data structures and - * acquire any private resources it will need during a single ingest of a - * particular data source. IMPORTANT: There will usually be more than one - * instance of a module executing, but it is guaranteed that there will be - * no more than one instance of the module per thread. However, if these - * instances must share resources, the modules are responsible for - * synchronizing access to the shared resources and doing reference counting - * as required to release the resources correctly. - * @param dataSourceTaskId A module that uses the scheduling service to - * schedule additional processing needs to supply its data source task ID to - * the scheduler. For example, a module that extracts files from an archive - * discovered in a data source may schedule ingest of those files using the - * data source task ID. + * Invoked to obtain a display name for the module, i.e., a name that is + * suitable for presentation to a user in a user interface component or a + * log message. + * + * @return The display name of the module */ - void init(long dataSourceTaskId); + String getDisplayName(); + + /** + * Invoked to allow an ingest module to set up internal data structures and + * acquire any private resources it will need during ingest of a single + * data source. There will usually be more than one instance of a module + * executing, but it is guaranteed that there will be no more than one + * instance of the module per thread. If these instances must share + * resources, the modules are responsible for synchronizing access to the + * shared resources and doing reference counting as required to release + * the resources correctly. + *
<p>
+ * A module that uses the scheduling service to schedule additional + * processing needs to supply the task ID passed to this method to the + * scheduler. For example, a module that extracts files from an archive + * discovered in a data source may schedule ingest of those files using + * the task ID. + * + * @param taskId To be used to schedule ingest of derived files + */ + void init(long taskId); /** * Invoked when a single ingest of a particular data source is completed. @@ -52,7 +64,7 @@ public interface IngestModule { /** * Invoked when a single ingest of a particular data source is canceled. * The module should tear down internal data sources and release private - * resources, discard unsubmitted results, and post a final ingest message. + * resources, discard unrecorded results, and post a final ingest message. * The module will be discarded when this method returns. */ void stop(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstract.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstract.java deleted file mode 100644 index 6bf6b36f57..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstract.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011-2012 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.sleuthkit.autopsy.ingest; - - -/** - * Base interface for ingest modules - */ - abstract class IngestModuleAbstract { - - private String args; - - /** - * Possible module types for the implementing classes - */ - public enum ModuleType { - /** - * DataSource type module - */ - DataSource, - - /** - * AbstractFile type module - */ - AbstractFile - }; - - /** - * Invoked every time an ingest session is started by the framework. - * A module should support multiple invocations of init() throughout the application life-cycle. - * In this method, the module should reinitialize its internal objects and resources and get them ready - * for a brand new ingest processing. - * - * Here are some things you may do in this method if you'll need them later. - * - Get a handle to the ingest services using org.sleuthkit.autopsy.ingest.IngestServices.getDefault(). - * - Get the current case using org.sleuthkit.autopsy.ingest.IngestServices.getCurrentSleuthkitCaseDb(). - * - * NEVER initialize IngestServices handle in the member declaration, because it might result - * in multiple instances of the singleton -- different class loaders are used in different modules. - * @param initContext context used to initialize some modules - */ - abstract public void init(IngestModuleInit initContext); - - /** - * Invoked when an ingest session completes. - * The module should perform any resource (files, handles, caches) - * cleanup in this method and submit final results and post a final ingest inbox message. - */ - abstract public void complete(); - - /** - * Invoked on a module when an ingest session is interrupted by the user or system. 
- * The method implementation should be similar to complete() in that the - * module should perform any cleanup work. - * If there is pending data to be processed or pending results to be reported by the module - * then the results should be rejected and ignored and the method should return as early as possible. - * It should ensure it is in a defined state so that ingest can be rerun later. - */ - abstract public void stop(); - - /** - * Returns unique name of the module. Should not have collisions. - * @return unique module name - */ - abstract public String getName(); - - /** - * Gets the module version - * @return module version string - */ - abstract public String getVersion(); - - /** - * Gets user-friendly description of the module - * @return module description - */ - abstract public String getDescription(); - - /** - * Returns type of the module (data source-level or file-level) - * @return module type - */ - abstract public ModuleType getType(); - - - /** - * A module can manage and use additional threads to perform some work in the background. - * This method provides insight to the manager if the module has truly completed its work or not. - * - * - * @return true if any background threads/workers managed by this module are still running or are pending to be run, - * false if all work has been done, or if background threads are not used/managed by this module - */ - abstract public boolean hasBackgroundJobsRunning(); - - - /** - * Used to determine if a module has implemented a simple (run-time) - * configuration panel that is displayed by the ingest manager. - * - * @return true if this module has a simple (run-time) configuration - */ - public boolean hasSimpleConfiguration() { - return false; - } - - /** - * Used to determine if a module has implemented an advanced (general) - * configuration that can be used for more in-depth module configuration. - * - * @return true if this module has an advanced configuration - */ - public boolean hasAdvancedConfiguration() { - return false; - } - - /** - * Called by the ingest manager if the simple (run-time) configuration - * panel should save its current state so that the settings can be used - * during the ingest. - */ - public void saveSimpleConfiguration() {} - - /** - * If module implements advanced configuration panel - * it should read its current state and make it persistent / save it in this method - * so that the new configuration will be in effect during the ingest. - */ - public void saveAdvancedConfiguration() {} - - - /** - * Returns a panel that displays the simple (run-time) configuration for the - * given configuration context (such as pipeline instance). This is - * presented to the user before ingest starts and only basic settings should - * be given here. Use the advanced (general) configuration panel for more - * in-depth interfaces. The module (or its configuration controller object) - * is responsible for preserving / saving its configuration state In - * addition, saveSimpleConfiguration() can be used as the trigger. - * - * @param context the configuration context to use in the panel - * @return JPanel containing basic configuration widgets or null if simple - * configuration is not available - */ - public javax.swing.JPanel getSimpleConfiguration(String context) { - return null; - } - - /** - * Returns a panel that displays the advanced (run-time) configuration for - * the given configuration context (such as pipeline instance). 
Implements - * advanced module configuration exposed to the user before ingest starts. - * - * The module (or its configuration controller object) is responsible for - * preserving / saving its configuration state In addition, - * saveAdvancedConfiguration() can be used as the trigger. - * - * @param context the configuration context to use in the panel - * @return JPanel containing advanced configuration widgets or null if - * advanced configuration is not available - */ - public javax.swing.JPanel getAdvancedConfiguration(String context) { - return null; - } - } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstractFile.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstractFile.java deleted file mode 100644 index aa836763f6..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAbstractFile.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.ingest; - -import org.sleuthkit.datamodel.AbstractFile; - -/** - * Ingest module interface that will be called for every file in the data source Content - */ -public abstract class IngestModuleAbstractFile extends IngestModuleAbstract { - - /** - * Return value resulting from processing AbstractFile - * If ERROR, can be used subsequent module - * in the pipeline as a hint to stop processing the file - */ - public enum ProcessResult { - OK, ///< Indicates that processing was successful (including if the file was largely ignored by the module) - ERROR, ///< Indicates that an error was encountered while processing the file, hint for later modules that depend on this module to skip processing the file due to error condition (such as file could not be read) - UNKNOWN ///< Indicates that a return value for the module is not known. This should not be returned directly by modules, but is used to indicate the module has not set its return value (e.g. it never ran) - }; - - @Override - public ModuleType getType() { - return ModuleType.AbstractFile; - } - - /** - * Entry point to process file / directory by the module. - * - * @param pipelineContext the context in which the ingest runs (with its own settings, modules, etc) - * @param abstractFile file to process - * @return ProcessResult result of the processing that can be used in the pipeline as a hint whether to further process this file - */ - abstract public ProcessResult process(PipelineContextpipelineContext, AbstractFile abstractFile); -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java deleted file mode 100644 index 5ffdbc00b5..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011 Basis Technology Corp. 
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java
deleted file mode 100644
index 5ffdbc00b5..0000000000
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleDataSource.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2011 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.ingest;
-
-import org.sleuthkit.datamodel.Content;
-
-/**
- * Ingest module that acts on an entire image or set of logical files.
- * These modules are for analysis tasks that do not need to operate on every
- * file in the disk image or set of logical files.
- * A new instance of this module will be created for each data source that is
- * added. Therefore, data source-level modules can assume that the process()
- * method will be called at most once after init() is called.
- */
-public abstract class IngestModuleDataSource extends IngestModuleAbstract {
-
-    @Override
-    public ModuleType getType() {
-        return ModuleType.DataSource;
-    }
-
-    /**
-     * Called with the data source Content object to analyze.
-     *
-     * Modules typically use FileManager to get specific files to analyze.
-     *
-     * Results should be posted to the blackboard.
-     * The module should also send messages to the ingest inbox with
-     * interesting events (data, errors, warnings, infos).
-     * The module notifies data viewers by firing events using
-     * IngestManagerProxy.fireModuleDataEvent.
-     *
-     * The module will have its own progress bar while it is running, and it
-     * should update it with the IngestDataSourceWorkerController object.
-     *
-     * @param pipelineContext Context in which the ingest pipeline is running (settings, modules, etc.)
-     * @param dataSource data source to process (such as an Image, or a VirtualDirectory for logical files, etc.)
-     * @param controller Used to update the progress bar and to check if the task has been canceled.
-     */
-    abstract public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller);
-}
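[Editor's note: a sketch of the data-source-level contract being deleted above.
The "hosts" query is invented for illustration, and the progress / cancellation
methods on IngestDataSourceWorkerController are assumed from the javadoc above
rather than shown in this patch; Case and List imports are omitted.]

    @Override
    public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
        try {
            // Use FileManager to pull only the files this module cares about.
            List<AbstractFile> files = Case.getCurrentCase().getServices()
                    .getFileManager().findFiles(dataSource, "hosts");
            controller.switchToDeterminate(files.size()); // assumed progress API
            int processed = 0;
            for (AbstractFile file : files) {
                if (controller.isCancelled()) { // assumed cancellation check
                    return;
                }
                // ... analyze the file, post artifacts to the blackboard ...
                controller.progress(++processed);
            }
        } catch (TskCoreException ex) {
            // Report the error to the ingest inbox / logger.
        }
    }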
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
index e1c140e4ec..d8e33eed9b 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012 Basis Technology Corp.
+ * Copyright 2012-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.sleuthkit.autopsy.ingest;
 
 import java.beans.PropertyChangeListener;
@@ -66,6 +67,8 @@ import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+
+// RJCTODO: Rewrite comment, complete reworking of class
 /**
  * Class responsible for discovery and loading ingest modules specified in
  * pipeline XML file. Maintains a singleton instance. Requires restart of
@@ -91,54 +94,53 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
  * code refactored
  */
 final class IngestModuleLoader {
+
+    private static IngestModuleLoader instance;
+    private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName());
     private ArrayList<IngestModuleFactory> moduleFactories = new ArrayList<>();
-    private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml";
-    private static final String XSDFILE = "PipelineConfigSchema.xsd";
-    private String absFilePath;
-    private static IngestModuleLoader instance;
+    private PropertyChangeSupport pcs;
+//    private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml";
+//    private static final String XSDFILE = "PipelineConfigSchema.xsd";
+//    private String absFilePath;
     //raw XML pipeline representation for validation
-    private final List pipelinesXML;
+//    private final List pipelinesXML;
     //validated pipelines with instantiated modules
-    private final List filePipeline;
-    private final List dataSourcePipeline;
-    private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName());
-    private ClassLoader classLoader;
-    private PropertyChangeSupport pcs;
-    private static final String ENCODING = "UTF-8";
-    private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
-    private SimpleDateFormat dateFormatter;
+//    private final List filePipeline;
+//    private final List dataSourcePipeline;
+//    private ClassLoader classLoader;
+//    private static final String ENCODING = "UTF-8";
+//    private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss";
+//    private SimpleDateFormat dateFormatter;
     //used to specify default unique module order of autodiscovered modules
     //if not specified
-    private int numModDiscovered = 0;
-    private static String CUR_MODULES_DISCOVERED_SETTING = "curModulesDiscovered";
+//    private int numModDiscovered = 0;
+//    private static String CUR_MODULES_DISCOVERED_SETTING = "curModulesDiscovered";
     //events supported
     enum Event {
-        ModulesReloaded
     };
 
     private IngestModuleLoader() {
-        pipelinesXML = new ArrayList();
-        filePipeline = new ArrayList();
-        dataSourcePipeline = new ArrayList();
-        dateFormatter = new SimpleDateFormat(DATE_FORMAT);
-
-        String numModDiscoveredStr = ModuleSettings.getConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING);
-        if (numModDiscoveredStr != null) {
-            try {
-                numModDiscovered = Integer.valueOf(numModDiscoveredStr);
-            } catch (NumberFormatException e) {
-                numModDiscovered = 0;
-                logger.log(Level.WARNING, "Could not parse numModDiscovered setting, defaulting to 0", e);
-            }
-        }
+//        pipelinesXML = new ArrayList<>();
+//        filePipeline = new ArrayList();
+//        dataSourcePipeline = new ArrayList();
+//        dateFormatter = new SimpleDateFormat(DATE_FORMAT);
+//
+//        String numModDiscoveredStr = ModuleSettings.getConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING);
+//        if (numModDiscoveredStr != null) {
+//            try {
+//                numModDiscovered = Integer.valueOf(numModDiscoveredStr);
+//            } catch (NumberFormatException e) {
+//                numModDiscovered = 0;
+//                logger.log(Level.WARNING, "Could not parse numModDiscovered setting, defaulting to 0", e);
+//            }
+//        }
        pcs = new PropertyChangeSupport(this);
-        registerModulesChange();
+//        registerModulesChange();
     }
 
-    synchronized static IngestModuleLoader getDefault() throws IngestModuleLoaderException {
+    synchronized static IngestModuleLoader getDefault() /*throws IngestModuleLoaderException*/ {
         if (instance == null) {
            logger.log(Level.INFO, "Creating ingest module loader 
instance"); instance = new IngestModuleLoader(); @@ -179,181 +181,181 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * * @throws IngestModuleLoaderException */ - private void validate() throws IngestModuleLoaderException { - for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) { - boolean pipelineErrors = false; +// private void validate() throws IngestModuleLoaderException { +// for (IngestModuleLoader.IngestPipelineXMLDescriptor pRaw : pipelinesXML) { +// boolean pipelineErrors = false; +// +// //check pipelineType +// String pipelineType = pRaw.type; +// +// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pType = null; +// +// try { +// pType = IngestModuleLoader.IngestPipelineXMLDescriptor.getPipelineType(pipelineType); +// } catch (IllegalArgumentException e) { +// pipelineErrors = true; +// logger.log(Level.SEVERE, "Unknown pipeline type: " + pipelineType); +// +// } +// //ordering store +// Map orderings = new HashMap(); +// +// for (IngestModuleLoader.IngestModuleXMLDescriptor pMod : pRaw.modules) { +// boolean moduleErrors = false; +// +// //record ordering for validation +// int order = pMod.order; +// if (orderings.containsKey(order)) { +// orderings.put(order, orderings.get(order) + 1); +// } else { +// orderings.put(order, 1); +// } +// +// //check pipelineType +// String modType = pMod.type; +// if (!modType.equals(IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString())) { +// moduleErrors = true; +// logger.log(Level.SEVERE, "Unknown module type: " + modType); +// } +// +// //classes exist and interfaces implemented +// String location = pMod.location; +// try { +// //netbeans uses custom class loader, otherwise can't load classes from other modules +// +// final Class moduleClass = Class.forName(location, false, classLoader); +// final Type intf = moduleClass.getGenericSuperclass(); +// +// if (pType != null) { +// Class moduleMeta = ((IngestModuleMapping) pType).getIngestModuleInterface(); +// String moduleIntNameCan = moduleMeta.getCanonicalName(); +// String[] moduleIntNameTok = moduleIntNameCan.split(" "); +// String moduleIntName = moduleIntNameTok[moduleIntNameTok.length - 1]; +// +// String intNameCan = intf.toString(); +// String[] intNameCanTok = intNameCan.split(" "); +// String intName = intNameCanTok[intNameCanTok.length - 1]; +// if (!intName.equals(moduleIntName)) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location +// + " does not implement correct interface: " + moduleMeta.getName() +// + " required for pipeline: " + pType.toString() +// + ", module will not be active."); +// } +// } else { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " does not implement any interface, module will not be active."); +// } +// +// //if file module: check if has public static getDefault() +// if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.FILE_ANALYSIS) { +// try { +// Method getDefaultMethod = moduleClass.getMethod("getDefault"); +// int modifiers = getDefaultMethod.getModifiers(); +// if (!(Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers))) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " does not implement public static getDefault() singleton method."); +// } +// if (!getDefaultMethod.getReturnType().equals(moduleClass)) { +// logger.log(Level.WARNING, "Module class: " + location + " getDefault() singleton method should return the module class instance: " + 
moduleClass.getName()); +// } +// +// } catch (NoSuchMethodException ex) { +// Exceptions.printStackTrace(ex); +// } catch (SecurityException ex) { +// Exceptions.printStackTrace(ex); +// } +// } //if data source module: check if has public constructor with no args +// else if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS) { +// try { +// Constructor constr = moduleClass.getConstructor(); +// int modifiers = constr.getModifiers(); +// if (!Modifier.isPublic(modifiers)) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); +// } +// } catch (NoSuchMethodException ex) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); +// } catch (SecurityException ex) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); +// } +// } +// +// } catch (ClassNotFoundException ex) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " not found, module will not be active."); +// +// } catch (LinkageError le) { +// moduleErrors = true; +// logger.log(Level.WARNING, "Module class: " + location + " has unresolved symbols, module will not be active.", le); +// } +// +// +// //validate ordering +// for (int o : orderings.keySet()) { +// int count = orderings.get(o); +// if (count > 1) { +// pipelineErrors = true; +// logger.log(Level.SEVERE, "Pipeline " + pipelineType + " invalid non-unique ordering of modules, order: " + o); +// } +// } +// +// pMod.valid = !moduleErrors; +// logger.log(Level.INFO, "Module " + pMod.location + " valid: " + pMod.valid); +// } //end module +// +// pRaw.valid = !pipelineErrors; +// logger.log(Level.INFO, "Pipeline " + pType + " valid: " + pRaw.valid); +// } //end pipeline +// +// } - //check pipelineType - String pipelineType = pRaw.type; - - IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = null; - - try { - pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pipelineType); - } catch (IllegalArgumentException e) { - pipelineErrors = true; - logger.log(Level.SEVERE, "Unknown pipeline type: " + pipelineType); - - } - //ordering store - Map orderings = new HashMap(); - - for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) { - boolean moduleErrors = false; - - //record ordering for validation - int order = pMod.order; - if (orderings.containsKey(order)) { - orderings.put(order, orderings.get(order) + 1); - } else { - orderings.put(order, 1); - } - - //check pipelineType - String modType = pMod.type; - if (!modType.equals(IngestModuleLoader.XmlModuleRaw.MODULE_TYPE.PLUGIN.toString())) { - moduleErrors = true; - logger.log(Level.SEVERE, "Unknown module type: " + modType); - } - - //classes exist and interfaces implemented - String location = pMod.location; - try { - //netbeans uses custom class loader, otherwise can't load classes from other modules - - final Class moduleClass = Class.forName(location, false, classLoader); - final Type intf = moduleClass.getGenericSuperclass(); - - if (pType != null) { - Class moduleMeta = ((IngestModuleMapping) pType).getIngestModuleInterface(); - String moduleIntNameCan = moduleMeta.getCanonicalName(); - String[] moduleIntNameTok = moduleIntNameCan.split(" "); - String moduleIntName = moduleIntNameTok[moduleIntNameTok.length - 1]; - - String intNameCan = intf.toString(); - String[] intNameCanTok = intNameCan.split(" "); - String intName = 
intNameCanTok[intNameCanTok.length - 1]; - if (!intName.equals(moduleIntName)) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location - + " does not implement correct interface: " + moduleMeta.getName() - + " required for pipeline: " + pType.toString() - + ", module will not be active."); - } - } else { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " does not implement any interface, module will not be active."); - } - - //if file module: check if has public static getDefault() - if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS) { - try { - Method getDefaultMethod = moduleClass.getMethod("getDefault"); - int modifiers = getDefaultMethod.getModifiers(); - if (!(Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers))) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " does not implement public static getDefault() singleton method."); - } - if (!getDefaultMethod.getReturnType().equals(moduleClass)) { - logger.log(Level.WARNING, "Module class: " + location + " getDefault() singleton method should return the module class instance: " + moduleClass.getName()); - } - - } catch (NoSuchMethodException ex) { - Exceptions.printStackTrace(ex); - } catch (SecurityException ex) { - Exceptions.printStackTrace(ex); - } - } //if data source module: check if has public constructor with no args - else if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS) { - try { - Constructor constr = moduleClass.getConstructor(); - int modifiers = constr.getModifiers(); - if (!Modifier.isPublic(modifiers)) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); - } - } catch (NoSuchMethodException ex) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); - } catch (SecurityException ex) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); - } - } - - } catch (ClassNotFoundException ex) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " not found, module will not be active."); - - } catch (LinkageError le) { - moduleErrors = true; - logger.log(Level.WARNING, "Module class: " + location + " has unresolved symbols, module will not be active.", le); - } - - - //validate ordering - for (int o : orderings.keySet()) { - int count = orderings.get(o); - if (count > 1) { - pipelineErrors = true; - logger.log(Level.SEVERE, "Pipeline " + pipelineType + " invalid non-unique ordering of modules, order: " + o); - } - } - - pMod.valid = !moduleErrors; - logger.log(Level.INFO, "Module " + pMod.location + " valid: " + pMod.valid); - } //end module - - pRaw.valid = !pipelineErrors; - logger.log(Level.INFO, "Pipeline " + pType + " valid: " + pRaw.valid); - } //end pipeline - - } - - private Set getJarPaths(String modulesDir) { - Set urls = new HashSet(); - - final File modulesDirF = new File(modulesDir); - FilenameFilter jarFilter = new FilenameFilter() { - @Override - public boolean accept(File dir, String name) { - return dir.equals(modulesDirF) && name.endsWith(".jar"); - } - }; - File[] dirJars = modulesDirF.listFiles(jarFilter); - if (dirJars != null) { - //modules dir exists - for (int i = 0; i < dirJars.length; ++i) { - String urlPath = "file:/" + dirJars[i].getAbsolutePath(); - try { - urlPath = URLDecoder.decode(urlPath, ENCODING); - } catch 
(UnsupportedEncodingException ex) { - logger.log(Level.SEVERE, "Could not decode file path. ", ex); - } - - try { - urls.add(new URL(urlPath)); - //logger.log(Level.INFO, "JAR: " + urlPath); - } catch (MalformedURLException ex) { - logger.log(Level.WARNING, "Invalid URL: " + urlPath, ex); - } - } - } - - /* - * netbeans way, but not public API - org.openide.filesystems.Repository defaultRepository = Repository.getDefault(); - FileSystem masterFilesystem = defaultRepository.getDefaultFileSystem(); - org.netbeans.core.startup.ModuleSystem moduleSystem = new org.netbeans.core.startup.ModuleSystem(masterFilesystem); - List jars = moduleSystem.getModuleJars(); - for (File jar : jars) { - logger.log(Level.INFO, " JAR2: " + jar.getAbsolutePath()); - } - //org.netbeans.ModuleManager moduleManager = moduleSystem.getManager(); - */ - - return urls; - } +// private Set getJarPaths(String modulesDir) { +// Set urls = new HashSet(); +// +// final File modulesDirF = new File(modulesDir); +// FilenameFilter jarFilter = new FilenameFilter() { +// @Override +// public boolean accept(File dir, String name) { +// return dir.equals(modulesDirF) && name.endsWith(".jar"); +// } +// }; +// File[] dirJars = modulesDirF.listFiles(jarFilter); +// if (dirJars != null) { +// //modules dir exists +// for (int i = 0; i < dirJars.length; ++i) { +// String urlPath = "file:/" + dirJars[i].getAbsolutePath(); +// try { +// urlPath = URLDecoder.decode(urlPath, ENCODING); +// } catch (UnsupportedEncodingException ex) { +// logger.log(Level.SEVERE, "Could not decode file path. ", ex); +// } +// +// try { +// urls.add(new URL(urlPath)); +// //logger.log(Level.INFO, "JAR: " + urlPath); +// } catch (MalformedURLException ex) { +// logger.log(Level.WARNING, "Invalid URL: " + urlPath, ex); +// } +// } +// } +// +// /* +// * netbeans way, but not public API +// org.openide.filesystems.Repository defaultRepository = Repository.getDefault(); +// FileSystem masterFilesystem = defaultRepository.getDefaultFileSystem(); +// org.netbeans.core.startup.ModuleSystem moduleSystem = new org.netbeans.core.startup.ModuleSystem(masterFilesystem); +// List jars = moduleSystem.getModuleJars(); +// for (File jar : jars) { +// logger.log(Level.INFO, " JAR2: " + jar.getAbsolutePath()); +// } +// //org.netbeans.ModuleManager moduleManager = moduleSystem.getManager(); +// */ +// +// return urls; +// } /** * Get jar paths of autodiscovered modules @@ -361,100 +363,100 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * @param moduleInfos to look into to discover module jar paths * @return */ - private Set getJarPaths(Collection moduleInfos) { - Set urls = new HashSet(); - - //TODO lookup module jar file paths by "seed" class or resource, using the module loader - //problem: we don't have a reliable "seed" class in every moduke - //and loading by Bundle.properties resource does not seem to work with the module class loader - //for now hardcoding jar file locations - - /* - for (ModuleInfo moduleInfo : moduleInfos) { - - if (moduleInfo.isEnabled() == false) { - continue; - } - - String basePackageName = moduleInfo.getCodeNameBase(); - if (basePackageName.startsWith("org.netbeans") - || basePackageName.startsWith("org.openide")) { - //skip - continue; - } - - - ClassLoader moduleClassLoader = moduleInfo.getClassLoader(); - - URL modURL = moduleClassLoader.getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL1 : " + modURL); - - modURL = moduleClassLoader.getParent().getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL2 
: " + modURL); - - modURL = classLoader.getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL3 : " + modURL); - } */ - /* - URL modURL = moduleClassLoader.getParent().getResource("Bundle.properties"); - //URL modURL = classLoader.getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL : " + modURL); - - modURL = moduleClassLoader.getResource(basePackageName + ".Bundle.properties"); - //URL modURL = classLoader.getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL : " + modURL); - - modURL = moduleClassLoader.getResource("Bundle"); - //URL modURL = classLoader.getResource(basePackageName); - logger.log(Level.INFO, "GOT MOD URL : " + modURL); - - Class modClass; - try { - modClass = classLoader.loadClass(basePackageName + ".Installer"); - URL modURL2 = modClass.getProtectionDomain().getCodeSource().getLocation(); - logger.log(Level.INFO, "GOT MOD URL2 : " + modURL2); - } catch (ClassNotFoundException ex) { - // Exceptions.printStackTrace(ex); - } - try { - Class moduleBundleClass = - Class.forName(basePackageName, false, classLoader); - URL modURL3 = moduleBundleClass.getProtectionDomain().getCodeSource().getLocation(); - logger.log(Level.INFO, "GOT MOD URL3 : " + modURL3); - } catch (ClassNotFoundException ex) { - // Exceptions.printStackTrace(ex); - } - - - URL urltry; - try { - urltry = moduleClassLoader.loadClass("Bundle").getProtectionDomain().getCodeSource().getLocation(); - logger.log(Level.INFO, "GOT TRY URL : " + urltry); - } catch (ClassNotFoundException ex) { - // Exceptions.printStackTrace(ex); - } - - } - * */ - - //core modules - urls.addAll(getJarPaths(PlatformUtil.getInstallModulesPath())); - - //user modules - urls.addAll(getJarPaths(PlatformUtil.getUserModulesPath())); - - // add other project dirs, such as from external modules - for (String projectDir : PlatformUtil.getProjectsDirs()) { - File modules = new File(projectDir + File.separator + "modules"); - if (modules.exists()) { - urls.addAll(getJarPaths(modules.getAbsolutePath())); - } - } - - - - return urls; - } +// private Set getJarPaths(Collection moduleInfos) { +// Set urls = new HashSet(); +// +// //TODO lookup module jar file paths by "seed" class or resource, using the module loader +// //problem: we don't have a reliable "seed" class in every moduke +// //and loading by Bundle.properties resource does not seem to work with the module class loader +// //for now hardcoding jar file locations +// +// /* +// for (ModuleInfo moduleInfo : moduleInfos) { +// +// if (moduleInfo.isEnabled() == false) { +// continue; +// } +// +// String basePackageName = moduleInfo.getCodeNameBase(); +// if (basePackageName.startsWith("org.netbeans") +// || basePackageName.startsWith("org.openide")) { +// //skip +// continue; +// } +// +// +// ClassLoader moduleClassLoader = moduleInfo.getClassLoader(); +// +// URL modURL = moduleClassLoader.getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL1 : " + modURL); +// +// modURL = moduleClassLoader.getParent().getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL); +// +// modURL = classLoader.getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL); +// } */ +// /* +// URL modURL = moduleClassLoader.getParent().getResource("Bundle.properties"); +// //URL modURL = classLoader.getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL : " + modURL); +// +// modURL = moduleClassLoader.getResource(basePackageName + ".Bundle.properties"); +// //URL modURL = 
classLoader.getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL : " + modURL); +// +// modURL = moduleClassLoader.getResource("Bundle"); +// //URL modURL = classLoader.getResource(basePackageName); +// logger.log(Level.INFO, "GOT MOD URL : " + modURL); +// +// Class modClass; +// try { +// modClass = classLoader.loadClass(basePackageName + ".Installer"); +// URL modURL2 = modClass.getProtectionDomain().getCodeSource().getLocation(); +// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL2); +// } catch (ClassNotFoundException ex) { +// // Exceptions.printStackTrace(ex); +// } +// try { +// Class moduleBundleClass = +// Class.forName(basePackageName, false, classLoader); +// URL modURL3 = moduleBundleClass.getProtectionDomain().getCodeSource().getLocation(); +// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL3); +// } catch (ClassNotFoundException ex) { +// // Exceptions.printStackTrace(ex); +// } +// +// +// URL urltry; +// try { +// urltry = moduleClassLoader.loadClass("Bundle").getProtectionDomain().getCodeSource().getLocation(); +// logger.log(Level.INFO, "GOT TRY URL : " + urltry); +// } catch (ClassNotFoundException ex) { +// // Exceptions.printStackTrace(ex); +// } +// +// } +// * */ +// +// //core modules +// urls.addAll(getJarPaths(PlatformUtil.getInstallModulesPath())); +// +// //user modules +// urls.addAll(getJarPaths(PlatformUtil.getUserModulesPath())); +// +// // add other project dirs, such as from external modules +// for (String projectDir : PlatformUtil.getProjectsDirs()) { +// File modules = new File(projectDir + File.separator + "modules"); +// if (modules.exists()) { +// urls.addAll(getJarPaths(modules.getAbsolutePath())); +// } +// } +// +// +// +// return urls; +// } List getIngestModuleFactories() { return moduleFactories; @@ -467,12 +469,12 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * * @throws IngestModuleLoaderException */ - @SuppressWarnings("unchecked") - private void autodiscover() throws IngestModuleLoaderException { +// @SuppressWarnings("unchecked") + private void autodiscover() /*throws IngestModuleLoaderException*/ { Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); for (IngestModuleFactory factory : factories) { - logger.log(Level.INFO, "Loaded ingest module factory: name = " + factory.getModuleDisplayName() + ", version = " + factory.getModuleVersionNumber()); + logger.log(Level.INFO, "Loaded ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()}); moduleFactories.add(factory); } @@ -635,9 +637,9 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * class path * @param newOrder new order to set */ - void setModuleOrder(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException { - throw new IngestModuleLoaderException("Not yet implemented"); - } +// void setModuleOrder(IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException { +// throw new IngestModuleLoaderException("Not yet implemented"); +// } /** * add autodiscovered module to raw pipeline to be validated and @@ -646,99 +648,100 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * @param moduleClass * @param pipelineType */ - private void addModuleToRawPipeline(Class moduleClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException { - 
String moduleLocation = moduleClass.getName(); - - IngestModuleLoader.XmlModuleRaw modRaw = new IngestModuleLoader.XmlModuleRaw(); - modRaw.arguments = ""; //default, no arguments - modRaw.location = moduleLocation; - modRaw.order = Integer.MAX_VALUE - (numModDiscovered++); //add to end - modRaw.type = IngestModuleLoader.XmlModuleRaw.MODULE_TYPE.PLUGIN.toString(); - modRaw.valid = false; //to be validated - - //save the current numModDiscovered - ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); - - //find the pipeline of that type - IngestModuleLoader.XmlPipelineRaw pipeline = null; - for (IngestModuleLoader.XmlPipelineRaw rawP : this.pipelinesXML) { - if (rawP.type.equals(pipelineType.toString())) { - pipeline = rawP; - break; - } - } - if (pipeline == null) { - throw new IngestModuleLoaderException("Could not find expected pipeline of type: " + pipelineType.toString() + ", cannot add autodiscovered module: " + moduleLocation); - } else { - pipeline.modules.add(modRaw); - logger.log(Level.INFO, "Added a new module " + moduleClass.getName() + " to pipeline " + pipelineType.toString()); - } - } +// private void addModuleToRawPipeline(Class moduleClass, IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException { +// String moduleLocation = moduleClass.getName(); +// +// IngestModuleLoader.IngestModuleXMLDescriptor modRaw = new IngestModuleLoader.IngestModuleXMLDescriptor(); +// modRaw.arguments = ""; //default, no arguments +// modRaw.location = moduleLocation; +// modRaw.order = Integer.MAX_VALUE - (numModDiscovered++); //add to end +// modRaw.type = IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString(); +// modRaw.valid = false; //to be validated +// +// //save the current numModDiscovered +// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); +// +// //find the pipeline of that type +// IngestModuleLoader.IngestPipelineXMLDescriptor pipeline = null; +// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { +// if (rawP.type.equals(pipelineType.toString())) { +// pipeline = rawP; +// break; +// } +// } +// if (pipeline == null) { +// throw new IngestModuleLoaderException("Could not find expected pipeline of type: " + pipelineType.toString() + ", cannot add autodiscovered module: " + moduleLocation); +// } else { +// pipeline.modules.add(modRaw); +// logger.log(Level.INFO, "Added a new module " + moduleClass.getName() + " to pipeline " + pipelineType.toString()); +// } +// } /** * Register a listener for module install/uninstall //TODO ensure that * module is actually loadable when Lookup event is fired */ - private void registerModulesChange() { - final Lookup.Result result = - Lookup.getDefault().lookupResult(ModuleInfo.class); - result.addLookupListener(new LookupListener() { - @Override - public void resultChanged(LookupEvent event) { - try { - logger.log(Level.INFO, "Module change occured, reloading."); - init(); - } catch (IngestModuleLoaderException ex) { - logger.log(Level.SEVERE, "Error reloading the module loader. 
", ex); - } - } - }); - } +// private void registerModulesChange() { +// final Lookup.Result result = +// Lookup.getDefault().lookupResult(ModuleInfo.class); +// result.addLookupListener(new LookupListener() { +// @Override +// public void resultChanged(LookupEvent event) { +// try { +// logger.log(Level.INFO, "Module change occured, reloading."); +// init(); +// } catch (IngestModuleLoaderException ex) { +// logger.log(Level.SEVERE, "Error reloading the module loader. ", ex); +// } +// } +// }); +// } + // RJCTODO: This is not used /** * Save the current in memory pipeline config, including autodiscovered * modules * * @throws IngestModuleLoaderException */ - public void save() throws IngestModuleLoaderException { - DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); - - try { - DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); - Document doc = docBuilder.newDocument(); - - - Comment comment = doc.createComment("Saved by: " + getClass().getName() - + " on: " + dateFormatter.format(System.currentTimeMillis())); - doc.appendChild(comment); - Element rootEl = doc.createElement(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_ROOT); - doc.appendChild(rootEl); - - for (IngestModuleLoader.XmlPipelineRaw rawP : this.pipelinesXML) { - Element pipelineEl = doc.createElement(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_EL); - pipelineEl.setAttribute(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR, rawP.type); - rootEl.appendChild(pipelineEl); - - for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { - Element moduleEl = doc.createElement(IngestModuleLoader.XmlModuleRaw.XML_MODULE_EL); - - moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_LOC_ATTR, rawM.location); - moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type); - moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order)); - moduleEl.setAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR, rawM.type); - - pipelineEl.appendChild(moduleEl); - } - } - - XMLUtil.saveDoc(IngestModuleLoader.class, absFilePath, ENCODING, doc); - logger.log(Level.INFO, "Pipeline configuration saved to: " + this.absFilePath); - } catch (ParserConfigurationException e) { - logger.log(Level.SEVERE, "Error saving pipeline config XML: can't initialize parser.", e); - } - - } +// public void save() throws IngestModuleLoaderException { +// DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); +// +// try { +// DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); +// Document doc = docBuilder.newDocument(); +// +// +// Comment comment = doc.createComment("Saved by: " + getClass().getName() +// + " on: " + dateFormatter.format(System.currentTimeMillis())); +// doc.appendChild(comment); +// Element rootEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_ROOT); +// doc.appendChild(rootEl); +// +// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { +// Element pipelineEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); +// pipelineEl.setAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR, rawP.type); +// rootEl.appendChild(pipelineEl); +// +// for (IngestModuleLoader.IngestModuleXMLDescriptor rawM : rawP.modules) { +// Element moduleEl = doc.createElement(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); +// +// 
moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR, rawM.location); +// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); +// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order)); +// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); +// +// pipelineEl.appendChild(moduleEl); +// } +// } +// +// XMLUtil.saveDoc(IngestModuleLoader.class, absFilePath, ENCODING, doc); +// logger.log(Level.INFO, "Pipeline configuration saved to: " + this.absFilePath); +// } catch (ParserConfigurationException e) { +// logger.log(Level.SEVERE, "Error saving pipeline config XML: can't initialize parser.", e); +// } +// +// } /** * Instantiate valid pipeline and modules and store the module object @@ -746,8 +749,8 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * * @throws IngestModuleLoaderException */ - @SuppressWarnings("unchecked") - private void instantiate() throws IngestModuleLoaderException { +// @SuppressWarnings("unchecked") + private void instantiate() /*throws IngestModuleLoaderException*/ { //clear current // filePipeline.clear(); @@ -849,97 +852,97 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * @param module existing module to get an instance of * @return new module instance or null if could not be created */ - IngestModuleAbstract getNewIngestModuleInstance(IngestModuleAbstract module) { - try { - IngestModuleAbstract newInstance = module.getClass().newInstance(); - return newInstance; - } catch (InstantiationException e) { - logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); - return null; - } catch (IllegalAccessException e) { - logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); - return null; - } +// IngestModuleAbstract getNewIngestModuleInstance(IngestModuleAbstract module) { +// try { +// IngestModuleAbstract newInstance = module.getClass().newInstance(); +// return newInstance; +// } catch (InstantiationException e) { +// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); +// return null; +// } catch (IllegalAccessException e) { +// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); +// return null; +// } +// +// } - } - - private IngestModuleAbstract getNewIngestModuleInstance(Class moduleClass) { - try { - IngestModuleAbstract newInstance = moduleClass.newInstance(); - return newInstance; - } catch (InstantiationException e) { - logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); - return null; - } catch (IllegalAccessException e) { - logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); - return null; - } - - } +// private IngestModuleAbstract getNewIngestModuleInstance(Class moduleClass) { +// try { +// IngestModuleAbstract newInstance = moduleClass.newInstance(); +// return newInstance; +// } catch (InstantiationException e) { +// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); +// return null; +// } catch (IllegalAccessException e) { +// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); +// return null; +// } +// +// } /** * Load XML into raw pipeline representation * * @throws IngestModuleLoaderException */ - private void loadRawPipeline() throws IngestModuleLoaderException { - final Document doc = 
XMLUtil.loadDoc(IngestModuleLoader.class, absFilePath, XSDFILE); - if (doc == null) { - throw new IngestModuleLoaderException("Could not load pipeline config XML: " + this.absFilePath); - } - Element root = doc.getDocumentElement(); - if (root == null) { - String msg = "Error loading pipeline configuration: invalid file format."; - logger.log(Level.SEVERE, msg); - throw new IngestModuleLoaderException(msg); - } - NodeList pipelineNodes = root.getElementsByTagName(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_EL); - int numPipelines = pipelineNodes.getLength(); - if (numPipelines == 0) { - throw new IngestModuleLoaderException("No pipelines found in the pipeline configuration: " + absFilePath); - } - for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { - //process pipelines - Element pipelineEl = (Element) pipelineNodes.item(pipelineNum); - final String pipelineType = pipelineEl.getAttribute(IngestModuleLoader.XmlPipelineRaw.XML_PIPELINE_TYPE_ATTR); - logger.log(Level.INFO, "Found pipeline type: " + pipelineType); - - IngestModuleLoader.XmlPipelineRaw pipelineRaw = new IngestModuleLoader.XmlPipelineRaw(); - pipelineRaw.type = pipelineType; - this.pipelinesXML.add(pipelineRaw); - - //process modules - NodeList modulesNodes = pipelineEl.getElementsByTagName(IngestModuleLoader.XmlModuleRaw.XML_MODULE_EL); - int numModules = modulesNodes.getLength(); - if (numModules == 0) { - logger.log(Level.WARNING, "Pipeline: " + pipelineType + " has no modules defined."); - } - for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { - //process modules - Element moduleEl = (Element) modulesNodes.item(moduleNum); - final String moduleType = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_TYPE_ATTR); - final String moduleOrder = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ORDER_ATTR); - final String moduleLoc = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_LOC_ATTR); - final String moduleArgs = moduleEl.getAttribute(IngestModuleLoader.XmlModuleRaw.XML_MODULE_ARGS_ATTR); - IngestModuleLoader.XmlModuleRaw module = new IngestModuleLoader.XmlModuleRaw(); - module.arguments = moduleArgs; - module.location = moduleLoc; - try { - module.order = Integer.parseInt(moduleOrder); - } catch (NumberFormatException e) { - logger.log(Level.WARNING, "Invalid module order, need integer: " + moduleOrder + ", adding to end of the list"); - module.order = Integer.MAX_VALUE - (numModDiscovered++); - //save the current numModDiscovered - ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); - - } - module.type = moduleType; - pipelineRaw.modules.add(module); - } - - } - - } +// private void loadRawPipeline() throws IngestModuleLoaderException { +// final Document doc = XMLUtil.loadDoc(IngestModuleLoader.class, absFilePath, XSDFILE); +// if (doc == null) { +// throw new IngestModuleLoaderException("Could not load pipeline config XML: " + this.absFilePath); +// } +// Element root = doc.getDocumentElement(); +// if (root == null) { +// String msg = "Error loading pipeline configuration: invalid file format."; +// logger.log(Level.SEVERE, msg); +// throw new IngestModuleLoaderException(msg); +// } +// NodeList pipelineNodes = root.getElementsByTagName(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); +// int numPipelines = pipelineNodes.getLength(); +// if (numPipelines == 0) { +// throw new IngestModuleLoaderException("No pipelines found in the pipeline 
configuration: " + absFilePath); +// } +// for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { +// //process pipelines +// Element pipelineEl = (Element) pipelineNodes.item(pipelineNum); +// final String pipelineType = pipelineEl.getAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR); +// logger.log(Level.INFO, "Found pipeline type: " + pipelineType); +// +// IngestModuleLoader.IngestPipelineXMLDescriptor pipelineRaw = new IngestModuleLoader.IngestPipelineXMLDescriptor(); +// pipelineRaw.type = pipelineType; +// this.pipelinesXML.add(pipelineRaw); +// +// //process modules +// NodeList modulesNodes = pipelineEl.getElementsByTagName(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); +// int numModules = modulesNodes.getLength(); +// if (numModules == 0) { +// logger.log(Level.WARNING, "Pipeline: " + pipelineType + " has no modules defined."); +// } +// for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { +// //process modules +// Element moduleEl = (Element) modulesNodes.item(moduleNum); +// final String moduleType = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR); +// final String moduleOrder = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR); +// final String moduleLoc = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR); +// final String moduleArgs = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ARGS_ATTR); +// IngestModuleLoader.IngestModuleXMLDescriptor module = new IngestModuleLoader.IngestModuleXMLDescriptor(); +// module.arguments = moduleArgs; +// module.location = moduleLoc; +// try { +// module.order = Integer.parseInt(moduleOrder); +// } catch (NumberFormatException e) { +// logger.log(Level.WARNING, "Invalid module order, need integer: " + moduleOrder + ", adding to end of the list"); +// module.order = Integer.MAX_VALUE - (numModDiscovered++); +// //save the current numModDiscovered +// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); +// +// } +// module.type = moduleType; +// pipelineRaw.modules.add(module); +// } +// +// } +// +// } /** * Load and validate XML pipeline, autodiscover and instantiate the pipeline @@ -947,7 +950,7 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; * * @throws IngestModuleLoaderException */ - public synchronized void init() throws IngestModuleLoaderException { + public synchronized void init() /*throws IngestModuleLoaderException*/ { // absFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINE_CONFIG_XML; // ClassLoader parentClassLoader = Lookup.getDefault().lookup(ClassLoader.class); // classLoader = new CustomClassLoader(parentClassLoader); @@ -966,146 +969,125 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; } - /** - * Get loaded file modules - * - * @return file modules loaded - */ - public List getAbstractFileIngestModules() { - return filePipeline; - } - - /** - * Get loaded data source modules - * - * @return data source modules loaded - */ - public List getDataSourceIngestModules() { - return dataSourcePipeline; - } - - //pipeline XML representation - private static final class XmlPipelineRaw { - - enum PIPELINE_TYPE implements IngestModuleMapping { - - FILE_ANALYSIS { - @Override - public String toString() { - return "FileAnalysis"; - } - - @Override - public Class getIngestModuleInterface() 
{ - return IngestModuleAbstractFile.class; - } - }, - DATA_SOURCE_ANALYSIS { - @Override - public String toString() { - return "ImageAnalysis"; - } - - @Override - public Class getIngestModuleInterface() { - return IngestModuleDataSource.class; - } - },; - } - - /** - * get pipeline type for string mapping to type toString() method - * - * @param s string equals to one of the types toString() representation - * @return matching type - */ - static IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException { - IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE[] types = IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.values(); - for (int i = 0; i < types.length; ++i) { - if (types[i].toString().equals(s)) { - return types[i]; - } - } - throw new IllegalArgumentException("No PIPELINE_TYPE for string: " + s); - } - private static final String XML_PIPELINE_ROOT = "PIPELINE_CONFIG"; - private static final String XML_PIPELINE_EL = "PIPELINE"; - private static final String XML_PIPELINE_TYPE_ATTR = "type"; - String type; - List modules = new ArrayList(); - boolean valid = false; // if passed validation - } - - private static final class XmlModuleRaw { - - enum MODULE_TYPE { - - PLUGIN { - @Override - public String toString() { - return "plugin"; - } - },; - } - //XML tags and attributes - private static final String XML_MODULE_EL = "MODULE"; - private static final String XML_MODULE_ORDER_ATTR = "order"; - private static final String XML_MODULE_TYPE_ATTR = "type"; - private static final String XML_MODULE_LOC_ATTR = "location"; - private static final String XML_MODULE_ARGS_ATTR = "arguments"; - int order; - String type; - String location; - String arguments; - boolean valid = false; // if passed validation - } -} +// private static final class IngestPipelineXMLDescriptor { +// +// enum PIPELINE_TYPE implements IngestModuleMapping { +// +// FILE_ANALYSIS { +// @Override +// public String toString() { +// return "FileAnalysis"; +// } +// +// @Override +// public Class getIngestModuleInterface() { +// return IngestModuleAbstractFile.class; +// } +// }, +// DATA_SOURCE_ANALYSIS { +// @Override +// public String toString() { +// return "ImageAnalysis"; +// } +// +// @Override +// public Class getIngestModuleInterface() { +// return IngestModuleDataSource.class; +// } +// },; +// } +// +// /** +// * get pipeline type for string mapping to type toString() method +// * +// * @param s string equals to one of the types toString() representation +// * @return matching type +// */ +// static IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException { +// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE[] types = IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.values(); +// for (int i = 0; i < types.length; ++i) { +// if (types[i].toString().equals(s)) { +// return types[i]; +// } +// } +// throw new IllegalArgumentException("No PIPELINE_TYPE for string: " + s); +// } +// private static final String XML_PIPELINE_ROOT = "PIPELINE_CONFIG"; +// private static final String XML_PIPELINE_EL = "PIPELINE"; +// private static final String XML_PIPELINE_TYPE_ATTR = "type"; +// String type; +// List modules = new ArrayList<>(); +// boolean valid = false; // if passed validation +// } +// +// private static class IngestModuleXMLDescriptor { +// +// enum MODULE_TYPE { +// PLUGIN { +// @Override +// public String toString() { +// return "plugin"; +// } +// }; +// } +// +// private static final String 
XML_MODULE_EL = "MODULE"; +// private static final String XML_MODULE_ORDER_ATTR = "order"; +// private static final String XML_MODULE_TYPE_ATTR = "type"; +// private static final String XML_MODULE_LOC_ATTR = "location"; +// private static final String XML_MODULE_ARGS_ATTR = "arguments"; +// int order; +// String type; +// String location; +// String arguments; +// boolean valid = false; // if passed validation +// } +//} /** * Exception thrown when errors occur while loading modules */ -class IngestModuleLoaderException extends Throwable { - - public IngestModuleLoaderException(String message) { - super(message); - } - - public IngestModuleLoaderException(String message, Throwable cause) { - super(message, cause); - } -} +//class IngestModuleLoaderException extends Throwable { +// +// public IngestModuleLoaderException(String message) { +// super(message); +// } +// +// public IngestModuleLoaderException(String message, Throwable cause) { +// super(message, cause); +// } +//} /** * Implements mapping of a type to ingest module interface type */ -interface IngestModuleMapping { - - /** - * Get ingest module interface mapped to that type - * - * @return ingest module interface meta type - */ - public Class getIngestModuleInterface(); -} +//interface IngestModuleMapping { +// +// /** +// * Get ingest module interface mapped to that type +// * +// * @return ingest module interface meta type +// */ +// public Class getIngestModuleInterface(); +//} /** * Custom class loader that attempts to force class resolution / linkage validation at loading */ -class CustomClassLoader extends ClassLoader { - private static final Logger logger = Logger.getLogger(CustomClassLoader.class.getName()); - - CustomClassLoader(ClassLoader parent) { - super(parent); - } - - - @Override - public Class loadClass(String name) throws ClassNotFoundException { - logger.log(Level.INFO, "Custom loading class: " + name); - - Class cl = super.loadClass(name, true); - - return cl; - } - +//class CustomClassLoader extends ClassLoader { +// private static final Logger logger = Logger.getLogger(CustomClassLoader.class.getName()); +// +// CustomClassLoader(ClassLoader parent) { +// super(parent); +// } +// +// +// @Override +// public Class loadClass(String name) throws ClassNotFoundException { +// logger.log(Level.INFO, "Custom loading class: " + name); +// +// Class cl = super.loadClass(name, true); +// +// return cl; +// } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java index 95ed6163ae..8d7cfafe9c 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelines.java @@ -22,67 +22,80 @@ package org.sleuthkit.autopsy.ingest; import java.util.ArrayList; import java.util.List; import java.io.Serializable; +import java.util.concurrent.ConcurrentHashMap; import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.autopsy.ingest.IngestScheduler.FileScheduler.FileTask; +import org.sleuthkit.datamodel.Content; /** - * RJCTODO: + * RJCTODO */ -public class IngestPipelines { +class IngestPipelines { private final long dataSourceTaskId; - private List fileIngestPipelineTemplate = new ArrayList<>(); - private List dataSourceIngestPipelineTemplate = new ArrayList<>(); + private final List ingestModuleTemplates; + private final ConcurrentHashMap fileIngestPipelines = new ConcurrentHashMap<>(); + private final ConcurrentHashMap dataSourceIngestPipelines = new ConcurrentHashMap<>(); - 
IngestPipelines(long dataSourceTaskId, List<IngestModuleTemplate> moduleTemplates) {
+    IngestPipelines(long dataSourceTaskId, final List<IngestModuleTemplate> ingestModuleTemplates) {
         this.dataSourceTaskId = dataSourceTaskId;
-        for (IngestModuleTemplate moduleTemplate : moduleTemplates) {
-            if (moduleTemplate.getIngestModuleFactory().isFileIngestModuleFactory()) {
-                fileIngestPipelineTemplate.add(moduleTemplate);
-            }
-            else {
-                dataSourceIngestPipelineTemplate.add(moduleTemplate);
-            }
-        }
+        this.ingestModuleTemplates = ingestModuleTemplates;
     }
 
-    DataSourceIngestPipeline getDataSourceIngestPipeline() {
-        return new DataSourceIngestPipeline();
+    // RJCTODO: Added provisionally
+    List<IngestModuleTemplate> getIngestModuleTemplates() {
+        return ingestModuleTemplates;
     }
-
-    FileIngestPipeline getFileIngestPipeline() {
-        return new FileIngestPipeline();
+
+    void ingestFile(int threadId, AbstractFile file) {
+        FileIngestPipeline pipeline;
+        if (!fileIngestPipelines.containsKey(threadId)) {
+            pipeline = new FileIngestPipeline();
+            fileIngestPipelines.put(threadId, pipeline);
+        }
+        else {
+            pipeline = fileIngestPipelines.get(threadId);
+        }
+        pipeline.ingestFile(file);
+    }
+
+    void ingestDataSource(int threadId, Content dataSource) {
+        DataSourceIngestPipeline pipeline;
+        if (!dataSourceIngestPipelines.containsKey(threadId)) {
+            pipeline = new DataSourceIngestPipeline();
+            dataSourceIngestPipelines.put(threadId, pipeline);
+        }
+        else {
+            pipeline = dataSourceIngestPipelines.get(threadId);
+        }
+        pipeline.ingestDataSource(dataSource);
     }
-
-    public class DataSourceIngestPipeline {
-        private List<DataSourceIngestModule> modules = new ArrayList<>();
-        private DataSourceIngestPipeline() {
-            try {
-                for (IngestModuleTemplate moduleTemplate : dataSourceIngestPipelineTemplate) {
-                    IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory();
-                    Serializable ingestOptions = moduleTemplate.getIngestOptions();
-                    DataSourceIngestModule module = moduleFactory.createDataSourceIngestModule(ingestOptions);
-                    module.init(dataSourceTaskId);
-                    modules.add(module);
-                }
-            }
-            catch (IngestModuleFactory.InvalidOptionsException ex) {
-                // RJCTODO: Is this a stopper condition? What about init?
-            }
-        }
+    void stopFileIngestPipeline() {
+        // RJCTODO
+        for (FileIngestPipeline pipeline : fileIngestPipelines.values()) {
+            pipeline.stop();
+        }
     }
-
-    public class FileIngestPipeline {
-        private List<FileIngestModule> modules = new ArrayList<>();
+
+    void completeFileIngestPipeline() {
+        // RJCTODO
+        for (FileIngestPipeline pipeline : fileIngestPipelines.values()) {
+            pipeline.complete();
+        }
+    }
+
+    private class FileIngestPipeline {
+        private List<FileIngestModule> ingestModules = new ArrayList<>();
 
         private FileIngestPipeline() {
             try {
-                for (IngestModuleTemplate moduleTemplate : fileIngestPipelineTemplate) {
+                for (IngestModuleTemplate moduleTemplate : ingestModuleTemplates) {
                     IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory();
-                    Serializable ingestOptions = moduleTemplate.getIngestOptions();
-                    FileIngestModule module = moduleFactory.createFileIngestModule(ingestOptions);
-                    module.init(dataSourceTaskId);
-                    modules.add(module);
+                    if (moduleFactory.isFileIngestModuleFactory()) {
+                        Serializable ingestOptions = moduleTemplate.getIngestOptions();
+                        FileIngestModule module = moduleFactory.createFileIngestModule(ingestOptions);
+                        module.init(dataSourceTaskId);
+                        ingestModules.add(module);
+                    }
                 }
             }
             catch (IngestModuleFactory.InvalidOptionsException ex) {
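[Editor's note: the create-on-first-use logic in ingestFile()/ingestDataSource()
above is safe as long as each worker thread passes a unique threadId. If
distinct threads could ever share an id, an atomic putIfAbsent guard avoids
constructing two pipelines for the same key; a sketch, not part of the patch:]

        FileIngestPipeline pipeline = fileIngestPipelines.get(threadId);
        if (pipeline == null) {
            FileIngestPipeline candidate = new FileIngestPipeline();
            // putIfAbsent returns the previously mapped value, or null if this
            // thread won the race and the candidate was stored.
            pipeline = fileIngestPipelines.putIfAbsent(threadId, candidate);
            if (pipeline == null) {
                pipeline = candidate;
            }
        }
        pipeline.ingestFile(file);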
@@ -90,11 +103,54 @@ public class IngestPipelines {
             }
         }
 
-        void doTask(FileTask fileTask) {
-            final DataSourceTask dataSourceTask = fileTask.getDataSourceTask();
-            final AbstractFile fileToProcess = fileTask.getFile();
+        void init() {
+            for (FileIngestModule module : ingestModules) {
+                module.init(dataSourceTaskId);
+            }
+        }
+
+        void ingestFile(AbstractFile file) {
+            for (FileIngestModule module : ingestModules) {
+                module.process(file);
+            }
+            file.close();
+        }
+
+        void stop() {
+            for (FileIngestModule module : ingestModules) {
+                module.stop();
+            }
+        }
 
-            fileToProcess.close();
+        void complete() {
+            for (FileIngestModule module : ingestModules) {
+                module.complete();
+            }
         }
     }
+
+    private class DataSourceIngestPipeline {
+        private final List<DataSourceIngestModule> modules = new ArrayList<>();
+
+        private DataSourceIngestPipeline() {
+            try {
+                for (IngestModuleTemplate moduleTemplate : ingestModuleTemplates) {
+                    IngestModuleFactory moduleFactory = moduleTemplate.getIngestModuleFactory();
+                    if (moduleFactory.isDataSourceIngestModuleFactory()) {
+                        Serializable ingestOptions = moduleTemplate.getIngestOptions();
+                        DataSourceIngestModule module = moduleFactory.createDataSourceIngestModule(ingestOptions);
+                        module.init(dataSourceTaskId);
+                        modules.add(module);
+                    }
+                }
+            }
+            catch (IngestModuleFactory.InvalidOptionsException ex) {
+                // RJCTODO: Is this a stopper condition? What about trial init?
+            }
+        }
+
+        void ingestDataSource(Content dataSource) {
+            // RJCTODO
+        }
+    }
 }
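[Editor's note: a sketch of how a file ingest worker thread is expected to
drive the new per-thread pipelines; the task id, templates, worker id, and
file iteration are assumed context, not APIs introduced by this patch:]

    IngestPipelines pipelines = new IngestPipelines(dataSourceTaskId, ingestModuleTemplates);
    // Each worker passes its own id, so it gets a dedicated FileIngestPipeline.
    for (AbstractFile file : filesForThisWorker) {
        pipelines.ingestFile(workerThreadId, file);
    }
    // After all workers finish, run complete() on every per-thread pipeline.
    pipelines.completeFileIngestPipeline();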
+ } + } + + void ingestDataSource(Content dataSource) { + // RJCTODO + } + } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java index 3b9836bf68..ff3440c6ae 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java @@ -27,6 +27,9 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.TreeSet; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ConcurrentSkipListSet; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; @@ -101,61 +104,123 @@ class IngestScheduler { * */ static class FileScheduler implements Iterator { - //root folders enqueued - private TreeSet rootProcessTasks; - - //stack of current dirs to be processed recursively - private List curDirProcessTasks; - - //list of files being processed in the currently processed directory - private LinkedList curFileProcessTasks; //need to add to start and end quickly - - //estimated total files to be enqueued for currently scheduled content objects - private int filesEnqueuedEst; - private int filesDequeued; - private final static int FAT_NTFS_FLAGS = - TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT12.getValue() + private final static int FAT_NTFS_FLAGS = TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT12.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT16.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT32.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS.getValue(); + private final ConcurrentHashMap dataSourceTasks = new ConcurrentHashMap<>(); // RJCTODO: Why weren't these concurrent before? Synchronized methods? 
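On the concurrency question in the comment above: the old fields could be plain collections because the FileScheduler methods that touch them are synchronized, so all access went through a single monitor. Concurrent collections only pay for themselves where that lock is dropped. The same trade-off applies to the per-thread pipeline maps in IngestPipelines; if those maps are ConcurrentHashMap instances (an assumption, since their declarations are not shown here), Java 8's computeIfAbsent collapses the containsKey/put pair used there into one atomic step:

        // Illustrative alternative to the check-then-put in IngestPipelines.ingestFile();
        // assumes: private final ConcurrentHashMap<Integer, FileIngestPipeline> fileIngestPipelines
        void ingestFile(int threadId, AbstractFile file) {
            FileIngestPipeline pipeline =
                    fileIngestPipelines.computeIfAbsent(threadId, id -> new FileIngestPipeline());
            pipeline.ingestFile(file);
        }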
+        private final ConcurrentSkipListSet<FileTask> rootDirectoryTasks = new ConcurrentSkipListSet<>(new RootTaskComparator());
+        private final List<FileTask> directoryTasks = new ArrayList<>();
+//        private final ConcurrentLinkedQueue<FileTask> directoryTasks = new ConcurrentLinkedQueue<>();
+        private final LinkedList<FileTask> fileTasks = new LinkedList<>();
+//        private final ConcurrentLinkedQueue<FileTask> fileTasks = new ConcurrentLinkedQueue<>();
+        private int filesEnqueuedEst = 0; //estimated total files to be enqueued for currently scheduled content objects
+        private int filesDequeued = 0;

         private FileScheduler() {
-            rootProcessTasks = new TreeSet<>(new RootTaskComparator());
-            curDirProcessTasks = new ArrayList<>();
-            curFileProcessTasks = new LinkedList<>();
-            resetCounters();
         }
-
-        private void resetCounters() {
-            filesEnqueuedEst = 0;
-            filesDequeued = 0;
-        }
-
         @Override
         public synchronized String toString() {
             StringBuilder sb = new StringBuilder();
-            sb.append("\nRootDirs(sorted), size: ").append(rootProcessTasks.size());
-            for (FileTask task : rootProcessTasks) {
+            sb.append("\nRootDirs(sorted), size: ").append(rootDirectoryTasks.size());
+            for (FileTask task : rootDirectoryTasks) {
                 sb.append(task.toString()).append(" ");
             }
-            sb.append("\nCurDirs(stack), size: ").append(curDirProcessTasks.size());
-            for (FileTask task : curDirProcessTasks) {
+            sb.append("\nCurDirs(stack), size: ").append(directoryTasks.size());
+            for (FileTask task : directoryTasks) {
                 sb.append(task.toString()).append(" ");
             }
-            sb.append("\nCurFiles, size: ").append(curFileProcessTasks.size());
-            for (FileTask task : curFileProcessTasks) {
+            sb.append("\nCurFiles, size: ").append(fileTasks.size());
+            for (FileTask task : fileTasks) {
                 sb.append(task.toString()).append(" ");
             }
             return sb.toString();
         }

+        synchronized void scheduleIngestOfFiles(DataSourceTask dataSourceTask) {
+            // Save the data source task to manage its pipelines.
+            dataSourceTasks.put(dataSourceTask.getTaskId(), dataSourceTask);
+
+            Content dataSource = dataSourceTask.getDataSource();
+            Collection<AbstractFile> rootObjects = dataSource.accept(new GetRootDirVisitor());
+            List<AbstractFile> firstLevelFiles = new ArrayList<>();
+            if (rootObjects.isEmpty() && dataSource instanceof AbstractFile) {
+                // The data source is a file.
+                firstLevelFiles.add((AbstractFile)dataSource);
+            }
+            else {
+                for (AbstractFile root : rootObjects) {
+                    List<Content> children;
+                    try {
+                        children = root.getChildren();
+                        if (children.isEmpty()) {
+                            //add the root itself, could be unalloc file, child of volume or image // RJCTODO: Get explanation, improve comment
+                            firstLevelFiles.add(root);
+                        }
+                        else {
+                            //root for fs root dir, schedule children dirs/files // RJCTODO: Get explanation, improve comment
+                            for (Content child : children) {
+                                if (child instanceof AbstractFile) {
+                                    firstLevelFiles.add((AbstractFile) child);
+                                }
+                            }
+                        }
+                    }
+                    catch (TskCoreException ex) {
+                        logger.log(Level.WARNING, "Could not get children of root to enqueue: " + root.getId() + ": " + root.getName(), ex);
+                    }
+                }
+            }
+
+            for (AbstractFile firstLevelFile : firstLevelFiles) {
+                FileTask fileTask = new FileTask(firstLevelFile, dataSourceTask);
+                if (shouldEnqueueTask(fileTask)) {
+                    rootDirectoryTasks.add(fileTask);
+                    // RJCTODO: Increment DataSourceTask counters (not necessary if scanning)
+                }
+            }
+
+            // Update approx count of files to process in queues
+            filesEnqueuedEst = queryNumFilesinEnqueuedContents();
+
+            // Reshuffle/update the dir and file level queues if needed
+            updateQueues();
+        }
+
+        // RJCTODO:
+        /**
+         * Schedule a file for file ingest, with associated modules. This
+         * will add the file to the beginning of the file queue. The method
+         * is intended for rescheduling a file that is a derivative of
+         * another content object that has already been ingested and
+         * produced this file. As a result, the derivative file will be
+         * scheduled with the same priority as the parent origin file.
+         *
+         * @param dataSourceTaskId id of the data source task that was used
+         * to schedule the parent origin content, with its modules,
+         * settings, etc.
+         * @param file file to be scheduled
+         */
+        synchronized void scheduleIngestOfDerivedFile(long dataSourceTaskId, AbstractFile file) {
+            DataSourceTask dataSourceTask = dataSourceTasks.get(dataSourceTaskId);
+            if (null == dataSourceTask) {
+                // RJCTODO: Handle severe error
+            }
+
+            FileTask fileTask = new FileTask(file, dataSourceTask);
+            if (shouldEnqueueTask(fileTask)) {
+//                fileTasks.addFirst(fileTask); RJCTODO: Add first not supported by current concurrent collection
+                fileTasks.add(fileTask);
+                ++filesEnqueuedEst;
+                // RJCTODO: Update counters in data source task if not doing scanning
+            }
+        }
+
         float getPercentageDone() {
             if (filesEnqueuedEst == 0) {
                 return 0;
             }
-
             return ((100.f) * filesDequeued) / filesEnqueuedEst;
-
         }

         /**
@@ -199,231 +264,12 @@ class IngestScheduler {
         int getFilesDequeued() {
             return filesDequeued;
         }
-
-        /**
-         * Task for a specific file to process. More specific than the
-         * higher-level DataSourceTask.
-         */
-        static class FileTask {
-            private final AbstractFile file;
-            private final DataSourceTask dataSourceTask;
-
-            public FileTask(AbstractFile file, DataSourceTask dataSourceTask) {
-                this.file = file;
-                this.dataSourceTask = dataSourceTask;
-            }
-
-            public DataSourceTask getDataSourceTask() {
-                return dataSourceTask;
-            }
-
-            public AbstractFile getFile() {
-                return file;
-            }
-
-            @Override
-            public String toString() {
-                try {
-                    return "ProcessTask{" + "file=" + file.getId() + ": "
-                            + file.getUniquePath() + "}"; // + ", dataSourceTask=" + dataSourceTask + '}';
-                } catch (TskCoreException ex) {
-                    logger.log(Level.SEVERE, "Cound not get unique path of file in queue, ", ex);
-                }
-                return "ProcessTask{" + "file=" + file.getId() + ": "
-                        + file.getName() + '}';
-            }
-
-            /**
-             * two process tasks are equal when the file/dir and modules are the
-             * same this enables are not to queue up the same file/dir, modules
-             * tuples into the root dir set
-             *
-             * @param obj
-             * @return
-             */
-            @Override
-            public boolean equals(Object obj) {
-                if (obj == null) {
-                    return false;
-                }
-                if (getClass() != obj.getClass()) {
-                    return false;
-                }
-                final FileTask other = (FileTask) obj;
-                if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) {
-                    return false;
-                }
-                DataSourceTask thisTask = this.getDataSourceTask();
-                DataSourceTask otherTask = other.getDataSourceTask();
-
-                if (thisTask != otherTask
-                        && (thisTask == null || !thisTask.equals(otherTask))) {
-                    return false;
-                }
-                return true;
-            }
-
-
-            /**
-             * Create 1 or more ProcessTasks for each root dir in the Content from
-             * the context supplied
-             *
-             * @param context the original ingest context
-             * @return
-             */
-            private static List createFromScheduledTask(DataSourceTask scheduledTask) {
-                final Content scheduledContent = scheduledTask.getContent();
-                Collection rootObjects = scheduledContent.accept(new GetRootDirVisitor());
-                List firstLevelFiles = new ArrayList<>();
-                if (rootObjects.isEmpty() && scheduledContent instanceof AbstractFile) {
-                    //add the root, which is a leaf itself
-                    firstLevelFiles.add((AbstractFile) scheduledContent);
-                }
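The RJCTODO inside scheduleIngestOfDerivedFile notes that addFirst is not supported by the concurrent queue being considered. java.util.concurrent.ConcurrentLinkedDeque offers lock-free insertion at both ends, and it also provides the addLast/pollLast calls used elsewhere in this class, so it would preserve the documented front-of-queue placement for derived files. A sketch under that assumption (illustrative only, not part of the patch):

        private final ConcurrentLinkedDeque<FileTask> fileTasks = new ConcurrentLinkedDeque<>();

        synchronized void scheduleIngestOfDerivedFile(long dataSourceTaskId, AbstractFile file) {
            DataSourceTask dataSourceTask = dataSourceTasks.get(dataSourceTaskId);
            if (dataSourceTask == null) {
                return; // RJCTODO: handle severe error
            }
            FileTask fileTask = new FileTask(file, dataSourceTask);
            if (shouldEnqueueTask(fileTask)) {
                fileTasks.addFirst(fileTask); // derived files keep the parent's place in line
                ++filesEnqueuedEst;
            }
        }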
else { - for (AbstractFile root : rootObjects) { - //TODO the type-specific AbstractFile getChildren() method - List children; - try { - children = root.getChildren(); - if (children.isEmpty()) { - //add the root itself, could be unalloc file, child of volume or image - firstLevelFiles.add(root); - } else { - //root for fs root dir, schedule children dirs/files - for (Content child : children) { - if (child instanceof AbstractFile) { - firstLevelFiles.add((AbstractFile) child); - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Could not get children of root to enqueue: " - + root.getId() + ": " + root.getName(), ex); - } - - } - } - - List processTasks = new ArrayList<>(); - for (AbstractFile firstLevelFile : firstLevelFiles) { - FileTask newTask = new FileTask(firstLevelFile, scheduledTask); - if (shouldEnqueueTask(newTask)) { - processTasks.add(newTask); - } - } - return processTasks; - } - } - - /** - * Remove duplicated tasks from previous ingest enqueue currently it - * removes all previous tasks scheduled in queues for this Content - * - * @param task tasks similar to this one should be removed - */ - private void removeDupTasks(DataSourceTask task) { - final Content inputContent = task.getContent(); - - //remove from root queue - List toRemove = new ArrayList<>(); - for (FileTask pt : rootProcessTasks) { - if (pt.getDataSourceTask().getContent().equals(inputContent)) { - toRemove.add(pt); - } - } - rootProcessTasks.removeAll(toRemove); - - //remove from dir stack - toRemove = new ArrayList<>(); - for (FileTask pt : curDirProcessTasks) { - if (pt.getDataSourceTask().getContent().equals(inputContent)) { - toRemove.add(pt); - } - } - curDirProcessTasks.removeAll(toRemove); - - //remove from file queue - toRemove = new ArrayList<>(); - for (FileTask pt : curFileProcessTasks) { - if (pt.getDataSourceTask().getContent().equals(inputContent)) { - toRemove.add(pt); - } - } - curFileProcessTasks.removeAll(toRemove); - - - } - - /** - * Schedule a file to the file ingest, with associated modules. This - * will add the file to beginning of the file queue. The method is - * intended for rescheduling a file that is a derivative of another - * content object that has already ingested and produced this file. As a - * result, the derivative file will be scheduled with the same priority - * as the parent origin file. - * - * @param file file to be scheduled - * @param originalContext original content schedule context that was used - * to schedule the parent origin content, with the modules, settings, etc. - */ - synchronized void schedule(AbstractFile file, PipelineContext originalContext) { - DataSourceTask originalTask = originalContext.getDataSourceTask(); - - //skip if task contains no modules - if (originalTask.getModules().isEmpty()) { - return; - } - - FileTask fileTask = new FileTask(file, originalContext.getDataSourceTask()); - if (shouldEnqueueTask(fileTask)) { - this.curFileProcessTasks.addFirst(fileTask); - ++filesEnqueuedEst; - } - - - } - - /** - * Schedule new Content object for a file ingest with associated - * modules. 
- * - * @param context context to schedule, with scheduled task containing content to process and modules - */ - synchronized void schedule(DataSourceTask task) { - - //skip if task contains no modules - if (task.getModules().isEmpty()) { - return; - } - - final Content contentToSchedule = task.getContent(); - - if (getSourceContent().contains(contentToSchedule)) { - //reset counters if the same content enqueued twice - //Note, not very accurate, because we may have processed some files from - //another content - this.filesDequeued = 0; - } - - //remove duplicate scheduled tasks still in queues for this content if enqueued previously - removeDupTasks(task); - - List rootTasks = FileTask.createFromScheduledTask(task); - - //adds and resorts the tasks - this.rootProcessTasks.addAll(rootTasks); - - //update approx count of files to process in queues - this.filesEnqueuedEst = this.queryNumFilesinEnqueuedContents(); - - //reshuffle/update the dir and file level queues if needed - updateQueues(); - - } - + @Override public synchronized boolean hasNext() { - if (curFileProcessTasks.isEmpty()) { - resetCounters(); + if (fileTasks.isEmpty()) { + filesEnqueuedEst = 0; + filesDequeued = 0; return false; } return true; @@ -436,7 +282,7 @@ class IngestScheduler { } //dequeue the last in the list - final FileTask task = curFileProcessTasks.pollLast(); + final FileTask task = fileTasks.pollLast(); filesDequeued++; updateQueues(); @@ -453,28 +299,28 @@ class IngestScheduler { // that do not get enqueued while (true) { // There are files in the queue, we're done - if (this.curFileProcessTasks.isEmpty() == false) { + if (this.fileTasks.isEmpty() == false) { return; } // fill in the directory queue if it is empty. - if (this.curDirProcessTasks.isEmpty()) { + if (this.directoryTasks.isEmpty()) { // bail out if root is also empty -- we are done - if (rootProcessTasks.isEmpty()) { + if (rootDirectoryTasks.isEmpty()) { return; } - FileTask rootTask = this.rootProcessTasks.pollFirst(); - curDirProcessTasks.add(rootTask); + FileTask rootTask = this.rootDirectoryTasks.pollFirst(); + directoryTasks.add(rootTask); } //pop and push AbstractFile directory children if any //add the popped and its leaf children onto cur file list - FileTask parentTask = curDirProcessTasks.remove(curDirProcessTasks.size() - 1); + FileTask parentTask = directoryTasks.remove(directoryTasks.size() - 1); final AbstractFile parentFile = parentTask.file; // add itself to the file list if (shouldEnqueueTask(parentTask)) { - this.curFileProcessTasks.addLast(parentTask); + this.fileTasks.addLast(parentTask); } // add its children to the file and directory lists @@ -486,10 +332,10 @@ class IngestScheduler { FileTask childTask = new FileTask(childFile, parentTask.getDataSourceTask()); if (childFile.hasChildren()) { - this.curDirProcessTasks.add(childTask); + this.directoryTasks.add(childTask); } else if (shouldEnqueueTask(childTask)) { - this.curFileProcessTasks.addLast(childTask); + this.fileTasks.addLast(childTask); } } } @@ -515,19 +361,19 @@ class IngestScheduler { * enqueued */ synchronized List getSourceContent() { - final Set contentSet = new HashSet(); + final Set contentSet = new HashSet<>(); - for (FileTask task : rootProcessTasks) { - contentSet.add(task.getDataSourceTask().getContent()); + for (FileTask task : rootDirectoryTasks) { + contentSet.add(task.getDataSourceTask().getDataSource()); } - for (FileTask task : curDirProcessTasks) { - contentSet.add(task.getDataSourceTask().getContent()); + for (FileTask task : directoryTasks) { + 
contentSet.add(task.getDataSourceTask().getDataSource()); } - for (FileTask task : curFileProcessTasks) { - contentSet.add(task.getDataSourceTask().getContent()); + for (FileTask task : fileTasks) { + contentSet.add(task.getDataSourceTask().getDataSource()); } - return new ArrayList(contentSet); + return new ArrayList<>(contentSet); } /** @@ -535,41 +381,42 @@ class IngestScheduler { * @param module * @return true if it is in the queue. */ - synchronized boolean hasModuleEnqueued(IngestModuleAbstractFile module) { - for (FileTask task : rootProcessTasks) { - List modules = task.getDataSourceTask().getModules(); - for (IngestModuleAbstractFile m : modules) { - if (m.getName().equals(module.getName())) { - return true; - } - } - } - - for (FileTask task : curDirProcessTasks) { - List modules = task.getDataSourceTask().getModules(); - for (IngestModuleAbstractFile m : modules) { - if (m.getName().equals(module.getName())) { - return true; - } - } - } - - for (FileTask task : curFileProcessTasks) { - List modules = task.getDataSourceTask().getModules(); - for (IngestModuleAbstractFile m : modules) { - if (m.getName().equals(module.getName())) { - return true; - } - } - } - - return false; - } + // RJCTODO: Remove +// synchronized boolean hasModuleEnqueued(IngestModuleAbstractFile module) { +// for (FileTask task : rootProcessTasks) { +// List modules = task.getDataSourceTask().getModules(); +// for (IngestModuleAbstractFile m : modules) { +// if (m.getName().equals(module.getName())) { +// return true; +// } +// } +// } +// +// for (FileTask task : curDirProcessTasks) { +// List modules = task.getDataSourceTask().getModules(); +// for (IngestModuleAbstractFile m : modules) { +// if (m.getName().equals(module.getName())) { +// return true; +// } +// } +// } +// +// for (FileTask task : curFileProcessTasks) { +// List modules = task.getDataSourceTask().getModules(); +// for (IngestModuleAbstractFile m : modules) { +// if (m.getName().equals(module.getName())) { +// return true; +// } +// } +// } +// +// return false; +// } synchronized void empty() { - this.rootProcessTasks.clear(); - this.curDirProcessTasks.clear(); - this.curFileProcessTasks.clear(); + this.rootDirectoryTasks.clear(); + this.directoryTasks.clear(); + this.fileTasks.clear(); } /** @@ -583,7 +430,7 @@ class IngestScheduler { final AbstractFile aFile = processTask.file; //if it's unalloc file, skip if so scheduled - if (processTask.getDataSourceTask().isProcessUnalloc() == false + if (processTask.getDataSourceTask().getProcessUnallocatedSpace() == false && aFile.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS //unalloc files )) { return false; @@ -637,6 +484,77 @@ class IngestScheduler { return true; } + /** + * Task for a specific file to process. More specific than the + * higher-level DataSourceTask. 
+         */
+        static class FileTask {
+            private final AbstractFile file;
+            private final DataSourceTask dataSourceTask;
+
+            public FileTask(AbstractFile file, DataSourceTask dataSourceTask) {
+                this.file = file;
+                this.dataSourceTask = dataSourceTask;
+            }
+
+
+            void execute(int threadId) {
+                // RJCTODO
+                dataSourceTask.getIngestPipelines().ingestFile(threadId, file);
+                dataSourceTask.fileTaskCompleted();
+            }
+
+            public DataSourceTask getDataSourceTask() {
+                return dataSourceTask;
+            }
+
+            public AbstractFile getFile() {
+                return file;
+            }
+
+            @Override
+            public String toString() {
+                try {
+                    return "ProcessTask{" + "file=" + file.getId() + ": "
+                            + file.getUniquePath() + "}"; // + ", dataSourceTask=" + dataSourceTask + '}';
+                } catch (TskCoreException ex) {
+                    logger.log(Level.SEVERE, "Could not get unique path of file in queue, ", ex);
+                }
+                return "ProcessTask{" + "file=" + file.getId() + ": "
+                        + file.getName() + '}';
+            }
+
+            /**
+             * Two process tasks are equal when the file/dir and modules are
+             * the same. This ensures that the same (file/dir, modules) tuple
+             * is not queued up into the root dir set twice.
+             *
+             * @param obj
+             * @return
+             */
+            @Override
+            public boolean equals(Object obj) {
+                if (obj == null) {
+                    return false;
+                }
+                if (getClass() != obj.getClass()) {
+                    return false;
+                }
+                final FileTask other = (FileTask) obj;
+                if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) {
+                    return false;
+                }
+                DataSourceTask thisTask = this.getDataSourceTask();
+                DataSourceTask otherTask = other.getDataSourceTask();
+
+                if (thisTask != otherTask
+                        && (thisTask == null || !thisTask.equals(otherTask))) {
+                    return false;
+                }
+                return true;
+            }
+        }
+
     /**
      * Root dir sorter
      */
@@ -883,60 +801,36 @@ class IngestScheduler {
     /**
      * DataSourceScheduler ingest scheduler
      */
-    static class DataSourceScheduler implements Iterator> {
+    static class DataSourceScheduler implements Iterator<DataSourceTask> {

-        private LinkedList> tasks;
+        private LinkedList<DataSourceTask> tasks;

         DataSourceScheduler() {
-            tasks = new LinkedList>();
+            tasks = new LinkedList<>();
         }

-        synchronized void schedule(DataSourceTask task) {
-
-            //skip if task contains no modules
-            if (task.getModules().isEmpty()) {
-                return;
-            }
-
+        synchronized void schedule(DataSourceTask task) {
             try {
-                if (task.getContent().getParent() != null) {
+                if (task.getDataSource().getParent() != null) {
                     //only accepting parent-less content objects (Image, parentless VirtualDirectory)
-                    logger.log(Level.SEVERE, "Only parent-less Content (data sources) can be scheduled for DataSource ingest, skipping: " + task.getContent());
+                    logger.log(Level.SEVERE, "Only parent-less Content (data sources) can be scheduled for DataSource ingest, skipping: {0}", task.getDataSource());
                     return;
                 }
             } catch (TskCoreException e) {
-                logger.log(Level.SEVERE, "Error validating data source to be scheduled for DataSource ingest" + task.getContent(), e);
+                logger.log(Level.SEVERE, "Error validating data source to be scheduled for DataSource ingest" + task.getDataSource(), e);
                 return;
             }

-            // see if we already have a task for this data source
-            DataSourceTask existTask = null;
-            for (DataSourceTask curTask : tasks) {
-                if (curTask.getContent().equals(task.getContent())) {
-                    existTask = curTask;
-                    break;
-                }
-            }
-
-            // add these modules to the existing task for the data source
-            // @@@ BC: I'm not sure I like this and it will probably break a more formal pipeline structure
-            // @@@ TODO: Verify that if this is called mid-way during ingest that all of the already ingested files get scheduled with the new modules...
-            if (existTask != null) {
-                //merge modules for the data source task
-                existTask.addModules(task.getModules());
-            } else {
-                //enqueue a new task
-                tasks.addLast(task);
-            }
+            tasks.addLast(task);
         }

         @Override
-        public synchronized DataSourceTask next() throws IllegalStateException {
+        public synchronized DataSourceTask next() throws IllegalStateException {
             if (!hasNext()) {
                 throw new IllegalStateException("There is no data source tasks in the queue, check hasNext()");
             }

-            final DataSourceTask ret = tasks.pollFirst();
+            final DataSourceTask ret = tasks.pollFirst();
             return ret;
         }
@@ -947,8 +841,8 @@ class IngestScheduler {
          */
         synchronized List getContents() {
             List contents = new ArrayList();
-            for (DataSourceTask task : tasks) {
-                contents.add(task.getContent());
+            for (DataSourceTask task : tasks) {
+                contents.add(task.getDataSource());
             }
             return contents;
         }
@@ -975,7 +869,7 @@ class IngestScheduler {
         public String toString() {
             StringBuilder sb = new StringBuilder();
             sb.append("DataSourceQueue, size: ").append(getCount());
-            for (DataSourceTask task : tasks) {
+            for (DataSourceTask task : tasks) {
                 sb.append(task.toString()).append(" ");
             }
             return sb.toString();
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
index 93f9edd626..217d59fdab 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
@@ -86,8 +86,8 @@ public class IngestServices {
      * @param module module to get the logger for
      * @return logger object
      */
-    public Logger getLogger(IngestModuleAbstract module) {
-        return Logger.getLogger(module.getName());
+    public Logger getLogger(IngestModule module) {
+        return Logger.getLogger(module.getDisplayName());
     }

     /**
@@ -136,9 +136,9 @@ public class IngestServices {
      * @param file file to be scheduled
      * @param pipelineContext the ingest context for the file ingest pipeline
      */
-    public void scheduleFile(AbstractFile file, PipelineContext pipelineContext) {
-        logger.log(Level.INFO, "Scheduling file: " + file.getName());
-        manager.scheduleFile(file, pipelineContext);
+    public void scheduleFile(long dataSourceTaskId, AbstractFile file) {
+        logger.log(Level.INFO, "Scheduling file: {0}", file.getName());
+        manager.scheduleFile(dataSourceTaskId, file);
     }


@@ -152,8 +152,7 @@ public class IngestServices {
         return manager.getFreeDiskSpace();
     }

-
-
+    // RJCTODO: This may be obsolete
     /**
      * Facility for a file ingest module to check a return value from a previously run file ingest module
      * that executed for the same file.
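For orientation, the reworked scheduleFile(long, AbstractFile) above is the path by which a module feeds files it derives (for example, from an archive) back into its own data source task. A rough usage sketch from a hypothetical extractor module; extractChildren is invented for illustration, and the task id is assumed to be the one handed to the module at init():

    // Inside a hypothetical FileIngestModule implementation:
    public void process(AbstractFile file) {
        for (AbstractFile derived : extractChildren(file)) { // hypothetical helper
            // Derived files rejoin the same data source task's file queue.
            IngestServices.getDefault().scheduleFile(dataSourceTaskId, derived);
        }
    }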
@@ -162,9 +161,9 @@ public class IngestServices { * @param moduleName registered module name of the module to check the return value of * @return the return value of the previously executed module for the currently processed file in the file ingest pipeline */ - public IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { - return manager.getAbstractFileModuleResult(moduleName); - } +// public IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { +// return manager.getAbstractFileModuleResult(moduleName); +// } /** * Gets a specific name/value configuration setting for a module diff --git a/Core/src/org/sleuthkit/autopsy/ingest/PipelineContext.java b/Core/src/org/sleuthkit/autopsy/ingest/PipelineContext.java deleted file mode 100644 index 1e2b69e981..0000000000 --- a/Core/src/org/sleuthkit/autopsy/ingest/PipelineContext.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2013 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.ingest; - -import java.util.Objects; - -/** - * Stores information about a given pipeline, which is a series of modules. - * This is passed into modules for their reference. - * - * @param T type of the ingest associated with the context (file or data source Content) - * - */ -public class PipelineContext { - private final DataSourceTask task; - - PipelineContext(DataSourceTask task) { - this.task = task; - } - - /** - * Returns the currently scheduled task. 
- * @return
- */
-    DataSourceTask getDataSourceTask() {
-        return task;
-    }
-
-
-    @Override
-    public String toString() {
-        return "pipelineContext{" + "task=" + task + '}';
-    }
-
-    @Override
-    public int hashCode() {
-        int hash = 5;
-        hash = 53 * hash + Objects.hashCode(this.task);
-        return hash;
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        final PipelineContext other = (PipelineContext) obj;
-
-        if (!Objects.equals(this.task, other.task)) {
-            return false;
-        }
-
-        return true;
-    }
-}
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
index 857037fa72..2e3a370b22 100644
--- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
+++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
@@ -954,7 +954,7 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
         StringBuilder summary = new StringBuilder();
         boolean running = false;
-        if (IngestManager.getDefault().isIngestRunning() || IngestManager.getDefault().areModulesRunning()) {
+        if (IngestManager.getDefault().isIngestRunning()) {
             running = true;
         }
diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
index f2283e814a..49bc81e4ea 100644
--- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
+++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java
@@ -16,230 +16,229 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.exifparser;
-
-import com.drew.imaging.ImageMetadataReader;
-import com.drew.imaging.ImageProcessingException;
-import com.drew.lang.GeoLocation;
-import com.drew.lang.Rational;
-import com.drew.metadata.Metadata;
-import com.drew.metadata.exif.ExifIFD0Directory;
-import com.drew.metadata.exif.ExifSubIFDDirectory;
-import com.drew.metadata.exif.GpsDirectory;
-import java.io.BufferedInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.logging.Level;
-import org.sleuthkit.autopsy.coreutils.ImageUtils;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.Version;
-import org.sleuthkit.autopsy.ingest.PipelineContext;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
-import org.sleuthkit.autopsy.ingest.IngestModuleInit;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
-import org.sleuthkit.datamodel.ReadContentInputStream;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskData;
-import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
+//package org.sleuthkit.autopsy.exifparser;
+//
+//import com.drew.imaging.ImageMetadataReader;
+//import com.drew.imaging.ImageProcessingException;
+//import com.drew.lang.GeoLocation;
+//import com.drew.lang.Rational;
+//import com.drew.metadata.Metadata;
+//import com.drew.metadata.exif.ExifIFD0Directory;
+//import
com.drew.metadata.exif.ExifSubIFDDirectory; +//import com.drew.metadata.exif.GpsDirectory; +//import java.io.BufferedInputStream; +//import java.io.IOException; +//import java.io.InputStream; +//import java.util.ArrayList; +//import java.util.Collection; +//import java.util.Date; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.ImageUtils; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.ReadContentInputStream; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; +//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; /** * Ingest module to parse image Exif metadata. Currently only supports JPEG * files. Ingests an image file and, if available, adds it's date, latitude, * longitude, altitude, device model, and device make to a blackboard artifact. */ -public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { - - private IngestServices services; - final public static String MODULE_NAME = "Exif Parser"; - final public static String MODULE_VERSION = Version.getVersion(); - private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); - private static ExifParserFileIngestModule defaultInstance = null; - private int filesProcessed = 0; - private boolean filesToFire = false; - - //file ingest modules require a private constructor - //to ensure singleton instances - private ExifParserFileIngestModule() { - } - - //default instance used for module registration - public static synchronized ExifParserFileIngestModule getDefault() { - if (defaultInstance == null) { - defaultInstance = new ExifParserFileIngestModule(); - } - return defaultInstance; - } - - @Override - public IngestModuleAbstractFile.ProcessResult process(PipelineContext pipelineContext, AbstractFile content) { - - //skip unalloc - if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { - return IngestModuleAbstractFile.ProcessResult.OK; - } - - // skip known - if (content.getKnown().equals(TskData.FileKnown.KNOWN)) { - return IngestModuleAbstractFile.ProcessResult.OK; - } - - // update the tree every 1000 files if we have EXIF data that is not being being displayed - filesProcessed++; - if ((filesToFire) && (filesProcessed % 1000 == 0)) { - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); - filesToFire = false; - } - - //skip unsupported - if (!parsableFormat(content)) { - return IngestModuleAbstractFile.ProcessResult.OK; - } - - return processFile(content); - } - - public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) { - InputStream in = null; - BufferedInputStream bin = null; - - try { - in = new ReadContentInputStream(f); - bin = new BufferedInputStream(in); - - Collection attributes = new ArrayList(); - Metadata metadata = ImageMetadataReader.readMetadata(bin, true); - - // Date - ExifSubIFDDirectory exifDir = 
metadata.getDirectory(ExifSubIFDDirectory.class); - if (exifDir != null) { - Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL); - if (date != null) { - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000)); - } - } - - // GPS Stuff - GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class); - if (gpsDir != null) { - GeoLocation loc = gpsDir.getGeoLocation(); - if (loc != null) { - double latitude = loc.getLatitude(); - double longitude = loc.getLongitude(); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude)); - } - - Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); - if (altitude != null) { - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue())); - } - } - - // Device info - ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); - if (devDir != null) { - String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); - if (model != null && !model.isEmpty()) { - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model)); - } - - String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); - if (make != null && !make.isEmpty()) { - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make)); - } - } - - // Add the attributes, if there are any, to a new artifact - if (!attributes.isEmpty()) { - BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); - bba.addAttributes(attributes); - filesToFire = true; - } - - return IngestModuleAbstractFile.ProcessResult.OK; - - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ")."); - } catch (ImageProcessingException ex) { - logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")"); - } catch (IOException ex) { - logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); - } finally { - try { - if (in != null) { - in.close(); - } - if (bin != null) { - bin.close(); - } - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to close InputStream.", ex); - } - } - - // If we got here, there was an error - return IngestModuleAbstractFile.ProcessResult.ERROR; - } - - /** - * Checks if should try to attempt to extract exif. 
Currently checks if JPEG - * image (by signature) - * - * @param f file to be checked - * - * @return true if to be processed - */ - private boolean parsableFormat(AbstractFile f) { - return ImageUtils.isJpegFileHeader(f); - } - - @Override - public void complete() { - logger.log(Level.INFO, "completed exif parsing " + this.toString()); - if (filesToFire) { - //send the final new data event - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); - } - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public String getName() { - return "Exif Image Parser"; - } - - @Override - public String getDescription() { - return "Ingests JPEG files and retrieves their EXIF metadata."; - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - logger.log(Level.INFO, "init() " + this.toString()); - - filesProcessed = 0; - filesToFire = false; - } - - @Override - public void stop() { - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} \ No newline at end of file +//public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { +// +// private IngestServices services; +// final public static String MODULE_NAME = "Exif Parser"; +// final public static String MODULE_VERSION = Version.getVersion(); +// private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); +// private static ExifParserFileIngestModule defaultInstance = null; +// private int filesProcessed = 0; +// private boolean filesToFire = false; +// +// //file ingest modules require a private constructor +// //to ensure singleton instances +// private ExifParserFileIngestModule() { +// } +// +// //default instance used for module registration +// public static synchronized ExifParserFileIngestModule getDefault() { +// if (defaultInstance == null) { +// defaultInstance = new ExifParserFileIngestModule(); +// } +// return defaultInstance; +// } +// +// @Override +// public IngestModuleAbstractFile.ProcessResult process(PipelineContext pipelineContext, AbstractFile content) { +// +// //skip unalloc +// if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { +// return IngestModuleAbstractFile.ProcessResult.OK; +// } +// +// // skip known +// if (content.getKnown().equals(TskData.FileKnown.KNOWN)) { +// return IngestModuleAbstractFile.ProcessResult.OK; +// } +// +// // update the tree every 1000 files if we have EXIF data that is not being being displayed +// filesProcessed++; +// if ((filesToFire) && (filesProcessed % 1000 == 0)) { +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); +// filesToFire = false; +// } +// +// //skip unsupported +// if (!parsableFormat(content)) { +// return IngestModuleAbstractFile.ProcessResult.OK; +// } +// +// return processFile(content); +// } +// +// public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) { +// InputStream in = null; +// BufferedInputStream bin = null; +// +// try { +// in = new ReadContentInputStream(f); +// bin = new BufferedInputStream(in); +// +// Collection attributes = new ArrayList(); +// Metadata metadata = ImageMetadataReader.readMetadata(bin, true); +// +// // Date +// ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class); +// if (exifDir != null) { +// Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL); 
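All of this parked code is waiting on a port to the interfaces introduced earlier in the series. Under the revised FileIngestModule lifecycle (init/process/stop/complete, as invoked by FileIngestPipeline), the skeleton would look roughly like the following; the init parameter is still in flux at this point in the series, so the whole sketch is an assumption rather than part of the patch:

    public final class ExifParserFileIngestModule implements FileIngestModule {
        private long dataSourceTaskId;

        @Override
        public void init(long dataSourceTaskId) {
            this.dataSourceTaskId = dataSourceTaskId;
        }

        @Override
        public void process(AbstractFile file) {
            if (ImageUtils.isJpegFileHeader(file)) {
                // Parse Exif and post TSK_METADATA_EXIF artifacts,
                // as the parked code does today.
            }
        }

        @Override
        public void complete() {
            // Fire the final ModuleDataEvent, as in the parked complete().
        }

        @Override
        public void stop() {
        }
    }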
+// if (date != null) { +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000)); +// } +// } +// +// // GPS Stuff +// GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class); +// if (gpsDir != null) { +// GeoLocation loc = gpsDir.getGeoLocation(); +// if (loc != null) { +// double latitude = loc.getLatitude(); +// double longitude = loc.getLongitude(); +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude)); +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude)); +// } +// +// Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); +// if (altitude != null) { +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue())); +// } +// } +// +// // Device info +// ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); +// if (devDir != null) { +// String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); +// if (model != null && !model.isEmpty()) { +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model)); +// } +// +// String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); +// if (make != null && !make.isEmpty()) { +// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make)); +// } +// } +// +// // Add the attributes, if there are any, to a new artifact +// if (!attributes.isEmpty()) { +// BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); +// bba.addAttributes(attributes); +// filesToFire = true; +// } +// +// return IngestModuleAbstractFile.ProcessResult.OK; +// +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ")."); +// } catch (ImageProcessingException ex) { +// logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")"); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); +// } finally { +// try { +// if (in != null) { +// in.close(); +// } +// if (bin != null) { +// bin.close(); +// } +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed to close InputStream.", ex); +// } +// } +// +// // If we got here, there was an error +// return IngestModuleAbstractFile.ProcessResult.ERROR; +// } +// +// /** +// * Checks if should try to attempt to extract exif. 
Currently checks if JPEG +// * image (by signature) +// * +// * @param f file to be checked +// * +// * @return true if to be processed +// */ +// private boolean parsableFormat(AbstractFile f) { +// return ImageUtils.isJpegFileHeader(f); +// } +// +// @Override +// public void complete() { +// logger.log(Level.INFO, "completed exif parsing " + this.toString()); +// if (filesToFire) { +// //send the final new data event +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); +// } +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public String getName() { +// return "Exif Image Parser"; +// } +// +// @Override +// public String getDescription() { +// return "Ingests JPEG files and retrieves their EXIF metadata."; +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// logger.log(Level.INFO, "init() " + this.toString()); +// +// filesProcessed = 0; +// filesToFire = false; +// } +// +// @Override +// public void stop() { +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} \ No newline at end of file diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/AddFileExtensionAction.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/AddFileExtensionAction.java index a2a9f12747..d673d77241 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/AddFileExtensionAction.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/AddFileExtensionAction.java @@ -43,18 +43,18 @@ class AddFileExtensionAction extends AbstractAction { @Override public void actionPerformed(ActionEvent event) { - HashMap editableMap = FileExtMismatchXML.getDefault().load(); - ArrayList editedExtensions = new ArrayList<>(Arrays.asList(editableMap.get(mimeTypeStr))); - editedExtensions.add(extStr); - - // Old array will be replaced by new array for this key - editableMap.put(mimeTypeStr, editedExtensions.toArray(new String[0])); - - if (!FileExtMismatchXML.getDefault().save(editableMap)) { - //error - JOptionPane.showMessageDialog(null, "Writing XML configuration file failed.", "Add Mismatch Extension Error", JOptionPane.ERROR_MESSAGE); - } // else //in the future we might want to update the statusbar to give feedback to the user - +// HashMap editableMap = FileExtMismatchXML.getDefault().load(); +// ArrayList editedExtensions = new ArrayList<>(Arrays.asList(editableMap.get(mimeTypeStr))); +// editedExtensions.add(extStr); +// +// // Old array will be replaced by new array for this key +// editableMap.put(mimeTypeStr, editedExtensions.toArray(new String[0])); +// +// if (!FileExtMismatchXML.getDefault().save(editableMap)) { +// //error +// JOptionPane.showMessageDialog(null, "Writing XML configuration file failed.", "Add Mismatch Extension Error", JOptionPane.ERROR_MESSAGE); +// } // else //in the future we might want to update the statusbar to give feedback to the user +// } } diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchConfigPanel.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchConfigPanel.java index 4a027b6ba2..e6671e6662 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchConfigPanel.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchConfigPanel.java @@ -30,15 +30,15 @@ import 
javax.swing.ListSelectionModel; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import javax.swing.table.AbstractTableModel; -import org.sleuthkit.autopsy.corecomponents.OptionsPanel; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.filetypeid.FileTypeIdIngestModule; +//import org.sleuthkit.autopsy.corecomponents.OptionsPanel; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.filetypeid.FileTypeIdIngestModule; RJCTODO /** * Container panel for File Extension Mismatch Ingest Module advanced configuration options */ -final class FileExtMismatchConfigPanel extends javax.swing.JPanel implements OptionsPanel { - private static Logger logger = Logger.getLogger(FileExtMismatchConfigPanel.class.getName()); +final class FileExtMismatchConfigPanel extends javax.swing.JPanel /*implements OptionsPanel*/ { +// private static Logger logger = Logger.getLogger(FileExtMismatchConfigPanel.class.getName()); private HashMap editableMap = new HashMap<>(); private ArrayList mimeList = null; private ArrayList currentExtensions = null; @@ -402,12 +402,13 @@ final class FileExtMismatchConfigPanel extends javax.swing.JPanel implements Opt mimeErrLabel.setText("MIME type already exists!"); return; } - - if (!FileTypeIdIngestModule.isMimeTypeDetectable(newMime)) { - mimeErrLabel.setForeground(Color.red); - mimeErrLabel.setText("MIME type is not detectable by this module."); - return; - } + + // RJCTODO +// if (!FileTypeIdIngestModule.isMimeTypeDetectable(newMime)) { +// mimeErrLabel.setForeground(Color.red); +// mimeErrLabel.setText("MIME type is not detectable by this module."); +// return; +// } editableMap.put(newMime, new String[0]); @@ -501,29 +502,29 @@ final class FileExtMismatchConfigPanel extends javax.swing.JPanel implements Opt } } - @Override +// @Override public void load() { // Load the XML into a buffer that the user can modify. They can choose // to save it back to the file after making changes. 
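The commented-out line below is where the XML settings load belonged. When FileExtMismatchXML is hooked back up, a null-safe re-enable could look like the following sketch (the map's type parameters are inferred from how editableMap is used elsewhere in this patch set):

    public void load() {
        // Guard against a missing or unreadable settings file instead of
        // assuming load() always returns a map.
        HashMap<String, String[]> loaded = FileExtMismatchXML.getDefault().load();
        editableMap = (loaded != null) ? loaded : new HashMap<String, String[]>();
        updateMimeList();
        updateExtList();
    }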
- editableMap = FileExtMismatchXML.getDefault().load(); +// editableMap = FileExtMismatchXML.getDefault().load(); updateMimeList(); updateExtList(); } - @Override +// @Override public void store() { - if (FileExtMismatchXML.getDefault().save(editableMap)) { - mimeErrLabel.setText(" "); - mimeRemoveErrLabel.setText(" "); - extRemoveErrLabel.setText(" "); - extErrorLabel.setText(" "); - - saveMsgLabel.setText("Saved."); - saveButton.setEnabled(false); - } else { - //error - JOptionPane.showMessageDialog(this, "Writing XML configuration file failed.", "Save Error", JOptionPane.ERROR_MESSAGE); - } +// if (FileExtMismatchXML.getDefault().save(editableMap)) { +// mimeErrLabel.setText(" "); +// mimeRemoveErrLabel.setText(" "); +// extRemoveErrLabel.setText(" "); +// extErrorLabel.setText(" "); +// +// saveMsgLabel.setText("Saved."); +// saveButton.setEnabled(false); +// } else { +// //error +// JOptionPane.showMessageDialog(this, "Writing XML configuration file failed.", "Save Error", JOptionPane.ERROR_MESSAGE); +// } } private void setIsModified() { @@ -606,18 +607,18 @@ final class FileExtMismatchConfigPanel extends javax.swing.JPanel implements Opt @Override public Object getValueAt(int rowIndex, int columnIndex) { Object ret = null; - if ((mimeList == null) || (rowIndex > mimeList.size())) { - return ""; - } - String word = mimeList.get(rowIndex); - switch (columnIndex) { - case 0: - ret = (Object) word; - break; - default: - logger.log(Level.SEVERE, "Invalid table column index: " + columnIndex); - break; - } +// if ((mimeList == null) || (rowIndex > mimeList.size())) { +// return ""; +// } +// String word = mimeList.get(rowIndex); +// switch (columnIndex) { +// case 0: +// ret = (Object) word; +// break; +// default: +// logger.log(Level.SEVERE, "Invalid table column index: " + columnIndex); +// break; +// } return ret; } @@ -671,18 +672,18 @@ final class FileExtMismatchConfigPanel extends javax.swing.JPanel implements Opt public Object getValueAt(int rowIndex, int columnIndex) { Object ret = null; - if ((currentExtensions == null) || (currentExtensions.size() == 0) || (rowIndex > currentExtensions.size())) { - return ""; - } - String word = currentExtensions.get(rowIndex); - switch (columnIndex) { - case 0: - ret = (Object) word; - break; - default: - logger.log(Level.SEVERE, "Invalid table column index: " + columnIndex); - break; - } +// if ((currentExtensions == null) || (currentExtensions.size() == 0) || (rowIndex > currentExtensions.size())) { +// return ""; +// } +// String word = currentExtensions.get(rowIndex); +// switch (columnIndex) { +// case 0: +// ret = (Object) word; +// break; +// default: +// logger.log(Level.SEVERE, "Invalid table column index: " + columnIndex); +// break; +// } return ret; } diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchContextMenuActionsProvider.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchContextMenuActionsProvider.java index 2d6b871321..11b3a09977 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchContextMenuActionsProvider.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchContextMenuActionsProvider.java @@ -20,108 +20,108 @@ package org.sleuthkit.autopsy.fileextmismatch; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.logging.Level; -import javax.swing.Action; -import org.openide.util.Exceptions; -import 
org.openide.util.Lookup; -import org.openide.util.Utilities; -import org.openide.util.lookup.ServiceProvider; -import org.sleuthkit.autopsy.corecomponentinterfaces.ContextMenuActionsProvider; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestConfigurator; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * This creates a single context menu item for adding a new filename extension to - * the mismatch list for the MIME type of the selected node. - */ -@ServiceProvider(service = ContextMenuActionsProvider.class) -public class FileExtMismatchContextMenuActionsProvider implements ContextMenuActionsProvider { - @Override - public List getActions() { - ArrayList actions = new ArrayList<>(); - - // Ignore if file ingest is in progress. - IngestConfigurator ingestConfigurator = Lookup.getDefault().lookup(IngestConfigurator.class); - if (ingestConfigurator != null && !ingestConfigurator.isIngestRunning()) { - - final Collection selectedArts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); - - // Prevent multiselect - if (selectedArts.size() == 1) { - - for (BlackboardArtifact nodeArt : selectedArts) { - - // Only for mismatch results - if (nodeArt.getArtifactTypeName().equals("TSK_EXT_MISMATCH_DETECTED")) { - String mimeTypeStr = ""; - String extStr = ""; - - AbstractFile af = null; - try { - af = nodeArt.getSleuthkitCase().getAbstractFileById(nodeArt.getObjectID()); - } catch (TskCoreException ex) { - Logger.getLogger(FileExtMismatchContextMenuActionsProvider.class.getName()).log(Level.SEVERE, "Error getting file by id", ex); - } - - if (af != null) { - try { - int i = af.getName().lastIndexOf("."); - if ((i > -1) && ((i + 1) < af.getName().length())) { - extStr = af.getName().substring(i + 1).toLowerCase(); - } - - ArrayList artList = af.getAllArtifacts(); - for (BlackboardArtifact art : artList) { - List atrList = art.getAttributes(); - for (BlackboardAttribute att : atrList) { - if (att.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID()) { - mimeTypeStr = att.getValueString(); - break; - } - } - if (!mimeTypeStr.isEmpty()) { - break; - } - } - } catch (TskCoreException ex) { - Logger.getLogger(FileExtMismatchContextMenuActionsProvider.class.getName()).log(Level.SEVERE, "Error looking up blackboard attributes", ex); - } - - if (!extStr.isEmpty() && !mimeTypeStr.isEmpty()) { - // Limit max size so the context window doesn't get ridiculously wide - if (extStr.length() > 10) { - extStr = extStr.substring(0, 9); - } - if (mimeTypeStr.length() > 40) { - mimeTypeStr = mimeTypeStr.substring(0, 39); - } - String menuItemStr = "Add extension " + extStr + " as matching MIME type " + mimeTypeStr; - actions.add(new AddFileExtensionAction(menuItemStr, extStr, mimeTypeStr)); - - // Check if already added - HashMap editableMap = FileExtMismatchXML.getDefault().load(); - ArrayList editedExtensions = new ArrayList<>(Arrays.asList(editableMap.get(mimeTypeStr))); - if (editedExtensions.contains(extStr)) { - // Informs the user that they have already added this extension to this MIME type - actions.get(0).setEnabled(false); - } - - } - } - } - } - } - } - - return actions; - } -} +//import java.util.ArrayList; +//import java.util.Arrays; +//import java.util.Collection; +//import java.util.HashMap; +//import java.util.List; +//import java.util.logging.Level; 
+//import javax.swing.Action; +//import org.openide.util.Exceptions; +//import org.openide.util.Lookup; +//import org.openide.util.Utilities; +//import org.openide.util.lookup.ServiceProvider; +//import org.sleuthkit.autopsy.corecomponentinterfaces.ContextMenuActionsProvider; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.ingest.IngestConfigurator; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.TskCoreException; +// +///** +// * This creates a single context menu item for adding a new filename extension to +// * the mismatch list for the MIME type of the selected node. +// */ +//@ServiceProvider(service = ContextMenuActionsProvider.class) +//public class FileExtMismatchContextMenuActionsProvider implements ContextMenuActionsProvider { +// @Override +// public List getActions() { +// ArrayList actions = new ArrayList<>(); +// +// // Ignore if file ingest is in progress. +// IngestConfigurator ingestConfigurator = Lookup.getDefault().lookup(IngestConfigurator.class); +// if (ingestConfigurator != null && !ingestConfigurator.isIngestRunning()) { +// +// final Collection selectedArts = Utilities.actionsGlobalContext().lookupAll(BlackboardArtifact.class); +// +// // Prevent multiselect +// if (selectedArts.size() == 1) { +// +// for (BlackboardArtifact nodeArt : selectedArts) { +// +// // Only for mismatch results +// if (nodeArt.getArtifactTypeName().equals("TSK_EXT_MISMATCH_DETECTED")) { +// String mimeTypeStr = ""; +// String extStr = ""; +// +// AbstractFile af = null; +// try { +// af = nodeArt.getSleuthkitCase().getAbstractFileById(nodeArt.getObjectID()); +// } catch (TskCoreException ex) { +// Logger.getLogger(FileExtMismatchContextMenuActionsProvider.class.getName()).log(Level.SEVERE, "Error getting file by id", ex); +// } +// +// if (af != null) { +// try { +// int i = af.getName().lastIndexOf("."); +// if ((i > -1) && ((i + 1) < af.getName().length())) { +// extStr = af.getName().substring(i + 1).toLowerCase(); +// } +// +// ArrayList artList = af.getAllArtifacts(); +// for (BlackboardArtifact art : artList) { +// List atrList = art.getAttributes(); +// for (BlackboardAttribute att : atrList) { +// if (att.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID()) { +// mimeTypeStr = att.getValueString(); +// break; +// } +// } +// if (!mimeTypeStr.isEmpty()) { +// break; +// } +// } +// } catch (TskCoreException ex) { +// Logger.getLogger(FileExtMismatchContextMenuActionsProvider.class.getName()).log(Level.SEVERE, "Error looking up blackboard attributes", ex); +// } +// +// if (!extStr.isEmpty() && !mimeTypeStr.isEmpty()) { +// // Limit max size so the context window doesn't get ridiculously wide +// if (extStr.length() > 10) { +// extStr = extStr.substring(0, 9); +// } +// if (mimeTypeStr.length() > 40) { +// mimeTypeStr = mimeTypeStr.substring(0, 39); +// } +// String menuItemStr = "Add extension " + extStr + " as matching MIME type " + mimeTypeStr; +// actions.add(new AddFileExtensionAction(menuItemStr, extStr, mimeTypeStr)); +// +// // Check if already added +// HashMap editableMap = FileExtMismatchXML.getDefault().load(); +// ArrayList editedExtensions = new ArrayList<>(Arrays.asList(editableMap.get(mimeTypeStr))); +// if (editedExtensions.contains(extStr)) { +// // Informs the user that they have already added this extension to this MIME type +// 
actions.get(0).setEnabled(false); +// } +// +// } +// } +// } +// } +// } +// } +// +// return actions; +// } +//} diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java index 0e95916063..df5450d97f 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchIngestModule.java @@ -17,256 +17,254 @@ * limitations under the License. */ - -package org.sleuthkit.autopsy.fileextmismatch; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.logging.Level; -import org.openide.util.Exceptions; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; -import org.sleuthkit.datamodel.TskData.FileKnown; -import org.sleuthkit.datamodel.TskException; +//package org.sleuthkit.autopsy.fileextmismatch; +// +//import java.util.ArrayList; +//import java.util.Arrays; +//import java.util.Collections; +//import java.util.HashMap; +//import java.util.List; +//import java.util.logging.Level; +//import org.openide.util.Exceptions; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; +//import org.sleuthkit.datamodel.TskData.FileKnown; +//import org.sleuthkit.datamodel.TskException; /** * Flags mismatched filename extensions based on file signature. 
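 *
 * A minimal sketch of the check this module performs, assuming a map from
 * detected MIME type to its allowed extensions (the variable names here are
 * illustrative, not necessarily the module's actual fields):
 *
 *   String[] allowed = sigTypeToExtMap.get(detectedMimeType);
 *   if (allowed == null || actualExt.isEmpty()) {
 *       return false;                       // no rule, or nothing to compare
 *   }
 *   return !Arrays.asList(allowed).contains(actualExt); // true == mismatch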
*/ -public class FileExtMismatchIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { - private static FileExtMismatchIngestModule defaultInstance = null; - private static final Logger logger = Logger.getLogger(FileExtMismatchIngestModule.class.getName()); - public static final String MODULE_NAME = "Extension Mismatch Detector"; - public static final String MODULE_DESCRIPTION = "Flags files that have a non-standard extension based on their file type."; - public static final String MODULE_VERSION = Version.getVersion(); - - private static long processTime = 0; - private static int messageId = 0; - private static long numFiles = 0; - private static boolean skipKnown = false; - private static boolean skipNoExt = true; - private static boolean skipTextPlain = false; - - private FileExtMismatchSimpleConfigPanel simpleConfigPanel; - private FileExtMismatchConfigPanel advancedConfigPanel; - private IngestServices services; - private HashMap SigTypeToExtMap = new HashMap<>(); - - - // Private to ensure Singleton status - private FileExtMismatchIngestModule() { - - } - - // File-level ingest modules are currently singleton -- this is required - public static synchronized FileExtMismatchIngestModule getDefault() { - //defaultInstance is a private static class variable - if (defaultInstance == null) { - defaultInstance = new FileExtMismatchIngestModule(); - } - return defaultInstance; - } - - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - - // Load mapping - FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault(); - SigTypeToExtMap = xmlLoader.load(); - - } - - @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - // skip non-files - if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || - (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { - return ProcessResult.OK; - } - - // deleted files often have content that was not theirs and therefor causes mismatch - if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) || - (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) { - return ProcessResult.OK; - } - - if (skipKnown && (abstractFile.getKnown() == FileKnown.KNOWN)) { - return ProcessResult.OK; - } - - try - { - long startTime = System.currentTimeMillis(); - - boolean mismatchDetected = compareSigTypeToExt(abstractFile); - - processTime += (System.currentTimeMillis() - startTime); - numFiles++; - - if (mismatchDetected) { - // add artifact - BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED); - - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart))); - } - return ProcessResult.OK; - } catch (TskException ex) { - logger.log(Level.WARNING, "Error matching file signature", ex); - return ProcessResult.ERROR; - } - } - - /** - * Compare file type for file and extension. - * @param abstractFile - * @return false if the two match. True if there is a mismatch. - */ - private boolean compareSigTypeToExt(AbstractFile abstractFile) { - try { - String currActualExt = abstractFile.getNameExtension(); - - // If we are skipping names with no extension - if (skipNoExt && currActualExt.isEmpty()) { - return false; - } - - // find file_sig value. 
- // check the blackboard for a file type attribute - ArrayList attributes = abstractFile.getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG); - for (BlackboardAttribute attribute : attributes) { - String currActualSigType = attribute.getValueString(); - if (skipTextPlain) { - if (!currActualExt.isEmpty() && currActualSigType.equals("text/plain")) { - return false; - } - } - - //get known allowed values from the map for this type - String[] allowedExtArray = SigTypeToExtMap.get(currActualSigType); - if (allowedExtArray != null) { - List allowedExtList = Arrays.asList(allowedExtArray); - - // see if the filename ext is in the allowed list - if (allowedExtList != null) { - for (String e : allowedExtList) { - if (e.equals(currActualExt)) { - return false; - } - } - return true; //potential mismatch - } - } - } - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error while getting file signature from blackboard.", ex); - } - - return false; - } - - @Override - public void complete() { - StringBuilder detailsSb = new StringBuilder(); - //details - detailsSb.append(""); - - detailsSb.append(""); - - detailsSb.append("\n"); - detailsSb.append("\n"); - detailsSb.append("
"+MODULE_DESCRIPTION+"
Total Processing Time").append(processTime).append("
Total Files Processed").append(numFiles).append("
"); - - services.postMessage(IngestMessage.createMessage(++messageId, IngestMessage.MessageType.INFO, this, "File Extension Mismatch Results", detailsSb.toString())); - } - - @Override - public void stop() { - //do nothing - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public boolean hasSimpleConfiguration() { - return true; - } - - @Override - public boolean hasAdvancedConfiguration() { - return true; - } - - @Override - public javax.swing.JPanel getSimpleConfiguration(String context) { - if (simpleConfigPanel == null) { - simpleConfigPanel = new FileExtMismatchSimpleConfigPanel(); - } - - return simpleConfigPanel; - } - - @Override - public javax.swing.JPanel getAdvancedConfiguration(String context) { - getPanel().load(); - return getPanel(); - } - - private FileExtMismatchConfigPanel getPanel() { - if (advancedConfigPanel == null) { - advancedConfigPanel = new FileExtMismatchConfigPanel(); - } - return advancedConfigPanel; - } - - @Override - public void saveAdvancedConfiguration() { - getPanel().store(); - } - - @Override - public boolean hasBackgroundJobsRunning() { - // we're single threaded... - return false; - } - - public static void setSkipKnown(boolean flag) { - skipKnown = flag; - } - - public static void setSkipNoExt(boolean flag) { - skipNoExt = flag; - } - public static void setSkipTextPlain(boolean flag) { - skipTextPlain = flag; - } -} +//public class FileExtMismatchIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { +// private static FileExtMismatchIngestModule defaultInstance = null; +// private static final Logger logger = Logger.getLogger(FileExtMismatchIngestModule.class.getName()); +// public static final String MODULE_NAME = "Extension Mismatch Detector"; +// public static final String MODULE_DESCRIPTION = "Flags files that have a non-standard extension based on their file type."; +// public static final String MODULE_VERSION = Version.getVersion(); +// +// private static long processTime = 0; +// private static int messageId = 0; +// private static long numFiles = 0; +// private static boolean skipKnown = false; +// private static boolean skipNoExt = true; +// private static boolean skipTextPlain = false; +// +// private FileExtMismatchSimpleConfigPanel simpleConfigPanel; +// private FileExtMismatchConfigPanel advancedConfigPanel; +// private IngestServices services; +// private HashMap SigTypeToExtMap = new HashMap<>(); +// +// +// // Private to ensure Singleton status +// private FileExtMismatchIngestModule() { +// +// } +// +// // File-level ingest modules are currently singleton -- this is required +// public static synchronized FileExtMismatchIngestModule getDefault() { +// //defaultInstance is a private static class variable +// if (defaultInstance == null) { +// defaultInstance = new FileExtMismatchIngestModule(); +// } +// return defaultInstance; +// } +// +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// +// // Load mapping +// FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault(); +// SigTypeToExtMap = xmlLoader.load(); +// +// } +// +// @Override +// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { +// // skip non-files +// if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || +// 
(abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { +// return ProcessResult.OK; +// } +// +// // deleted files often have content that was not theirs and therefor causes mismatch +// if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) || +// (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) { +// return ProcessResult.OK; +// } +// +// if (skipKnown && (abstractFile.getKnown() == FileKnown.KNOWN)) { +// return ProcessResult.OK; +// } +// +// try +// { +// long startTime = System.currentTimeMillis(); +// +// boolean mismatchDetected = compareSigTypeToExt(abstractFile); +// +// processTime += (System.currentTimeMillis() - startTime); +// numFiles++; +// +// if (mismatchDetected) { +// // add artifact +// BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED); +// +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart))); +// } +// return ProcessResult.OK; +// } catch (TskException ex) { +// logger.log(Level.WARNING, "Error matching file signature", ex); +// return ProcessResult.ERROR; +// } +// } +// +// /** +// * Compare file type for file and extension. +// * @param abstractFile +// * @return false if the two match. True if there is a mismatch. +// */ +// private boolean compareSigTypeToExt(AbstractFile abstractFile) { +// try { +// String currActualExt = abstractFile.getNameExtension(); +// +// // If we are skipping names with no extension +// if (skipNoExt && currActualExt.isEmpty()) { +// return false; +// } +// +// // find file_sig value. +// // check the blackboard for a file type attribute +// ArrayList attributes = abstractFile.getGenInfoAttributes(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG); +// for (BlackboardAttribute attribute : attributes) { +// String currActualSigType = attribute.getValueString(); +// if (skipTextPlain) { +// if (!currActualExt.isEmpty() && currActualSigType.equals("text/plain")) { +// return false; +// } +// } +// +// //get known allowed values from the map for this type +// String[] allowedExtArray = SigTypeToExtMap.get(currActualSigType); +// if (allowedExtArray != null) { +// List allowedExtList = Arrays.asList(allowedExtArray); +// +// // see if the filename ext is in the allowed list +// if (allowedExtList != null) { +// for (String e : allowedExtList) { +// if (e.equals(currActualExt)) { +// return false; +// } +// } +// return true; //potential mismatch +// } +// } +// } +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Error while getting file signature from blackboard.", ex); +// } +// +// return false; +// } +// +// @Override +// public void complete() { +// StringBuilder detailsSb = new StringBuilder(); +// //details +// detailsSb.append(""); +// +// detailsSb.append(""); +// +// detailsSb.append("\n"); +// detailsSb.append("\n"); +// detailsSb.append("
"+MODULE_DESCRIPTION+"
Total Processing Time").append(processTime).append("
Total Files Processed").append(numFiles).append("
"); +// +// services.postMessage(IngestMessage.createMessage(++messageId, IngestMessage.MessageType.INFO, this, "File Extension Mismatch Results", detailsSb.toString())); +// } +// +// @Override +// public void stop() { +// //do nothing +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public boolean hasSimpleConfiguration() { +// return true; +// } +// +// @Override +// public boolean hasAdvancedConfiguration() { +// return true; +// } +// +// @Override +// public javax.swing.JPanel getSimpleConfiguration(String context) { +// if (simpleConfigPanel == null) { +// simpleConfigPanel = new FileExtMismatchSimpleConfigPanel(); +// } +// +// return simpleConfigPanel; +// } +// +// @Override +// public javax.swing.JPanel getAdvancedConfiguration(String context) { +// getPanel().load(); +// return getPanel(); +// } +// +// private FileExtMismatchConfigPanel getPanel() { +// if (advancedConfigPanel == null) { +// advancedConfigPanel = new FileExtMismatchConfigPanel(); +// } +// return advancedConfigPanel; +// } +// +// @Override +// public void saveAdvancedConfiguration() { +// getPanel().store(); +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// // we're single threaded... +// return false; +// } +// +// public static void setSkipKnown(boolean flag) { +// skipKnown = flag; +// } +// +// public static void setSkipNoExt(boolean flag) { +// skipNoExt = flag; +// } +// public static void setSkipTextPlain(boolean flag) { +// skipTextPlain = flag; +// } +//} diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchOptionsPanelController.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchOptionsPanelController.java index 453d6a89eb..5a14026304 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchOptionsPanelController.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchOptionsPanelController.java @@ -4,103 +4,103 @@ */ package org.sleuthkit.autopsy.fileextmismatch; -import java.beans.PropertyChangeListener; -import java.beans.PropertyChangeSupport; -import javax.swing.JComponent; -import org.netbeans.spi.options.OptionsPanelController; -import org.openide.util.HelpCtx; -import org.openide.util.Lookup; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; - -@OptionsPanelController.TopLevelRegistration( - categoryName = "#OptionsCategory_Name_FileExtMismatchOptions", -iconBase = "org/sleuthkit/autopsy/fileextmismatch/options-icon.png", -position = 4, -keywords = "#OptionsCategory_FileExtMismatch", -keywordsCategory = "KeywordSearchOptions") -@org.openide.util.NbBundle.Messages({"OptionsCategory_Name_FileExtMismatchOptions=File Ext Mismatch", "OptionsCategory_FileExtMismatch=File Ext Mismatch"}) -public final class FileExtMismatchOptionsPanelController extends OptionsPanelController { - - private FileExtMismatchConfigPanel panel; - private final PropertyChangeSupport pcs = new PropertyChangeSupport(this); - private boolean changed; - private static final Logger logger = Logger.getLogger(FileExtMismatchOptionsPanelController.class.getName()); - @Override - public void update() { - getPanel().load(); - changed = false; - } - - @Override - public 
void applyChanges() { - //getPanel().store(); - getPanel().ok(); - changed = false; - } - - @Override - public void cancel() { - getPanel().cancel(); - } - - @Override - public boolean isValid() { - return getPanel().valid(); - } - - @Override - public boolean isChanged() { - return changed; - } - - @Override - public HelpCtx getHelpCtx() { - return null; // new HelpCtx("...ID") if you have a help set - } - - @Override - public JComponent getComponent(Lookup masterLookup) { - return getPanel(); - } - - @Override - public void addPropertyChangeListener(PropertyChangeListener l) { - pcs.addPropertyChangeListener(l); - } - - @Override - public void removePropertyChangeListener(PropertyChangeListener l) { - pcs.removePropertyChangeListener(l); - } - - private FileExtMismatchConfigPanel getPanel() { - if (panel == null) { - panel = new FileExtMismatchConfigPanel(); - } - return panel; - } - - void changed() { - if (!changed) { - changed = true; - - try { - pcs.firePropertyChange(OptionsPanelController.PROP_CHANGED, false, true); - } - catch (Exception e) { - logger.log(Level.SEVERE, "FileExtMismatchOptionsPanelController listener threw exception", e); - MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to FileExtMismatchOptionsPanelController updates. See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); - } - } - - try { - pcs.firePropertyChange(OptionsPanelController.PROP_VALID, null, null); - } - catch (Exception e) { - logger.log(Level.SEVERE, "FileExtMismatchOptionsPanelController listener threw exception", e); - MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to FileExtMismatchOptionsPanelController updates. See log to determine which module. 
Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); - } - } -} +//import java.beans.PropertyChangeListener; +//import java.beans.PropertyChangeSupport; +//import javax.swing.JComponent; +//import org.netbeans.spi.options.OptionsPanelController; +//import org.openide.util.HelpCtx; +//import org.openide.util.Lookup; +//import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +// +//@OptionsPanelController.TopLevelRegistration( +// categoryName = "#OptionsCategory_Name_FileExtMismatchOptions", +//iconBase = "org/sleuthkit/autopsy/fileextmismatch/options-icon.png", +//position = 4, +//keywords = "#OptionsCategory_FileExtMismatch", +//keywordsCategory = "KeywordSearchOptions") +//@org.openide.util.NbBundle.Messages({"OptionsCategory_Name_FileExtMismatchOptions=File Ext Mismatch", "OptionsCategory_FileExtMismatch=File Ext Mismatch"}) +//public final class FileExtMismatchOptionsPanelController extends OptionsPanelController { +// +// private FileExtMismatchConfigPanel panel; +// private final PropertyChangeSupport pcs = new PropertyChangeSupport(this); +// private boolean changed; +// private static final Logger logger = Logger.getLogger(FileExtMismatchOptionsPanelController.class.getName()); +// @Override +// public void update() { +// getPanel().load(); +// changed = false; +// } +// +// @Override +// public void applyChanges() { +// //getPanel().store(); +// getPanel().ok(); +// changed = false; +// } +// +// @Override +// public void cancel() { +// getPanel().cancel(); +// } +// +// @Override +// public boolean isValid() { +// return getPanel().valid(); +// } +// +// @Override +// public boolean isChanged() { +// return changed; +// } +// +// @Override +// public HelpCtx getHelpCtx() { +// return null; // new HelpCtx("...ID") if you have a help set +// } +// +// @Override +// public JComponent getComponent(Lookup masterLookup) { +// return getPanel(); +// } +// +// @Override +// public void addPropertyChangeListener(PropertyChangeListener l) { +// pcs.addPropertyChangeListener(l); +// } +// +// @Override +// public void removePropertyChangeListener(PropertyChangeListener l) { +// pcs.removePropertyChangeListener(l); +// } +// +// private FileExtMismatchConfigPanel getPanel() { +// if (panel == null) { +// panel = new FileExtMismatchConfigPanel(); +// } +// return panel; +// } +// +// void changed() { +// if (!changed) { +// changed = true; +// +// try { +// pcs.firePropertyChange(OptionsPanelController.PROP_CHANGED, false, true); +// } +// catch (Exception e) { +// logger.log(Level.SEVERE, "FileExtMismatchOptionsPanelController listener threw exception", e); +// MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to FileExtMismatchOptionsPanelController updates. See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); +// } +// } +// +// try { +// pcs.firePropertyChange(OptionsPanelController.PROP_VALID, null, null); +// } +// catch (Exception e) { +// logger.log(Level.SEVERE, "FileExtMismatchOptionsPanelController listener threw exception", e); +// MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to FileExtMismatchOptionsPanelController updates. See log to determine which module. 
Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); +// } +// } +//} diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchSimpleConfigPanel.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchSimpleConfigPanel.java index ff0b7faa60..be6c6852a8 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchSimpleConfigPanel.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchSimpleConfigPanel.java @@ -83,11 +83,11 @@ class FileExtMismatchSimpleConfigPanel extends javax.swing.JPanel { }// //GEN-END:initComponents private void skipNoExtCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_skipNoExtCheckBoxActionPerformed - FileExtMismatchIngestModule.setSkipNoExt(skipNoExtCheckBox.isSelected()); +// FileExtMismatchIngestModule.setSkipNoExt(skipNoExtCheckBox.isSelected()); RJCTODO }//GEN-LAST:event_skipNoExtCheckBoxActionPerformed private void skipTextPlainActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_skipTextPlainActionPerformed - FileExtMismatchIngestModule.setSkipTextPlain(skipTextPlain.isSelected()); +// FileExtMismatchIngestModule.setSkipTextPlain(skipTextPlain.isSelected()); // RJCTODO }//GEN-LAST:event_skipTextPlainActionPerformed diff --git a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchXML.java b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchXML.java index 5283d5c04f..907951b88b 100644 --- a/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchXML.java +++ b/FileExtMismatch/src/org/sleuthkit/autopsy/fileextmismatch/FileExtMismatchXML.java @@ -19,168 +19,167 @@ package org.sleuthkit.autopsy.fileextmismatch; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.logging.Level; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.XMLUtil; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NodeList; +//import java.io.File; +//import java.io.IOException; +//import java.util.ArrayList; +//import java.util.Arrays; +//import java.util.HashMap; +//import java.util.Iterator; +//import java.util.List; +//import java.util.logging.Level; +//import javax.xml.parsers.DocumentBuilder; +//import javax.xml.parsers.DocumentBuilderFactory; +//import javax.xml.parsers.ParserConfigurationException; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.coreutils.XMLUtil; +//import org.w3c.dom.Document; +//import org.w3c.dom.Element; +//import org.w3c.dom.NodeList; /** * Storage of file extension mismatch configuration, which maps mimetypes to * allowable filename extensions. 
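 *
 * The element constants below imply a config layout along these lines (a
 * hypothetical example; the MIME type and extensions are illustrative):
 *
 *   <mismatch_config>
 *     <signature mimetype="image/jpeg">
 *       <ext>jpg</ext>
 *       <ext>jpeg</ext>
 *     </signature>
 *   </mismatch_config>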
*/ -class FileExtMismatchXML { - private static final Logger logger = Logger.getLogger(FileExtMismatchXML.class.getName()); - private static FileExtMismatchXML defaultInstance = null; - - private static final String ENCODING = "UTF-8"; - private static final String XSDFILE = "MismatchConfigSchema.xsd"; - - private static final String ROOT_EL = "mismatch_config"; - private static final String SIG_EL = "signature"; - private static final String EXT_EL = "ext"; - private static final String SIG_MIMETYPE_ATTR = "mimetype"; - - private static final String DEFAULT_CONFIG_FILE_NAME = "mismatch_config.xml"; - - protected String filePath; - - FileExtMismatchXML(String filePath) { - this.filePath = filePath; - - try { - boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error copying default mismatch configuration to user dir ", ex); - } - } - - /** - * Singleton provides default configuration from user's directory; user CAN - * modify this file. - */ - public static FileExtMismatchXML getDefault() { - if (defaultInstance == null) { - final String FILTER_CONFIG_FILE = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_CONFIG_FILE_NAME; - defaultInstance = new FileExtMismatchXML(FILTER_CONFIG_FILE); - } - return defaultInstance; - } - - /** - * Load and parse XML - * - * @return Loaded hash map or null on error or null if data does not exist - */ - public HashMap load() { - HashMap sigTypeToExtMap = new HashMap<>(); - - try - { - final Document doc = XMLUtil.loadDoc(FileExtMismatchXML.class, filePath, XSDFILE); - if (doc == null) { - return null; - } - - Element root = doc.getDocumentElement(); - if (root == null) { - logger.log(Level.SEVERE, "Error loading config file: invalid file format (bad root)."); - return null; - } - - NodeList sigNList = root.getElementsByTagName(SIG_EL); - final int numSigs = sigNList.getLength(); - - if (numSigs == 0) { - return null; - } - - for(int sigIndex = 0; sigIndex < numSigs; ++sigIndex) { - Element sigEl = (Element)sigNList.item(sigIndex); - final String mimetype = sigEl.getAttribute(SIG_MIMETYPE_ATTR); - - NodeList extNList = sigEl.getElementsByTagName(EXT_EL); - final int numExts = extNList.getLength(); - - if (numExts != 0) { - List extStrings = new ArrayList<>(); - for(int extIndex = 0; extIndex < numExts; ++extIndex) { - Element extEl = (Element)extNList.item(extIndex); - extStrings.add(extEl.getTextContent()); - } - String[] sarray = (String[])extStrings.toArray(new String[0]); - sigTypeToExtMap.put(mimetype, sarray); - } else { - sigTypeToExtMap.put(mimetype, null); //ok to have an empty type (the ingest module will not use it) - } - } - - } catch (Exception e) { - logger.log(Level.SEVERE, "Error loading config file.", e); - return null; - } - return sigTypeToExtMap; - } - - - /** - * Save XML to filePath, overwriting it if it already exists - * - * @param sigTypeToExtMap String arrays of extensions mapped to each string mimetype. 
- * @return Loaded hash map or null on error or null if data does not exist - */ - public boolean save(HashMap sigTypeToExtMap) { - boolean success = false; - - DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); - - try { - DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); - Document doc = docBuilder.newDocument(); - - Element rootEl = doc.createElement(ROOT_EL); - doc.appendChild(rootEl); - - Iterator keyIt = sigTypeToExtMap.keySet().iterator(); - - while (keyIt.hasNext()) { - String key = keyIt.next(); - Element sigEl = doc.createElement(SIG_EL); - sigEl.setAttribute(SIG_MIMETYPE_ATTR, key); - - String[] extArray = sigTypeToExtMap.get(key); - if (extArray != null) { - ArrayList extList = new ArrayList<>(Arrays.asList(extArray)); - for (String ext : extList) { - Element extEl = doc.createElement(EXT_EL); - extEl.setTextContent(ext); - sigEl.appendChild(extEl); - } - } - rootEl.appendChild(sigEl); - } - - success = XMLUtil.saveDoc(FileExtMismatchXML.class, filePath, ENCODING, doc); - - } catch (ParserConfigurationException e) { - logger.log(Level.SEVERE, "Error saving keyword list: can't initialize parser.", e); - success = false; - } - return success; - } - -} +//class FileExtMismatchXML { +// private static final Logger logger = Logger.getLogger(FileExtMismatchXML.class.getName()); +// private static FileExtMismatchXML defaultInstance = null; +// +// private static final String ENCODING = "UTF-8"; +// private static final String XSDFILE = "MismatchConfigSchema.xsd"; +// +// private static final String ROOT_EL = "mismatch_config"; +// private static final String SIG_EL = "signature"; +// private static final String EXT_EL = "ext"; +// private static final String SIG_MIMETYPE_ATTR = "mimetype"; +// +// private static final String DEFAULT_CONFIG_FILE_NAME = "mismatch_config.xml"; +// +// protected String filePath; +// +// FileExtMismatchXML(String filePath) { +// this.filePath = filePath; +// +// try { +// boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error copying default mismatch configuration to user dir ", ex); +// } +// } +// +// /** +// * Singleton provides default configuration from user's directory; user CAN +// * modify this file. 
+// */ +// public static FileExtMismatchXML getDefault() { +// if (defaultInstance == null) { +// final String FILTER_CONFIG_FILE = PlatformUtil.getUserConfigDirectory() + File.separator + DEFAULT_CONFIG_FILE_NAME; +// defaultInstance = new FileExtMismatchXML(FILTER_CONFIG_FILE); +// } +// return defaultInstance; +// } +// +// /** +// * Load and parse XML +// * +// * @return Loaded hash map or null on error or null if data does not exist +// */ +// public HashMap load() { +// HashMap sigTypeToExtMap = new HashMap<>(); +// +// try +// { +// final Document doc = XMLUtil.loadDoc(FileExtMismatchXML.class, filePath, XSDFILE); +// if (doc == null) { +// return null; +// } +// +// Element root = doc.getDocumentElement(); +// if (root == null) { +// logger.log(Level.SEVERE, "Error loading config file: invalid file format (bad root)."); +// return null; +// } +// +// NodeList sigNList = root.getElementsByTagName(SIG_EL); +// final int numSigs = sigNList.getLength(); +// +// if (numSigs == 0) { +// return null; +// } +// +// for(int sigIndex = 0; sigIndex < numSigs; ++sigIndex) { +// Element sigEl = (Element)sigNList.item(sigIndex); +// final String mimetype = sigEl.getAttribute(SIG_MIMETYPE_ATTR); +// +// NodeList extNList = sigEl.getElementsByTagName(EXT_EL); +// final int numExts = extNList.getLength(); +// +// if (numExts != 0) { +// List extStrings = new ArrayList<>(); +// for(int extIndex = 0; extIndex < numExts; ++extIndex) { +// Element extEl = (Element)extNList.item(extIndex); +// extStrings.add(extEl.getTextContent()); +// } +// String[] sarray = (String[])extStrings.toArray(new String[0]); +// sigTypeToExtMap.put(mimetype, sarray); +// } else { +// sigTypeToExtMap.put(mimetype, null); //ok to have an empty type (the ingest module will not use it) +// } +// } +// +// } catch (Exception e) { +// logger.log(Level.SEVERE, "Error loading config file.", e); +// return null; +// } +// return sigTypeToExtMap; +// } +// +// +// /** +// * Save XML to filePath, overwriting it if it already exists +// * +// * @param sigTypeToExtMap String arrays of extensions mapped to each string mimetype. 
+// * @return Loaded hash map or null on error or null if data does not exist +// */ +// public boolean save(HashMap sigTypeToExtMap) { +// boolean success = false; +// +// DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); +// +// try { +// DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); +// Document doc = docBuilder.newDocument(); +// +// Element rootEl = doc.createElement(ROOT_EL); +// doc.appendChild(rootEl); +// +// Iterator keyIt = sigTypeToExtMap.keySet().iterator(); +// +// while (keyIt.hasNext()) { +// String key = keyIt.next(); +// Element sigEl = doc.createElement(SIG_EL); +// sigEl.setAttribute(SIG_MIMETYPE_ATTR, key); +// +// String[] extArray = sigTypeToExtMap.get(key); +// if (extArray != null) { +// ArrayList extList = new ArrayList<>(Arrays.asList(extArray)); +// for (String ext : extList) { +// Element extEl = doc.createElement(EXT_EL); +// extEl.setTextContent(ext); +// sigEl.appendChild(extEl); +// } +// } +// rootEl.appendChild(sigEl); +// } +// +// success = XMLUtil.saveDoc(FileExtMismatchXML.class, filePath, ENCODING, doc); +// +// } catch (ParserConfigurationException e) { +// logger.log(Level.SEVERE, "Error saving keyword list: can't initialize parser.", e); +// success = false; +// } +// return success; +// } +// +//} diff --git a/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdIngestModule.java b/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdIngestModule.java index 8ab0b53bbe..b1e4460831 100644 --- a/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdIngestModule.java +++ b/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdIngestModule.java @@ -19,178 +19,177 @@ package org.sleuthkit.autopsy.filetypeid; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.TskData; -import org.sleuthkit.datamodel.TskData.FileKnown; -import org.sleuthkit.datamodel.TskException; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.TskData; +//import org.sleuthkit.datamodel.TskData.FileKnown; +//import org.sleuthkit.datamodel.TskException; /** * Detects the type of a file based on signature (magic) values. * Posts results to the blackboard. 
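 *
 * Detection is delegated to a FileTypeDetectionInterface implementation
 * (Tika-based, per the field below); at its core that reduces to a sketch
 * like this, where the buffer handling is illustrative:
 *
 *   Tika tika = new Tika();
 *   byte[] leadingBytes = readFirstBytes(file);   // hypothetical helper
 *   String mimeType = tika.detect(leadingBytes, file.getName());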
*/ - public class FileTypeIdIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { - private static FileTypeIdIngestModule defaultInstance = null; - public final static String MODULE_NAME = "File Type Identification"; - public final static String MODULE_DESCRIPTION = "Matches file types based on binary signatures."; - public final static String MODULE_VERSION = Version.getVersion(); - private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName()); - private static long matchTime = 0; - private static int messageId = 0; - private static long numFiles = 0; - private static boolean skipKnown = true; - private static long MIN_FILE_SIZE = 512; - - private FileTypeIdSimpleConfigPanel simpleConfigPanel; - private IngestServices services; - - // The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed. - // If desired in the future to be more knowledgable about weird files or rare formats, we could - // actually have a list of detectors which are called in order until a match is found. - private FileTypeDetectionInterface detector = new TikaFileTypeDetector(); - //private FileTypeDetectionInterface detector = new JMimeMagicFileTypeDetector(); - //private FileTypeDetectionInterface detector = new MimeUtilFileTypeDetector(); - - // Private to ensure Singleton status - private FileTypeIdIngestModule() { - } - - // File-level ingest modules are currently singleton -- this is required - public static synchronized FileTypeIdIngestModule getDefault() { - //defaultInstance is a private static class variable - if (defaultInstance == null) { - defaultInstance = new FileTypeIdIngestModule(); - } - return defaultInstance; - } - - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - } - - @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - // skip non-files - if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || - (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { - - return ProcessResult.OK; - } - - if (skipKnown && (abstractFile.getKnown() == FileKnown.KNOWN)) { - return ProcessResult.OK; - } - - if (abstractFile.getSize() < MIN_FILE_SIZE) { - return ProcessResult.OK; - } - - try - { - long startTime = System.currentTimeMillis(); - FileTypeDetectionInterface.FileIdInfo fileId = detector.attemptMatch(abstractFile); - matchTime += (System.currentTimeMillis() - startTime); - numFiles++; - - if (!fileId.type.isEmpty()) { - // add artifact - BlackboardArtifact bart = abstractFile.getGenInfoArtifact(); - BlackboardAttribute batt = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID(), MODULE_NAME, fileId.type); - bart.addAttribute(batt); - - // we don't fire the event because we just updated TSK_GEN_INFO, which isn't displayed in the tree and is vague. - } - return ProcessResult.OK; - } catch (TskException ex) { - logger.log(Level.WARNING, "Error matching file signature", ex); - return ProcessResult.ERROR; - } - catch (Exception e) { - logger.log(Level.WARNING, "Error matching file signature", e); - return ProcessResult.ERROR; - } - } - - - @Override - public void complete() { - StringBuilder detailsSb = new StringBuilder(); - //details - detailsSb.append(""); - - detailsSb.append(""); - - detailsSb.append("\n"); - detailsSb.append("\n"); - detailsSb.append("
"+MODULE_DESCRIPTION+"
Total Processing Time").append(matchTime).append("
Total Files Processed").append(numFiles).append("
"); - - services.postMessage(IngestMessage.createMessage(++messageId, IngestMessage.MessageType.INFO, this, "File Type Id Results", detailsSb.toString())); - } - - @Override - public void stop() { - //do nothing - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public boolean hasSimpleConfiguration() { - return true; - } - - @Override - public javax.swing.JPanel getSimpleConfiguration(String context) { - if (simpleConfigPanel == null) { - simpleConfigPanel = new FileTypeIdSimpleConfigPanel(); - } - - return simpleConfigPanel; - } - - @Override - public boolean hasBackgroundJobsRunning() { - // we're single threaded... - return false; - } - - public static void setSkipKnown(boolean flag) { - skipKnown = flag; - } - - /** - * Validate if a given mime type is in the detector's registry. - * @param mimeType Full string of mime type, e.g. "text/html" - * @return true if detectable - */ - public static boolean isMimeTypeDetectable(String mimeType) { - FileTypeDetectionInterface detector = new TikaFileTypeDetector(); - return detector.isMimeTypeDetectable(mimeType); - } - -} \ No newline at end of file +// public class FileTypeIdIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile { +// private static FileTypeIdIngestModule defaultInstance = null; +// public final static String MODULE_NAME = "File Type Identification"; +// public final static String MODULE_DESCRIPTION = "Matches file types based on binary signatures."; +// public final static String MODULE_VERSION = Version.getVersion(); +// private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName()); +// private static long matchTime = 0; +// private static int messageId = 0; +// private static long numFiles = 0; +// private static boolean skipKnown = true; +// private static long MIN_FILE_SIZE = 512; +// +// private FileTypeIdSimpleConfigPanel simpleConfigPanel; +// private IngestServices services; +// +// // The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed. +// // If desired in the future to be more knowledgable about weird files or rare formats, we could +// // actually have a list of detectors which are called in order until a match is found. 
+// private FileTypeDetectionInterface detector = new TikaFileTypeDetector(); +// //private FileTypeDetectionInterface detector = new JMimeMagicFileTypeDetector(); +// //private FileTypeDetectionInterface detector = new MimeUtilFileTypeDetector(); +// +// // Private to ensure Singleton status +// private FileTypeIdIngestModule() { +// } +// +// // File-level ingest modules are currently singleton -- this is required +// public static synchronized FileTypeIdIngestModule getDefault() { +// //defaultInstance is a private static class variable +// if (defaultInstance == null) { +// defaultInstance = new FileTypeIdIngestModule(); +// } +// return defaultInstance; +// } +// +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// } +// +// @Override +// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { +// // skip non-files +// if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || +// (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) { +// +// return ProcessResult.OK; +// } +// +// if (skipKnown && (abstractFile.getKnown() == FileKnown.KNOWN)) { +// return ProcessResult.OK; +// } +// +// if (abstractFile.getSize() < MIN_FILE_SIZE) { +// return ProcessResult.OK; +// } +// +// try +// { +// long startTime = System.currentTimeMillis(); +// FileTypeDetectionInterface.FileIdInfo fileId = detector.attemptMatch(abstractFile); +// matchTime += (System.currentTimeMillis() - startTime); +// numFiles++; +// +// if (!fileId.type.isEmpty()) { +// // add artifact +// BlackboardArtifact bart = abstractFile.getGenInfoArtifact(); +// BlackboardAttribute batt = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_FILE_TYPE_SIG.getTypeID(), MODULE_NAME, fileId.type); +// bart.addAttribute(batt); +// +// // we don't fire the event because we just updated TSK_GEN_INFO, which isn't displayed in the tree and is vague. +// } +// return ProcessResult.OK; +// } catch (TskException ex) { +// logger.log(Level.WARNING, "Error matching file signature", ex); +// return ProcessResult.ERROR; +// } +// catch (Exception e) { +// logger.log(Level.WARNING, "Error matching file signature", e); +// return ProcessResult.ERROR; +// } +// } +// +// +// @Override +// public void complete() { +// StringBuilder detailsSb = new StringBuilder(); +// //details +// detailsSb.append(""); +// +// detailsSb.append(""); +// +// detailsSb.append("\n"); +// detailsSb.append("\n"); +// detailsSb.append("
"+MODULE_DESCRIPTION+"
Total Processing Time").append(matchTime).append("
Total Files Processed").append(numFiles).append("
"); +// +// services.postMessage(IngestMessage.createMessage(++messageId, IngestMessage.MessageType.INFO, this, "File Type Id Results", detailsSb.toString())); +// } +// +// @Override +// public void stop() { +// //do nothing +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public boolean hasSimpleConfiguration() { +// return true; +// } +// +// @Override +// public javax.swing.JPanel getSimpleConfiguration(String context) { +// if (simpleConfigPanel == null) { +// simpleConfigPanel = new FileTypeIdSimpleConfigPanel(); +// } +// +// return simpleConfigPanel; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// // we're single threaded... +// return false; +// } +// +// public static void setSkipKnown(boolean flag) { +// skipKnown = flag; +// } +// +// /** +// * Validate if a given mime type is in the detector's registry. +// * @param mimeType Full string of mime type, e.g. "text/html" +// * @return true if detectable +// */ +// public static boolean isMimeTypeDetectable(String mimeType) { +// FileTypeDetectionInterface detector = new TikaFileTypeDetector(); +// return detector.isMimeTypeDetectable(mimeType); +// } +// +//} \ No newline at end of file diff --git a/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdSimpleConfigPanel.java b/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdSimpleConfigPanel.java index 980f4e4f41..1222dd2960 100644 --- a/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdSimpleConfigPanel.java +++ b/FileTypeId/src/org/sleuthkit/autopsy/filetypeid/FileTypeIdSimpleConfigPanel.java @@ -72,7 +72,7 @@ package org.sleuthkit.autopsy.filetypeid; }// //GEN-END:initComponents private void skipKnownCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_skipKnownCheckBoxActionPerformed - FileTypeIdIngestModule.setSkipKnown(skipKnownCheckBox.isSelected()); +// FileTypeIdIngestModule.setSkipKnown(skipKnownCheckBox.isSelected()); RJCTODO }//GEN-LAST:event_skipKnownCheckBoxActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index c18916b250..56da384573 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -60,6 +60,11 @@ public class HashDbIngestModule implements FileIngestModule { HashDbIngestModule() { } + @Override + public String getDisplayName() { + return HashLookupModuleFactory.getModuleName(); + } + @Override public void init(long dataSourceTaskId) { services = IngestServices.getDefault(); diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java index cd02323ee1..b51fef4e14 100755 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashLookupModuleFactory.java @@ -36,7 +36,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactory; public class HashLookupModuleFactory extends AbstractIngestModuleFactory { @Override public String getModuleDisplayName() { - 
return NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName"); + return getModuleName(); + } + + static String getModuleName() { + return NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName"); } @Override diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractKeywordSearchPerformer.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractKeywordSearchPerformer.java index 7e9441c945..34f9fb8365 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractKeywordSearchPerformer.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractKeywordSearchPerformer.java @@ -81,10 +81,10 @@ abstract class AbstractKeywordSearchPerformer extends javax.swing.JPanel impleme @Override public void search() { - boolean isRunning = IngestManager.getDefault().isModuleRunning(KeywordSearchIngestModule.getDefault()); + boolean isIngestRunning = IngestManager.getDefault().isIngestRunning(); if (filesIndexed == 0) { - if (isRunning) { + if (isIngestRunning) { KeywordSearchUtil.displayDialog(keywordSearchErrorDialogHeader, NbBundle.getMessage(this.getClass(), "AbstractKeywordSearchPerformer.search.noFilesInIdxMsg", KeywordSearchSettings.getUpdateFrequency().getTime()), KeywordSearchUtil.DIALOG_MESSAGE_TYPE.ERROR); @@ -96,7 +96,7 @@ abstract class AbstractKeywordSearchPerformer extends javax.swing.JPanel impleme } //check if keyword search module ingest is running (indexing, etc) - if (isRunning) { + if (isIngestRunning) { if (KeywordSearchUtil.displayConfirmDialog(org.openide.util.NbBundle.getMessage(this.getClass(), "AbstractKeywordSearchPerformer.search.searchIngestInProgressTitle"), NbBundle.getMessage(this.getClass(), "AbstractKeywordSearchPerformer.search.ingestInProgressBody"), KeywordSearchUtil.DIALOG_MESSAGE_TYPE.WARN) == false) { return; diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java index e0b4dc0d89..ed6cdcf284 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel2.java @@ -44,8 +44,7 @@ class KeywordSearchConfigurationPanel2 extends javax.swing.JPanel implements Opt private void activateWidgets() { skipNSRLCheckBox.setSelected(KeywordSearchSettings.getSkipKnown()); - boolean enable = !IngestManager.getDefault().isIngestRunning() - && !IngestManager.getDefault().isModuleRunning(KeywordSearchIngestModule.getDefault()); + boolean enable = !IngestManager.getDefault().isIngestRunning() && !IngestManager.getDefault().isIngestRunning(); skipNSRLCheckBox.setEnabled(enable); setTimeSettingEnabled(enable); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel3.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel3.java index ff736bfc2e..cd74942a82 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel3.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel3.java @@ -149,8 +149,7 @@ class KeywordSearchConfigurationPanel3 extends javax.swing.JPanel implements Opt enableUTF8Checkbox.setSelected(utf8); final boolean extractEnabled = utf16 || utf8; - boolean ingestNotRunning = !IngestManager.getDefault().isIngestRunning() - && ! 
IngestManager.getDefault().isModuleRunning(KeywordSearchIngestModule.getDefault()); + boolean ingestNotRunning = !IngestManager.getDefault().isIngestRunning() && !IngestManager.getDefault().isIngestRunning(); //enable / disable checboxes activateScriptsCheckboxes(extractEnabled && ingestNotRunning); enableUTF16Checkbox.setEnabled(ingestNotRunning); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java index 2cf89f27c5..21b678109c 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java @@ -161,7 +161,7 @@ class KeywordSearchEditListPanel extends javax.swing.JPanel implements ListSelec - if (IngestManager.getDefault().isModuleRunning(KeywordSearchIngestModule.getDefault())) { + if (IngestManager.getDefault().isIngestRunning()) { initIngest(0); } else { initIngest(1); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 1caa9e47cd..c25df335d4 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -16,57 +16,56 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.sleuthkit.autopsy.keywordsearch; -import java.awt.event.ActionEvent; -import java.awt.event.ActionListener; -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.CancellationException; -import java.util.concurrent.locks.Lock; -import java.util.concurrent.locks.ReentrantReadWriteLock; -import java.util.logging.Level; - -import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.coreutils.Logger; -import javax.swing.SwingUtilities; -import javax.swing.SwingWorker; -import javax.swing.Timer; -import org.apache.tika.Tika; -import org.netbeans.api.progress.aggregate.AggregateProgressFactory; -import org.netbeans.api.progress.aggregate.AggregateProgressHandle; -import org.netbeans.api.progress.aggregate.ProgressContributor; -import org.openide.util.Cancellable; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.EscapeUtil; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import org.sleuthkit.autopsy.coreutils.StopWatch; -import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import 
org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.ReadContentInputStream; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; -import org.sleuthkit.datamodel.TskData.FileKnown; +//package org.sleuthkit.autopsy.keywordsearch; +// +//import java.awt.event.ActionEvent; +//import java.awt.event.ActionListener; +//import java.io.IOException; +//import java.io.InputStream; +//import java.util.ArrayList; +//import java.util.Collection; +//import java.util.HashMap; +//import java.util.HashSet; +//import java.util.List; +//import java.util.Map; +//import java.util.Set; +//import java.util.concurrent.CancellationException; +//import java.util.concurrent.locks.Lock; +//import java.util.concurrent.locks.ReentrantReadWriteLock; +//import java.util.logging.Level; +//import org.openide.util.NbBundle; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import javax.swing.SwingUtilities; +//import javax.swing.SwingWorker; +//import javax.swing.Timer; +//import org.apache.tika.Tika; +//import org.netbeans.api.progress.aggregate.AggregateProgressFactory; +//import org.netbeans.api.progress.aggregate.AggregateProgressHandle; +//import org.netbeans.api.progress.aggregate.ProgressContributor; +//import org.openide.util.Cancellable; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.coreutils.EscapeUtil; +//import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +//import org.sleuthkit.autopsy.coreutils.StopWatch; +//import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.ReadContentInputStream; +//import org.sleuthkit.datamodel.SleuthkitCase; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; +//import org.sleuthkit.datamodel.TskData.FileKnown; /** * An ingest module on a file level Performs indexing of allocated and Solr @@ -78,1202 +77,1202 @@ import org.sleuthkit.datamodel.TskData.FileKnown; * * Registered as a module in layer.xml */ -public final class KeywordSearchIngestModule extends IngestModuleAbstractFile { - - enum UpdateFrequency { - - FAST(20), - AVG(10), - SLOW(5), - SLOWEST(1), - DEFAULT(5); - private final int time; - - UpdateFrequency(int time) { - this.time = time; - } - - int getTime() { - return time; - } - }; - private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); - public static final String MODULE_NAME = NbBundle.getMessage(KeywordSearchIngestModule.class, - "KeywordSearchIngestModule.moduleName"); - public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class, - "KeywordSearchIngestModule.moduleDescription"); - final 
public static String MODULE_VERSION = Version.getVersion(); - private static KeywordSearchIngestModule instance = null; - private IngestServices services; - private Ingester ingester = null; - private volatile boolean commitIndex = false; //whether to commit index next time - private volatile boolean runSearcher = false; //whether to run searcher next time - private List keywords; //keywords to search - private List keywordLists; // lists currently being searched - private Map keywordToList; //keyword to list name mapping - private Timer commitTimer; - private Timer searchTimer; - private Indexer indexer; - private Searcher currentSearcher; - private Searcher finalSearcher; - private volatile boolean searcherDone = true; //mark as done, until it's inited - private Map> currentResults; - //only search images from current ingest, not images previously ingested/indexed - //accessed read-only by searcher thread - private Set curDataSourceIds; - private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy - private static final Lock searcherLock = rwLock.writeLock(); - private volatile int messageID = 0; - private boolean processedFiles; - private volatile boolean finalSearcherDone = true; //mark as done, until it's inited - private final String hashDBModuleName = NbBundle - .getMessage(this.getClass(), "KeywordSearchIngestModule.hashDbModuleName"); //NOTE this needs to match the HashDB module getName() - private SleuthkitCase caseHandle = null; - private static List textExtractors; - private static AbstractFileStringExtract stringExtractor; - private boolean initialized = false; - private KeywordSearchIngestSimplePanel simpleConfigPanel; - private KeywordSearchConfigurationPanel advancedConfigPanel; - private Tika tikaFormatDetector; - - - private enum IngestStatus { - TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested - STRINGS_INGESTED, ///< Strings were extracted from file - METADATA_INGESTED, ///< No content, so we just text_ingested metadata - SKIPPED_ERROR_INDEXING, ///< File was skipped because index engine had problems - SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of text extraction issues - SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it - }; - private Map ingestStatus; - - //private constructor to ensure singleton instance - private KeywordSearchIngestModule() { - } - - /** - * Returns singleton instance of the module, creates one if needed - * - * @return instance of the module - */ - public static synchronized KeywordSearchIngestModule getDefault() { - if (instance == null) { - instance = new KeywordSearchIngestModule(); - } - return instance; - } - - @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - - if (initialized == false) //error initializing indexing/Solr - { - logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); - ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - return ProcessResult.OK; - } - try { - //add data source id of the file to the set, keeping track of images being ingested - final long fileSourceId = caseHandle.getFileDataSource(abstractFile); - curDataSourceIds.add(fileSourceId); - - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error getting image id of file processed by keyword search: " + abstractFile.getName(), ex); - } - - if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) 
{ - //skip indexing of virtual dirs (no content, no real name) - will index children files - return ProcessResult.OK; - } - - //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it - if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { - indexer.indexFile(abstractFile, false); - //notify depending module that keyword search (would) encountered error for this file - ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_IO); - return ProcessResult.ERROR; - } - else if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { - //index meta-data only - indexer.indexFile(abstractFile, false); - return ProcessResult.OK; - } - - processedFiles = true; - - //check if it's time to commit after previous processing - checkRunCommitSearch(); - - //index the file and content (if the content is supported) - indexer.indexFile(abstractFile, true); - - return ProcessResult.OK; - } - - /** - * After all files are ingested, execute final index commit and final search - * Cleanup resources, threads, timers - */ - @Override - public void complete() { - if (initialized == false) { - return; - } - - //logger.log(Level.INFO, "complete()"); - commitTimer.stop(); - - //NOTE, we let the 1 before last searcher complete fully, and enqueue the last one - - //cancel searcher timer, ensure unwanted searcher does not start - //before we start the final one - if (searchTimer.isRunning()) { - searchTimer.stop(); - } - runSearcher = false; - - logger.log(Level.INFO, "Running final index commit and search"); - //final commit - commit(); - - postIndexSummary(); - - //run one last search as there are probably some new files committed - if (keywordLists != null && !keywordLists.isEmpty() && processedFiles == true) { - finalSearcher = new Searcher(keywordLists, true); //final searcher run - finalSearcher.execute(); - } else { - finalSearcherDone = true; - } - - //log number of files / chunks in index - //signal a potential change in number of text_ingested files - try { - final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); - final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); - logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); - logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); - } catch (NoOpenCoreException ex) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); - } - - //cleanup done in final searcher - - //postSummary(); - } - - /** - * Handle stop event (ingest interrupted) Cleanup resources, threads, timers - */ - @Override - public void stop() { - logger.log(Level.INFO, "stop()"); - - //stop timer - commitTimer.stop(); - //stop currentSearcher - if (currentSearcher != null) { - currentSearcher.cancel(true); - } - - //cancel searcher timer, ensure unwanted searcher does not start - if (searchTimer.isRunning()) { - searchTimer.stop(); - } - runSearcher = false; - finalSearcherDone = true; - - - //commit uncommited files, don't search again - commit(); - - //postSummary(); - - cleanup(); - } - - /** - * Common cleanup code when module stops or final searcher completes - */ - private void cleanup() { - ingestStatus.clear(); - currentResults.clear(); - curDataSourceIds.clear(); - 
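A simplified sketch of the shutdown pattern used by complete() and stop() above and the cleanup() they share (hypothetical names throughout; in the module itself, complete() defers cleanup() to the final searcher rather than calling it directly):

import javax.swing.Timer;

// Illustrative sketch, not the module's code: complete() finishes pending
// work, stop() abandons it, and both converge on a single cleanup so state
// is released exactly once, guarded by the initialized flag.
abstract class ShutdownSketch {
    private Timer commitTimer = new Timer(60000, null); // stand-in for the module's commit timer
    private boolean initialized = true;

    void complete() {              // normal end of ingest
        if (!initialized) { return; }
        commitTimer.stop();
        finishPendingWork();       // e.g. a final commit and final search
        cleanup();
    }

    void stop() {                  // ingest interrupted: skip the final pass
        if (!initialized) { return; }
        commitTimer.stop();
        cleanup();
    }

    private void cleanup() {
        commitTimer = null;
        initialized = false;
    }

    abstract void finishPendingWork();
}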
currentSearcher = null; - //finalSearcher = null; //do not collect, might be finalizing - - commitTimer.stop(); - searchTimer.stop(); - commitTimer = null; - //searchTimer = null; // do not collect, final searcher might still be running, in which case it throws an exception - - textExtractors.clear(); - textExtractors = null; - stringExtractor = null; - - keywords.clear(); - keywordLists.clear(); - keywordToList.clear(); - - tikaFormatDetector = null; - - initialized = false; - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - /** - * Initializes the module for new ingest run Sets up threads, timers, - * retrieves settings, keyword lists to run on - * - */ - @Override - public void init(IngestModuleInit initContext) { - logger.log(Level.INFO, "init()"); - services = IngestServices.getDefault(); - initialized = false; - - caseHandle = Case.getCurrentCase().getSleuthkitCase(); - - tikaFormatDetector = new Tika(); - - ingester = Server.getIngester(); - - final Server server = KeywordSearch.getServer(); - try { - if (!server.isRunning()) { - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - logger.log(Level.SEVERE, msg); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - - } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); - //this means Solr is not properly initialized - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - } - - - //initialize extractors - stringExtractor = new AbstractFileStringExtract(); - stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); - stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); - - - //log the scripts used for debugging - final StringBuilder sbScripts = new StringBuilder(); - for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { - sbScripts.append(s.name()).append(" "); - } - logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); - - textExtractors = new ArrayList(); - //order matters, more specific extractors first - textExtractors.add(new AbstractFileHtmlExtract()); - textExtractors.add(new AbstractFileTikaTextExtract()); - - - ingestStatus = new HashMap(); - - keywords = new ArrayList(); - keywordLists = new ArrayList(); - keywordToList = new HashMap(); - - initKeywords(); - - if (keywords.isEmpty() || keywordLists.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); - } - - processedFiles = false; - finalSearcherDone = false; - searcherDone = true; //make sure to start the initial currentSearcher - //keeps track of all results per run not to repeat reporting the same hits - currentResults = new HashMap>(); - - 
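Just below, init() derives a millisecond interval from the user-set update frequency and wires two javax.swing.Timer instances whose listeners merely flip volatile flags; the ingest thread later polls those flags. The same pattern as a self-contained sketch (hypothetical names):

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Timer;

// Illustrative sketch, not the module's code: Swing timers fire on the EDT,
// so the listeners only set volatile flags; the ingest thread checks the
// flags and performs the actual commit/search work, as the module does.
class PeriodicFlagsSketch {
    volatile boolean commitDue = false;
    volatile boolean searchDue = false;

    void start(int minutes) {
        final int intervalMs = minutes * 60 * 1000; // same minutes-to-ms conversion as init()
        new Timer(intervalMs, new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) { commitDue = true; }
        }).start();
        new Timer(intervalMs, new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent e) { searchDue = true; }
        }).start();
    }
}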
curDataSourceIds = new HashSet(); - - indexer = new Indexer(); - - final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; - logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); - logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); - - commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); - searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); - - initialized = true; - - commitTimer.start(); - searchTimer.start(); - } - - @Override - public boolean hasSimpleConfiguration() { - return true; - } - - @Override - public boolean hasAdvancedConfiguration() { - return true; - } - - @Override - public javax.swing.JPanel getSimpleConfiguration(String context) { - KeywordSearchListsXML.getCurrent().reload(); - - if (null == simpleConfigPanel) { - simpleConfigPanel = new KeywordSearchIngestSimplePanel(); - } - else { - simpleConfigPanel.load(); - } - - return simpleConfigPanel; - } - - @Override - public javax.swing.JPanel getAdvancedConfiguration(String context) { - if (advancedConfigPanel == null) { - advancedConfigPanel = new KeywordSearchConfigurationPanel(); - } - - advancedConfigPanel.load(); - return advancedConfigPanel; - } - - @Override - public void saveAdvancedConfiguration() { - if (advancedConfigPanel != null) { - advancedConfigPanel.store(); - } - - if (simpleConfigPanel != null) { - simpleConfigPanel.load(); - } - } - - @Override - public void saveSimpleConfiguration() { - KeywordSearchListsXML.getCurrent().save(); - } - - /** - * The modules maintains background threads, return true if background - * threads are running or there are pending tasks to be run in the future, - * such as the final search post-ingest completion - * - * @return - */ - @Override - public boolean hasBackgroundJobsRunning() { - if ((currentSearcher != null && searcherDone == false) - || (finalSearcherDone == false)) { - return true; - } else { - return false; - } - - } - - /** - * Commits index and notifies listeners of index update - */ - private void commit() { - if (initialized) { - logger.log(Level.INFO, "Commiting index"); - ingester.commit(); - logger.log(Level.INFO, "Index comitted"); - //signal a potential change in number of text_ingested files - indexChangeNotify(); - } - } - - /** - * Posts inbox message with summary of text_ingested files - */ - private void postIndexSummary() { - int text_ingested = 0; - int metadata_ingested = 0; - int strings_ingested = 0; - int error_text = 0; - int error_index = 0; - int error_io = 0; - for (IngestStatus s : ingestStatus.values()) { - switch (s) { - case TEXT_INGESTED: - ++text_ingested; - break; - case METADATA_INGESTED: - ++metadata_ingested; - break; - case STRINGS_INGESTED: - ++strings_ingested; - break; - case SKIPPED_ERROR_TEXTEXTRACT: - error_text++; - break; - case SKIPPED_ERROR_INDEXING: - error_index++; - break; - case SKIPPED_ERROR_IO: - error_io++; - break; - default: - ; - } - } - - StringBuilder msg = new StringBuilder(); - msg.append(""); - msg.append(""); - msg.append(""); - msg.append(""); - msg.append(""); - msg.append(""); - msg.append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("").append(text_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.fileGenStringsHead")).append("").append(strings_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.mdOnlyLbl")).append("").append(metadata_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrLbl")).append("").append(error_index).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errTxtLbl")).append("").append(error_text).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append("
"); - String indexStats = msg.toString(); - logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); - services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats)); - if (error_index > 0) { - MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrsTitle"), - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrMsgFiles", error_index)); - } - else if (error_io + error_text > 0) { - MessageNotifyUtil.Notify.warn(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxWarnMsgTitle"), - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrReadFilesMsg")); - } - } - - /** - * Helper method to notify listeners on index update - */ - private void indexChangeNotify() { - //signal a potential change in number of text_ingested files - try { - final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); - KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); - } catch (NoOpenCoreException ex) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); - } - } - - /** - * Initialize the keyword search lists and associated keywords from the XML - * loader Use the lists to ingest that are set in the permanent XML - * configuration - */ - private void initKeywords() { - addKeywordLists(null); - } - - /** - * If ingest is ongoing, this will add additional keyword search lists to - * the ongoing ingest The lists to add may be temporary and not necessary - * set to be added to ingest permanently in the XML configuration. The lists - * will be reset back to original (permanent configuration state) on the - * next ingest. - * - * @param listsToAdd lists to add temporarily to the ongoing ingest - */ - void addKeywordLists(List listsToAdd) { - KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); - - keywords.clear(); - keywordLists.clear(); - keywordToList.clear(); - - StringBuilder sb = new StringBuilder(); - - for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) { - final String listName = list.getName(); - if (list.getUseForIngest() == true - || (listsToAdd != null && listsToAdd.contains(listName))) { - keywordLists.add(listName); - sb.append(listName).append(" "); - } - for (Keyword keyword : list.getKeywords()) { - if (!keywords.contains(keyword)) { - keywords.add(keyword); - keywordToList.put(keyword.getQuery(), list); - } - } - - } - - logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); - - } - - List getKeywordLists() { - return keywordLists == null ? new ArrayList() : keywordLists; - } - - /** - * Check if time to commit, if so, run commit. Then run search if search - * timer is also set. 
- */ - void checkRunCommitSearch() { - if (commitIndex) { - logger.log(Level.INFO, "Commiting index"); - commit(); - commitIndex = false; - - //after commit, check if time to run searcher - //NOTE commit/searcher timings don't need to align - //in worst case, we will run search next time after commit timer goes off, or at the end of ingest - if (searcherDone && runSearcher) { - //start search if previous not running - if (keywordLists != null && !keywordLists.isEmpty()) { - currentSearcher = new Searcher(keywordLists); - currentSearcher.execute();//searcher will stop timer and restart timer when done - } - } - } - } - - /** - * CommitTimerAction to run by commitTimer Sets a flag to indicate we are - * ready for commit - */ - private class CommitTimerAction implements ActionListener { - - private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName()); - - @Override - public void actionPerformed(ActionEvent e) { - commitIndex = true; - logger.log(Level.INFO, "CommitTimer awake"); - } - } - - /** - * SearchTimerAction to run by searchTimer Sets a flag to indicate we are - * ready to search - */ - private class SearchTimerAction implements ActionListener { - - private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName()); - - @Override - public void actionPerformed(ActionEvent e) { - runSearcher = true; - logger.log(Level.INFO, "SearchTimer awake"); - } - } - - /** - * File indexer, processes and indexes known/allocated files, - * unknown/unallocated files and directories accordingly - */ - private class Indexer { - - private final Logger logger = Logger.getLogger(Indexer.class.getName()); - - /** - * Extract text with Tika or other text extraction modules (by - * streaming) from the file Divide the file into chunks and index the - * chunks - * - * @param aFile file to extract strings from, divide into chunks and - * index - * @param detectedFormat mime-type detected, or null if none detected - * @return true if the file was text_ingested, false otherwise - * @throws IngesterException exception thrown if indexing failed - */ - private boolean extractTextAndIndex(AbstractFile aFile, String detectedFormat) throws IngesterException { - AbstractFileExtract fileExtract = null; - - //go over available text extractors in order, and pick the first one (most specific one) - for (AbstractFileExtract fe : textExtractors) { - if (fe.isSupported(aFile, detectedFormat)) { - fileExtract = fe; - break; - } - } - - if (fileExtract == null) { - logger.log(Level.INFO, "No text extractor found for file id:" - + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat); - return false; - } - - //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName()); - - //divide into chunks and index - return fileExtract.index(aFile); - } - - /** - * Extract strings using heuristics from the file and add to index. 
- * - * @param aFile file to extract strings from, divide into chunks and - * index - * @return true if the file was text_ingested, false otherwise - */ - private boolean extractStringsAndIndex(AbstractFile aFile) { - try { - if (stringExtractor.index(aFile)) { - ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); - return true; - } else { - logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); - return false; - } - } catch (IngesterException ex) { - logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - return false; - } - } - - /** - * Check with every extractor if it supports the file with the detected - * format - * - * @param aFile file to check for - * @param detectedFormat mime-type with detected format (such as - * text/plain) or null if not detected - * @return true if text extraction is supported - */ - private boolean isTextExtractSupported(AbstractFile aFile, String detectedFormat) { - for (AbstractFileExtract extractor : textExtractors) { - if (extractor.isContentTypeSpecific() == true - && extractor.isSupported(aFile, detectedFormat)) { - return true; - } - } - return false; - } - - /** - * Adds the file to the index. Detects file type, calls extractors, etc. - * - * @param aFile File to analyze - * @param indexContent False if only metadata should be text_ingested. True if - * content and metadata should be index. - */ - private void indexFile(AbstractFile aFile, boolean indexContent) { - //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); - - TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); - - // unallocated and unused blocks can only have strings extracted from them. - if ((aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS))) { - extractStringsAndIndex(aFile); - } - - final long size = aFile.getSize(); - //if not to index content, or a dir, or 0 content, index meta data only - if ((indexContent == false || aFile.isDir() || size == 0)) { - try { - ingester.ingest(aFile, false); //meta-data only - ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); - } - catch (IngesterException ex) { - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); - } - return; - } - - //use Tika to detect the format - String detectedFormat = null; - InputStream is = null; - try { - is = new ReadContentInputStream(aFile); - detectedFormat = tikaFormatDetector.detect(is, aFile.getName()); - } - catch (Exception e) { - logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e); - } - finally { - if (is != null) { - try { - is.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Could not close stream after detecting format using tika for file: " - + aFile, ex); - } - } - } - - // @@@ Add file type signature to blackboard here - - //logger.log(Level.INFO, "Detected format: " + aFile.getName() + " " + detectedFormat); - - // we skip archive formats that are opened by the archive module. - // @@@ We could have a check here to see if the archive module was enabled though... 
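The archive check that follows is a plain set-membership gate on the Tika-detected MIME type: archives get metadata-only indexing so the archive module can handle their contents. The gate in isolation (the MIME strings below are illustrative stand-ins, not the actual contents of AbstractFileExtract.ARCHIVE_MIME_TYPES):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Illustrative sketch, not the module's code: decide whether a detected
// format should skip content extraction and be indexed metadata-only.
final class ArchiveGateSketch {
    // Stand-in list; the real set lives in AbstractFileExtract.ARCHIVE_MIME_TYPES.
    private static final Set<String> ARCHIVE_TYPES = new HashSet<String>(Arrays.asList(
            "application/zip", "application/x-tar", "application/gzip"));

    static boolean metadataOnly(String detectedFormat) {
        return detectedFormat != null && ARCHIVE_TYPES.contains(detectedFormat);
    }
}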
- if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) { - try { - ingester.ingest(aFile, false); //meta-data only - ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); - } - catch (IngesterException ex) { - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); - } - return; - } - - boolean wasTextAdded = false; - if (isTextExtractSupported(aFile, detectedFormat)) { - //extract text with one of the extractors, divide into chunks and index with Solr - try { - //logger.log(Level.INFO, "indexing: " + aFile.getName()); - if (!extractTextAndIndex(aFile, detectedFormat)) { - logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); - } else { - ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); - wasTextAdded = true; - } - - } catch (IngesterException e) { - logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", " - + aFile.getName(), e); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - } catch (Exception e) { - logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", " - + aFile.getName(), e); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); - } - } - - // if it wasn't supported or had an error, default to strings - if (wasTextAdded == false) { - extractStringsAndIndex(aFile); - } - } - } - - /** - * Searcher responsible for searching the current index and writing results - * to blackboard and the inbox. Also, posts results to listeners as Ingest - * data events. Searches entire index, and keeps track of only new results - * to report and save. Runs as a background thread. - */ - private final class Searcher extends SwingWorker { - - /** - * Searcher has private copies/snapshots of the lists and keywords - */ - private List keywords; //keywords to search - private List keywordLists; // lists currently being searched - private Map keywordToList; //keyword to list name mapping - private AggregateProgressHandle progressGroup; - private final Logger logger = Logger.getLogger(Searcher.class.getName()); - private boolean finalRun = false; - - Searcher(List keywordLists) { - this.keywordLists = new ArrayList(keywordLists); - this.keywords = new ArrayList(); - this.keywordToList = new HashMap(); - //keywords are populated as searcher runs - } - - Searcher(List keywordLists, boolean finalRun) { - this(keywordLists); - this.finalRun = finalRun; - } - - @Override - protected Object doInBackground() throws Exception { - if (finalRun) { - logger.log(Level.INFO, "Pending start of new (final) searcher"); - } else { - logger.log(Level.INFO, "Pending start of new searcher"); - } - - final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + - (finalRun ? 
(" - "+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); - progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("+ - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") +")"), null, new Cancellable() { - @Override - public boolean cancel() { - logger.log(Level.INFO, "Cancelling the searcher by user."); - if (progressGroup != null) { - progressGroup.setDisplayName(displayName + " ("+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.cancelMsg") +"...)"); - } - return Searcher.this.cancel(true); - } - }, null); - - updateKeywords(); - - ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; - int i = 0; - for (Keyword keywordQuery : keywords) { - subProgresses[i] = - AggregateProgressFactory.createProgressContributor(keywordQuery.getQuery()); - progressGroup.addContributor(subProgresses[i]); - i++; - } - - progressGroup.start(); - - //block to ensure previous searcher is completely done with doInBackground() - //even after previous searcher cancellation, we need to check this - searcherLock.lock(); - final StopWatch stopWatch = new StopWatch(); - stopWatch.start(); - try { - logger.log(Level.INFO, "Started a new searcher"); - progressGroup.setDisplayName(displayName); - //make sure other searchers are not spawned - searcherDone = false; - runSearcher = false; - if (searchTimer.isRunning()) { - searchTimer.stop(); - } - - int keywordsSearched = 0; - - //updateKeywords(); - - for (Keyword keywordQuery : keywords) { - if (this.isCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); - return null; - } - - final String queryStr = keywordQuery.getQuery(); - final KeywordSearchListsAbstract.KeywordSearchList list = keywordToList.get(queryStr); - final String listName = list.getName(); - - //new subProgress will be active after the initial query - //when we know number of hits to start() with - if (keywordsSearched > 0) { - subProgresses[keywordsSearched - 1].finish(); - } - - - KeywordSearchQuery del = null; - - boolean isRegex = !keywordQuery.isLiteral(); - if (isRegex) { - del = new TermComponentQuery(keywordQuery); - } - else { - del = new LuceneQuery(keywordQuery); - del.escape(); - } - - //limit search to currently ingested data sources - //set up a filter with 1 or more image ids OR'ed - final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds); - del.addFilter(dataSourceFilter); - - Map> queryResult = null; - - try { - queryResult = del.performQuery(); - } catch (NoOpenCoreException ex) { - logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); - //no reason to continue with next query if recovery failed - //or wait for recovery to kick in and run again later - //likely case has closed and threads are being interrupted - return null; - } catch (CancellationException e) { - logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); - return null; - } catch (Exception e) { - logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); - continue; - } - - // calculate new results but substracting results already obtained in this ingest - // this creates a map of each keyword to the list of unique files that have that hit. 
- Map> newResults = filterResults(queryResult, isRegex); - - if (!newResults.isEmpty()) { - - //write results to BB - - //new artifacts created, to report to listeners - Collection newArtifacts = new ArrayList(); - - //scale progress bar more more granular, per result sub-progress, within per keyword - int totalUnits = newResults.size(); - subProgresses[keywordsSearched].start(totalUnits); - int unitProgress = 0; - String queryDisplayStr = keywordQuery.getQuery(); - if (queryDisplayStr.length() > 50) { - queryDisplayStr = queryDisplayStr.substring(0, 49) + "..."; - } - subProgresses[keywordsSearched].progress(listName + ": " + queryDisplayStr, unitProgress); - - - /* cycle through the keywords returned -- only one unless it was a regexp */ - for (final Keyword hitTerm : newResults.keySet()) { - //checking for cancellation between results - if (this.isCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery()); - return null; - } - - // update progress display - String hitDisplayStr = hitTerm.getQuery(); - if (hitDisplayStr.length() > 50) { - hitDisplayStr = hitDisplayStr.substring(0, 49) + "..."; - } - subProgresses[keywordsSearched].progress(listName + ": " + hitDisplayStr, unitProgress); - //subProgresses[keywordsSearched].progress(unitProgress); - - // this returns the unique files in the set with the first chunk that has a hit - Map contentHitsFlattened = ContentHit.flattenResults(newResults.get(hitTerm)); - for (final AbstractFile hitFile : contentHitsFlattened.keySet()) { - - // get the snippet for the first hit in the file - String snippet = null; - final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery()); - int chunkId = contentHitsFlattened.get(hitFile); - try { - snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true); - } catch (NoOpenCoreException e) { - logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); - //no reason to continue - return null; - } catch (Exception e) { - logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e); - continue; - } - - // write the blackboard artifact for this keyword in this file - KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName); - if (written == null) { - logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString()); - continue; - } - - newArtifacts.add(written.getArtifact()); - - //generate an ingest inbox message for this keyword in this file - if (list.getIngestMessages()) { - StringBuilder subjectSb = new StringBuilder(); - StringBuilder detailsSb = new StringBuilder(); - //final int hitFiles = newResults.size(); - - if (!keywordQuery.isLiteral()) { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl")); - } else { - subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl")); - } - //subjectSb.append("<"); - String uniqueKey = null; - BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()); - if (attr != null) { - final String keyword = attr.getValueString(); - subjectSb.append(keyword); - uniqueKey = keyword.toLowerCase(); - } - - //subjectSb.append(">"); - //String uniqueKey = queryStr; - - //details - detailsSb.append(""); - //hit - detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), 
"KeywordSearchIngestModule.kwHitLThLbl")); - detailsSb.append(""); - detailsSb.append(""); - - //preview - attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID()); - if (attr != null) { - detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl")); - detailsSb.append(""); - detailsSb.append(""); - - } - - //file - detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl")); - detailsSb.append(""); - - detailsSb.append(""); - - - //list - attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()); - detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl")); - detailsSb.append(""); - detailsSb.append(""); - - //regex - if (!keywordQuery.isLiteral()) { - attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()); - if (attr != null) { - detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl")); - detailsSb.append(""); - detailsSb.append(""); - - } - } - detailsSb.append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
").append(hitFile.getParentPath()).append(hitFile.getName()).append("
").append(attr.getValueString()).append("
").append(attr.getValueString()).append("
"); - - services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); - } - } //for each file hit - - ++unitProgress; - - }//for each hit term - - //update artifact browser - if (!newArtifacts.isEmpty()) { - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts)); - } - } //if has results - - //reset the status text before it goes away - subProgresses[keywordsSearched].progress(""); - - ++keywordsSearched; - - } //for each keyword - - } //end try block - catch (Exception ex) { - logger.log(Level.WARNING, "searcher exception occurred", ex); - } finally { - try { - finalizeSearcher(); - stopWatch.stop(); - logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); - } finally { - searcherLock.unlock(); - } - } - - return null; - } - - /** - * Sync-up the updated keywords from the currently used lists in the XML - */ - private void updateKeywords() { - KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); - - this.keywords.clear(); - this.keywordToList.clear(); - - for (String name : this.keywordLists) { - KeywordSearchListsAbstract.KeywordSearchList list = loader.getList(name); - for (Keyword k : list.getKeywords()) { - this.keywords.add(k); - this.keywordToList.put(k.getQuery(), list); - } - } - - - } - - //perform all essential cleanup that needs to be done right AFTER doInBackground() returns - //without relying on done() method that is not guaranteed to run after background thread completes - //NEED to call this method always right before doInBackground() returns - /** - * Performs the cleanup that needs to be done right AFTER - * doInBackground() returns without relying on done() method that is not - * guaranteed to run after background thread completes REQUIRED to call - * this method always right before doInBackground() returns - */ - private void finalizeSearcher() { - logger.log(Level.INFO, "Searcher finalizing"); - SwingUtilities.invokeLater(new Runnable() { - @Override - public void run() { - progressGroup.finish(); - } - }); - searcherDone = true; //next currentSearcher can start - - if (finalRun) { - //this is the final searcher - logger.log(Level.INFO, "The final searcher in this ingest done."); - finalSearcherDone = true; - - //run module cleanup - cleanup(); - } else { - //start counting time for a new searcher to start - //unless final searcher is pending - if (finalSearcher == null) { - //we need a new Timer object, because restarting previus will not cause firing of the action - final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; - searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); - searchTimer.start(); - } - } - } - - //calculate new results but substracting results already obtained in this ingest - //update currentResults map with the new results - private Map> filterResults(Map> queryResult, boolean isRegex) { - Map> newResults = new HashMap>(); - - for (String termResult : queryResult.keySet()) { - List queryTermResults = queryResult.get(termResult); - - //translate to list of IDs that we keep track of - List queryTermResultsIDs = new ArrayList(); - for (ContentHit ch : queryTermResults) { - queryTermResultsIDs.add(ch.getId()); - } - - Keyword termResultK = new Keyword(termResult, !isRegex); - List curTermResults = currentResults.get(termResultK); - if (curTermResults == null) { - currentResults.put(termResultK, 
queryTermResultsIDs); - newResults.put(termResultK, queryTermResults); - } else { - //some AbstractFile hits already exist for this keyword - for (ContentHit res : queryTermResults) { - if (!curTermResults.contains(res.getId())) { - //add to new results - List newResultsFs = newResults.get(termResultK); - if (newResultsFs == null) { - newResultsFs = new ArrayList(); - newResults.put(termResultK, newResultsFs); - } - newResultsFs.add(res); - curTermResults.add(res.getId()); - } - } - } - } - - return newResults; - - } - } -} +//public final class KeywordSearchIngestModule extends IngestModuleAbstractFile { +// +// enum UpdateFrequency { +// +// FAST(20), +// AVG(10), +// SLOW(5), +// SLOWEST(1), +// DEFAULT(5); +// private final int time; +// +// UpdateFrequency(int time) { +// this.time = time; +// } +// +// int getTime() { +// return time; +// } +// }; +// private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); +// public static final String MODULE_NAME = NbBundle.getMessage(KeywordSearchIngestModule.class, +// "KeywordSearchIngestModule.moduleName"); +// public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class, +// "KeywordSearchIngestModule.moduleDescription"); +// final public static String MODULE_VERSION = Version.getVersion(); +// private static KeywordSearchIngestModule instance = null; +// private IngestServices services; +// private Ingester ingester = null; +// private volatile boolean commitIndex = false; //whether to commit index next time +// private volatile boolean runSearcher = false; //whether to run searcher next time +// private List keywords; //keywords to search +// private List keywordLists; // lists currently being searched +// private Map keywordToList; //keyword to list name mapping +// private Timer commitTimer; +// private Timer searchTimer; +// private Indexer indexer; +// private Searcher currentSearcher; +// private Searcher finalSearcher; +// private volatile boolean searcherDone = true; //mark as done, until it's inited +// private Map> currentResults; +// //only search images from current ingest, not images previously ingested/indexed +// //accessed read-only by searcher thread +// private Set curDataSourceIds; +// private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy +// private static final Lock searcherLock = rwLock.writeLock(); +// private volatile int messageID = 0; +// private boolean processedFiles; +// private volatile boolean finalSearcherDone = true; //mark as done, until it's inited +// private final String hashDBModuleName = NbBundle +// .getMessage(this.getClass(), "KeywordSearchIngestModule.hashDbModuleName"); //NOTE this needs to match the HashDB module getName() +// private SleuthkitCase caseHandle = null; +// private static List textExtractors; +// private static AbstractFileStringExtract stringExtractor; +// private boolean initialized = false; +// private KeywordSearchIngestSimplePanel simpleConfigPanel; +// private KeywordSearchConfigurationPanel advancedConfigPanel; +// private Tika tikaFormatDetector; +// +// +// private enum IngestStatus { +// TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested +// STRINGS_INGESTED, ///< Strings were extracted from file +// METADATA_INGESTED, ///< No content, so we just text_ingested metadata +// SKIPPED_ERROR_INDEXING, ///< File was skipped because index engine had problems +// SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of 
text extraction issues +// SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it +// }; +// private Map ingestStatus; +// +// //private constructor to ensure singleton instance +// private KeywordSearchIngestModule() { +// } +// +// /** +// * Returns singleton instance of the module, creates one if needed +// * +// * @return instance of the module +// */ +// public static synchronized KeywordSearchIngestModule getDefault() { +// if (instance == null) { +// instance = new KeywordSearchIngestModule(); +// } +// return instance; +// } +// +// @Override +// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { +// +// if (initialized == false) //error initializing indexing/Solr +// { +// logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); +// ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +// return ProcessResult.OK; +// } +// try { +// //add data source id of the file to the set, keeping track of images being ingested +// final long fileSourceId = caseHandle.getFileDataSource(abstractFile); +// curDataSourceIds.add(fileSourceId); +// +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error getting image id of file processed by keyword search: " + abstractFile.getName(), ex); +// } +// +// if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { +// //skip indexing of virtual dirs (no content, no real name) - will index children files +// return ProcessResult.OK; +// } +// +// //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it +// if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { +// indexer.indexFile(abstractFile, false); +// //notify depending module that keyword search (would) encountered error for this file +// ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_IO); +// return ProcessResult.ERROR; +// } +// else if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { +// //index meta-data only +// indexer.indexFile(abstractFile, false); +// return ProcessResult.OK; +// } +// +// processedFiles = true; +// +// //check if it's time to commit after previous processing +// checkRunCommitSearch(); +// +// //index the file and content (if the content is supported) +// indexer.indexFile(abstractFile, true); +// +// return ProcessResult.OK; +// } +// +// /** +// * After all files are ingested, execute final index commit and final search +// * Cleanup resources, threads, timers +// */ +// @Override +// public void complete() { +// if (initialized == false) { +// return; +// } +// +// //logger.log(Level.INFO, "complete()"); +// commitTimer.stop(); +// +// //NOTE, we let the 1 before last searcher complete fully, and enqueue the last one +// +// //cancel searcher timer, ensure unwanted searcher does not start +// //before we start the final one +// if (searchTimer.isRunning()) { +// searchTimer.stop(); +// } +// runSearcher = false; +// +// logger.log(Level.INFO, "Running final index commit and search"); +// //final commit +// commit(); +// +// postIndexSummary(); +// +// //run one last search as there are probably some new files committed +// if (keywordLists != null && !keywordLists.isEmpty() && processedFiles == true) { +// finalSearcher = new Searcher(keywordLists, true); //final searcher run +// finalSearcher.execute(); +// } else { +// finalSearcherDone = true; +// } 
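The final-search handoff in the commented-out complete() above follows a common SwingWorker pattern: enqueue one last background pass if there is work, otherwise mark the phase done immediately. A minimal sketch (hypothetical names; the module itself flips finalSearcherDone from the searcher's own cleanup path rather than in done()):

import javax.swing.SwingWorker;

// Illustrative sketch, not the module's code: run one last background pass
// at shutdown if any work remains, otherwise mark the phase finished now.
abstract class FinalPassSketch {
    volatile boolean finalPassDone = false;

    void finish(boolean haveWork) {
        if (haveWork) {
            new SwingWorker<Void, Void>() {
                @Override
                protected Void doInBackground() {
                    runFinalPass();
                    return null;
                }

                @Override
                protected void done() { // runs on the EDT after the pass
                    finalPassDone = true;
                }
            }.execute();
        } else {
            finalPassDone = true;
        }
    }

    abstract void runFinalPass();
}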
+// +// //log number of files / chunks in index +// //signal a potential change in number of text_ingested files +// try { +// final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); +// final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); +// logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); +// logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); +// } catch (NoOpenCoreException ex) { +// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); +// } catch (KeywordSearchModuleException se) { +// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); +// } +// +// //cleanup done in final searcher +// +// //postSummary(); +// } +// +// /** +// * Handle stop event (ingest interrupted) Cleanup resources, threads, timers +// */ +// @Override +// public void stop() { +// logger.log(Level.INFO, "stop()"); +// +// //stop timer +// commitTimer.stop(); +// //stop currentSearcher +// if (currentSearcher != null) { +// currentSearcher.cancel(true); +// } +// +// //cancel searcher timer, ensure unwanted searcher does not start +// if (searchTimer.isRunning()) { +// searchTimer.stop(); +// } +// runSearcher = false; +// finalSearcherDone = true; +// +// +// //commit uncommited files, don't search again +// commit(); +// +// //postSummary(); +// +// cleanup(); +// } +// +// /** +// * Common cleanup code when module stops or final searcher completes +// */ +// private void cleanup() { +// ingestStatus.clear(); +// currentResults.clear(); +// curDataSourceIds.clear(); +// currentSearcher = null; +// //finalSearcher = null; //do not collect, might be finalizing +// +// commitTimer.stop(); +// searchTimer.stop(); +// commitTimer = null; +// //searchTimer = null; // do not collect, final searcher might still be running, in which case it throws an exception +// +// textExtractors.clear(); +// textExtractors = null; +// stringExtractor = null; +// +// keywords.clear(); +// keywordLists.clear(); +// keywordToList.clear(); +// +// tikaFormatDetector = null; +// +// initialized = false; +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// /** +// * Initializes the module for new ingest run Sets up threads, timers, +// * retrieves settings, keyword lists to run on +// * +// */ +// @Override +// public void init(IngestModuleInit initContext) { +// logger.log(Level.INFO, "init()"); +// services = IngestServices.getDefault(); +// initialized = false; +// +// caseHandle = Case.getCurrentCase().getSleuthkitCase(); +// +// tikaFormatDetector = new Tika(); +// +// ingester = Server.getIngester(); +// +// final Server server = KeywordSearch.getServer(); +// try { +// if (!server.isRunning()) { +// String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); +// logger.log(Level.SEVERE, msg); +// String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// return; +// +// } +// } catch (KeywordSearchModuleException ex) { +// logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); +// //this means Solr is not 
properly initialized +// String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); +// String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// return; +// } +// +// +// //initialize extractors +// stringExtractor = new AbstractFileStringExtract(); +// stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); +// stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); +// +// +// //log the scripts used for debugging +// final StringBuilder sbScripts = new StringBuilder(); +// for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { +// sbScripts.append(s.name()).append(" "); +// } +// logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); +// +// textExtractors = new ArrayList(); +// //order matters, more specific extractors first +// textExtractors.add(new AbstractFileHtmlExtract()); +// textExtractors.add(new AbstractFileTikaTextExtract()); +// +// +// ingestStatus = new HashMap(); +// +// keywords = new ArrayList(); +// keywordLists = new ArrayList(); +// keywordToList = new HashMap(); +// +// initKeywords(); +// +// if (keywords.isEmpty() || keywordLists.isEmpty()) { +// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), +// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); +// } +// +// processedFiles = false; +// finalSearcherDone = false; +// searcherDone = true; //make sure to start the initial currentSearcher +// //keeps track of all results per run not to repeat reporting the same hits +// currentResults = new HashMap>(); +// +// curDataSourceIds = new HashSet(); +// +// indexer = new Indexer(); +// +// final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; +// logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); +// logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); +// +// commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); +// searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); +// +// initialized = true; +// +// commitTimer.start(); +// searchTimer.start(); +// } +// +// @Override +// public boolean hasSimpleConfiguration() { +// return true; +// } +// +// @Override +// public boolean hasAdvancedConfiguration() { +// return true; +// } +// +// @Override +// public javax.swing.JPanel getSimpleConfiguration(String context) { +// KeywordSearchListsXML.getCurrent().reload(); +// +// if (null == simpleConfigPanel) { +// simpleConfigPanel = new KeywordSearchIngestSimplePanel(); +// } +// else { +// simpleConfigPanel.load(); +// } +// +// return simpleConfigPanel; +// } +// +// @Override +// public javax.swing.JPanel getAdvancedConfiguration(String context) { +// if (advancedConfigPanel == null) { +// advancedConfigPanel = new KeywordSearchConfigurationPanel(); +// } +// +// advancedConfigPanel.load(); +// return advancedConfigPanel; +// } +// +// @Override +// public void saveAdvancedConfiguration() { +// if (advancedConfigPanel != null) { +// advancedConfigPanel.store(); +// } +// +// if (simpleConfigPanel != null) { +// simpleConfigPanel.load(); +// } +// } +// +// @Override +// public void saveSimpleConfiguration() { +// 
KeywordSearchListsXML.getCurrent().save(); +// } +// +// /** +// * The modules maintains background threads, return true if background +// * threads are running or there are pending tasks to be run in the future, +// * such as the final search post-ingest completion +// * +// * @return +// */ +// @Override +// public boolean hasBackgroundJobsRunning() { +// if ((currentSearcher != null && searcherDone == false) +// || (finalSearcherDone == false)) { +// return true; +// } else { +// return false; +// } +// +// } +// +// /** +// * Commits index and notifies listeners of index update +// */ +// private void commit() { +// if (initialized) { +// logger.log(Level.INFO, "Commiting index"); +// ingester.commit(); +// logger.log(Level.INFO, "Index comitted"); +// //signal a potential change in number of text_ingested files +// indexChangeNotify(); +// } +// } +// +// /** +// * Posts inbox message with summary of text_ingested files +// */ +// private void postIndexSummary() { +// int text_ingested = 0; +// int metadata_ingested = 0; +// int strings_ingested = 0; +// int error_text = 0; +// int error_index = 0; +// int error_io = 0; +// for (IngestStatus s : ingestStatus.values()) { +// switch (s) { +// case TEXT_INGESTED: +// ++text_ingested; +// break; +// case METADATA_INGESTED: +// ++metadata_ingested; +// break; +// case STRINGS_INGESTED: +// ++strings_ingested; +// break; +// case SKIPPED_ERROR_TEXTEXTRACT: +// error_text++; +// break; +// case SKIPPED_ERROR_INDEXING: +// error_index++; +// break; +// case SKIPPED_ERROR_IO: +// error_io++; +// break; +// default: +// ; +// } +// } +// +// StringBuilder msg = new StringBuilder(); +// msg.append(""); +// msg.append(""); +// msg.append(""); +// msg.append(""); +// msg.append(""); +// msg.append(""); +// msg.append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("").append(text_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.fileGenStringsHead")).append("").append(strings_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.mdOnlyLbl")).append("").append(metadata_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrLbl")).append("").append(error_index).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errTxtLbl")).append("").append(error_text).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append("
"); +// String indexStats = msg.toString(); +// logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); +// services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats)); +// if (error_index > 0) { +// MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrsTitle"), +// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrMsgFiles", error_index)); +// } +// else if (error_io + error_text > 0) { +// MessageNotifyUtil.Notify.warn(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxWarnMsgTitle"), +// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrReadFilesMsg")); +// } +// } +// +// /** +// * Helper method to notify listeners on index update +// */ +// private void indexChangeNotify() { +// //signal a potential change in number of text_ingested files +// try { +// final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); +// KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); +// } catch (NoOpenCoreException ex) { +// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); +// } catch (KeywordSearchModuleException se) { +// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); +// } +// } +// +// /** +// * Initialize the keyword search lists and associated keywords from the XML +// * loader Use the lists to ingest that are set in the permanent XML +// * configuration +// */ +// private void initKeywords() { +// addKeywordLists(null); +// } +// +// /** +// * If ingest is ongoing, this will add additional keyword search lists to +// * the ongoing ingest The lists to add may be temporary and not necessary +// * set to be added to ingest permanently in the XML configuration. The lists +// * will be reset back to original (permanent configuration state) on the +// * next ingest. +// * +// * @param listsToAdd lists to add temporarily to the ongoing ingest +// */ +// void addKeywordLists(List listsToAdd) { +// KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); +// +// keywords.clear(); +// keywordLists.clear(); +// keywordToList.clear(); +// +// StringBuilder sb = new StringBuilder(); +// +// for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) { +// final String listName = list.getName(); +// if (list.getUseForIngest() == true +// || (listsToAdd != null && listsToAdd.contains(listName))) { +// keywordLists.add(listName); +// sb.append(listName).append(" "); +// } +// for (Keyword keyword : list.getKeywords()) { +// if (!keywords.contains(keyword)) { +// keywords.add(keyword); +// keywordToList.put(keyword.getQuery(), list); +// } +// } +// +// } +// +// logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); +// +// } +// +// List getKeywordLists() { +// return keywordLists == null ? new ArrayList() : keywordLists; +// } +// +// /** +// * Check if time to commit, if so, run commit. Then run search if search +// * timer is also set. 
+// void checkRunCommitSearch() {
+// if (commitIndex) {
+// logger.log(Level.INFO, "Committing index");
+// commit();
+// commitIndex = false;
+//
+// //after commit, check if time to run searcher
+// //NOTE commit/searcher timings don't need to align
+// //in worst case, we will run search next time after commit timer goes off, or at the end of ingest
+// if (searcherDone && runSearcher) {
+// //start search if previous not running
+// if (keywordLists != null && !keywordLists.isEmpty()) {
+// currentSearcher = new Searcher(keywordLists);
+// currentSearcher.execute();//searcher will stop timer and restart timer when done
+// }
+// }
+// }
+// }
+//
+// /**
+// * CommitTimerAction, run by commitTimer. Sets a flag to indicate we are
+// * ready for commit.
+// */
+// private class CommitTimerAction implements ActionListener {
+//
+// private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName());
+//
+// @Override
+// public void actionPerformed(ActionEvent e) {
+// commitIndex = true;
+// logger.log(Level.INFO, "CommitTimer awake");
+// }
+// }
+//
+// /**
+// * SearchTimerAction, run by searchTimer. Sets a flag to indicate we are
+// * ready to search.
+// */
+// private class SearchTimerAction implements ActionListener {
+//
+// private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName());
+//
+// @Override
+// public void actionPerformed(ActionEvent e) {
+// runSearcher = true;
+// logger.log(Level.INFO, "SearchTimer awake");
+// }
+// }
+//
+// /**
+// * File indexer; processes and indexes known/allocated files,
+// * unknown/unallocated files, and directories accordingly.
+// */
+// private class Indexer {
+//
+// private final Logger logger = Logger.getLogger(Indexer.class.getName());
+//
+// /**
+// * Extract text with Tika or other text extraction modules (by
+// * streaming) from the file. Divide the file into chunks and index the
+// * chunks.
+// *
+// * @param aFile file to extract text from, divide into chunks and
+// * index
+// * @param detectedFormat mime-type detected, or null if none detected
+// * @return true if the file was text_ingested, false otherwise
+// * @throws IngesterException exception thrown if indexing failed
+// */
+// private boolean extractTextAndIndex(AbstractFile aFile, String detectedFormat) throws IngesterException {
+// AbstractFileExtract fileExtract = null;
+//
+// //go over available text extractors in order, and pick the first one (most specific one)
+// for (AbstractFileExtract fe : textExtractors) {
+// if (fe.isSupported(aFile, detectedFormat)) {
+// fileExtract = fe;
+// break;
+// }
+// }
+//
+// if (fileExtract == null) {
+// logger.log(Level.INFO, "No text extractor found for file id:"
+// + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat);
+// return false;
+// }
+//
+// //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName());
+//
+// //divide into chunks and index
+// return fileExtract.index(aFile);
+// }
+//
+// /**
+// * Extract strings using heuristics from the file and add to index.
+// * +// * @param aFile file to extract strings from, divide into chunks and +// * index +// * @return true if the file was text_ingested, false otherwise +// */ +// private boolean extractStringsAndIndex(AbstractFile aFile) { +// try { +// if (stringExtractor.index(aFile)) { +// ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); +// return true; +// } else { +// logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); +// return false; +// } +// } catch (IngesterException ex) { +// logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +// return false; +// } +// } +// +// /** +// * Check with every extractor if it supports the file with the detected +// * format +// * +// * @param aFile file to check for +// * @param detectedFormat mime-type with detected format (such as +// * text/plain) or null if not detected +// * @return true if text extraction is supported +// */ +// private boolean isTextExtractSupported(AbstractFile aFile, String detectedFormat) { +// for (AbstractFileExtract extractor : textExtractors) { +// if (extractor.isContentTypeSpecific() == true +// && extractor.isSupported(aFile, detectedFormat)) { +// return true; +// } +// } +// return false; +// } +// +// /** +// * Adds the file to the index. Detects file type, calls extractors, etc. +// * +// * @param aFile File to analyze +// * @param indexContent False if only metadata should be text_ingested. True if +// * content and metadata should be index. +// */ +// private void indexFile(AbstractFile aFile, boolean indexContent) { +// //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); +// +// TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); +// +// // unallocated and unused blocks can only have strings extracted from them. 
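+//
+// The extractor choice in extractTextAndIndex() and the support test in
+// isTextExtractSupported() above share, in essence, one rule: walk the
+// registered extractors in priority order and take the first that claims
+// support for the detected MIME type. In isolation (TextExtractor and
+// ExtractorPicker are illustrative stand-ins, not module types):
+//
+// import java.util.List;
+//
+// class ExtractorPicker {
+//     interface TextExtractor {
+//         boolean isSupported(String mimeType);
+//     }
+//
+//     static TextExtractor pick(List<? extends TextExtractor> extractors, String mimeType) {
+//         for (TextExtractor candidate : extractors) {
+//             if (candidate.isSupported(mimeType)) {
+//                 return candidate; // list is ordered most specific first
+//             }
+//         }
+//         return null; // caller falls back to string extraction
+//     }
+// }
+//
+// indexFile() resumes below, starting with the unallocated/unused-blocks case
+// noted in the comment above.
+//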
+// if ((aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS))) { +// extractStringsAndIndex(aFile); +// } +// +// final long size = aFile.getSize(); +// //if not to index content, or a dir, or 0 content, index meta data only +// if ((indexContent == false || aFile.isDir() || size == 0)) { +// try { +// ingester.ingest(aFile, false); //meta-data only +// ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); +// } +// catch (IngesterException ex) { +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +// logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); +// } +// return; +// } +// +// //use Tika to detect the format +// String detectedFormat = null; +// InputStream is = null; +// try { +// is = new ReadContentInputStream(aFile); +// detectedFormat = tikaFormatDetector.detect(is, aFile.getName()); +// } +// catch (Exception e) { +// logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e); +// } +// finally { +// if (is != null) { +// try { +// is.close(); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Could not close stream after detecting format using tika for file: " +// + aFile, ex); +// } +// } +// } +// +// // @@@ Add file type signature to blackboard here +// +// //logger.log(Level.INFO, "Detected format: " + aFile.getName() + " " + detectedFormat); +// +// // we skip archive formats that are opened by the archive module. +// // @@@ We could have a check here to see if the archive module was enabled though... +// if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) { +// try { +// ingester.ingest(aFile, false); //meta-data only +// ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); +// } +// catch (IngesterException ex) { +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +// logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); +// } +// return; +// } +// +// boolean wasTextAdded = false; +// if (isTextExtractSupported(aFile, detectedFormat)) { +// //extract text with one of the extractors, divide into chunks and index with Solr +// try { +// //logger.log(Level.INFO, "indexing: " + aFile.getName()); +// if (!extractTextAndIndex(aFile, detectedFormat)) { +// logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); +// } else { +// ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); +// wasTextAdded = true; +// } +// +// } catch (IngesterException e) { +// logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", " +// + aFile.getName(), e); +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +// } catch (Exception e) { +// logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", " +// + aFile.getName(), e); +// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); +// } +// } +// +// // if it wasn't supported or had an error, default to strings +// if (wasTextAdded == false) { +// extractStringsAndIndex(aFile); +// } +// } +// } +// +// /** +// * Searcher responsible for searching the current index and writing results +// * to blackboard and the inbox. Also, posts results to listeners as Ingest +// * data events. 
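+//
+// Taken together, indexFile() above is a triage. A simplified sketch of its
+// decision order (Decision and decide() are illustrative; the real method
+// also records a per-file IngestStatus and lets unallocated blocks fall
+// through to the size check):
+//
+// import java.util.Set;
+//
+// class IndexTriage {
+//     enum Decision { STRINGS_ONLY, METADATA_ONLY, TEXT_EXTRACT }
+//
+//     static Decision decide(boolean unallocOrUnused, boolean indexContent,
+//             boolean isDir, long size, String mime,
+//             Set<String> archiveMimes, Set<String> supportedMimes) {
+//         if (unallocOrUnused) {
+//             return Decision.STRINGS_ONLY;   // raw strings are all we can get
+//         }
+//         if (!indexContent || isDir || size == 0) {
+//             return Decision.METADATA_ONLY;  // nothing worth extracting
+//         }
+//         if (mime != null && archiveMimes.contains(mime)) {
+//             return Decision.METADATA_ONLY;  // archive module opens these
+//         }
+//         if (mime != null && supportedMimes.contains(mime)) {
+//             return Decision.TEXT_EXTRACT;   // Tika or a specific extractor
+//         }
+//         return Decision.STRINGS_ONLY;       // unsupported format: fall back
+//     }
+// }
+//
+// On a text-extraction failure the module likewise falls back to string
+// extraction (the wasTextAdded flag above).
+//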
Searches entire index, and keeps track of only new results +// * to report and save. Runs as a background thread. +// */ +// private final class Searcher extends SwingWorker { +// +// /** +// * Searcher has private copies/snapshots of the lists and keywords +// */ +// private List keywords; //keywords to search +// private List keywordLists; // lists currently being searched +// private Map keywordToList; //keyword to list name mapping +// private AggregateProgressHandle progressGroup; +// private final Logger logger = Logger.getLogger(Searcher.class.getName()); +// private boolean finalRun = false; +// +// Searcher(List keywordLists) { +// this.keywordLists = new ArrayList(keywordLists); +// this.keywords = new ArrayList(); +// this.keywordToList = new HashMap(); +// //keywords are populated as searcher runs +// } +// +// Searcher(List keywordLists, boolean finalRun) { +// this(keywordLists); +// this.finalRun = finalRun; +// } +// +// @Override +// protected Object doInBackground() throws Exception { +// if (finalRun) { +// logger.log(Level.INFO, "Pending start of new (final) searcher"); +// } else { +// logger.log(Level.INFO, "Pending start of new searcher"); +// } +// +// final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + +// (finalRun ? (" - "+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); +// progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("+ +// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") +")"), null, new Cancellable() { +// @Override +// public boolean cancel() { +// logger.log(Level.INFO, "Cancelling the searcher by user."); +// if (progressGroup != null) { +// progressGroup.setDisplayName(displayName + " ("+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.cancelMsg") +"...)"); +// } +// return Searcher.this.cancel(true); +// } +// }, null); +// +// updateKeywords(); +// +// ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; +// int i = 0; +// for (Keyword keywordQuery : keywords) { +// subProgresses[i] = +// AggregateProgressFactory.createProgressContributor(keywordQuery.getQuery()); +// progressGroup.addContributor(subProgresses[i]); +// i++; +// } +// +// progressGroup.start(); +// +// //block to ensure previous searcher is completely done with doInBackground() +// //even after previous searcher cancellation, we need to check this +// searcherLock.lock(); +// final StopWatch stopWatch = new StopWatch(); +// stopWatch.start(); +// try { +// logger.log(Level.INFO, "Started a new searcher"); +// progressGroup.setDisplayName(displayName); +// //make sure other searchers are not spawned +// searcherDone = false; +// runSearcher = false; +// if (searchTimer.isRunning()) { +// searchTimer.stop(); +// } +// +// int keywordsSearched = 0; +// +// //updateKeywords(); +// +// for (Keyword keywordQuery : keywords) { +// if (this.isCancelled()) { +// logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); +// return null; +// } +// +// final String queryStr = keywordQuery.getQuery(); +// final KeywordSearchListsAbstract.KeywordSearchList list = keywordToList.get(queryStr); +// final String listName = list.getName(); +// +// //new subProgress will be active after the initial query +// //when we know number of hits to start() with +// if (keywordsSearched > 0) { +// 
+// subProgresses[keywordsSearched - 1].finish();
+// }
+//
+//
+// KeywordSearchQuery del = null;
+//
+// boolean isRegex = !keywordQuery.isLiteral();
+// if (isRegex) {
+// del = new TermComponentQuery(keywordQuery);
+// }
+// else {
+// del = new LuceneQuery(keywordQuery);
+// del.escape();
+// }
+//
+// //limit search to currently ingested data sources
+// //set up a filter with 1 or more image ids OR'ed
+// final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds);
+// del.addFilter(dataSourceFilter);
+//
+// Map<String, List<ContentHit>> queryResult = null;
+//
+// try {
+// queryResult = del.performQuery();
+// } catch (NoOpenCoreException ex) {
+// logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex);
+// //no reason to continue with next query if recovery failed
+// //or wait for recovery to kick in and run again later
+// //likely case has closed and threads are being interrupted
+// return null;
+// } catch (CancellationException e) {
+// logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
+// return null;
+// } catch (Exception e) {
+// logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
+// continue;
+// }
+//
+// // calculate new results by subtracting results already obtained in this ingest
+// // this creates a map of each keyword to the list of unique files that have that hit.
+// Map<Keyword, List<ContentHit>> newResults = filterResults(queryResult, isRegex);
+//
+// if (!newResults.isEmpty()) {
+//
+// //write results to BB
+//
+// //new artifacts created, to report to listeners
+// Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();
+//
+// //scale progress bar to be more granular, per result sub-progress, within per keyword
+// int totalUnits = newResults.size();
+// subProgresses[keywordsSearched].start(totalUnits);
+// int unitProgress = 0;
+// String queryDisplayStr = keywordQuery.getQuery();
+// if (queryDisplayStr.length() > 50) {
+// queryDisplayStr = queryDisplayStr.substring(0, 49) + "...";
+// }
+// subProgresses[keywordsSearched].progress(listName + ": " + queryDisplayStr, unitProgress);
+//
+//
+// /* cycle through the keywords returned -- only one unless it was a regexp */
+// for (final Keyword hitTerm : newResults.keySet()) {
+// //checking for cancellation between results
+// if (this.isCancelled()) {
+// logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery());
+// return null;
+// }
+//
+// // update progress display
+// String hitDisplayStr = hitTerm.getQuery();
+// if (hitDisplayStr.length() > 50) {
+// hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
+// }
+// subProgresses[keywordsSearched].progress(listName + ": " + hitDisplayStr, unitProgress);
+// //subProgresses[keywordsSearched].progress(unitProgress);
+//
+// // this returns the unique files in the set with the first chunk that has a hit
+// Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(newResults.get(hitTerm));
+// for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
+//
+// // get the snippet for the first hit in the file
+// String snippet = null;
+// final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
+// int chunkId = contentHitsFlattened.get(hitFile);
+// try {
+// snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true);
+// } catch (NoOpenCoreException e) {
+// logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
+// //no reason to continue
+// return null;
+// } catch (Exception e) {
+// logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
+// continue;
+// }
+//
+// // write the blackboard artifact for this keyword in this file
+// KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
+// if (written == null) {
+// logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
+// continue;
+// }
+//
+// newArtifacts.add(written.getArtifact());
+//
+// //generate an ingest inbox message for this keyword in this file
+// if (list.getIngestMessages()) {
+// StringBuilder subjectSb = new StringBuilder();
+// StringBuilder detailsSb = new StringBuilder();
+// //final int hitFiles = newResults.size();
+//
+// if (!keywordQuery.isLiteral()) {
+// subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
+// } else {
+// subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
+// }
+// //subjectSb.append("<");
+// String uniqueKey = null;
+// BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
+// if (attr != null) {
+// final String keyword = attr.getValueString();
+// subjectSb.append(keyword);
+// uniqueKey = keyword.toLowerCase();
+// }
+//
+// //subjectSb.append(">");
+// //String uniqueKey = queryStr;
+//
+// //details
+// detailsSb.append("<table>");
+// //hit
+// detailsSb.append("<tr>");
+// detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLThLbl"));
+// detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
+// detailsSb.append("</tr>");
+//
+// //preview
+// attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
+// if (attr != null) {
+// detailsSb.append("<tr>");
+// detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
+// detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
+// detailsSb.append("</tr>");
+//
+// }
+//
+// //file
+// detailsSb.append("<tr>");
+// detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
+// detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>");
+//
+// detailsSb.append("</tr>");
+//
+//
+// //list
+// attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
+// detailsSb.append("<tr>");
+// detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
+// detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
+// detailsSb.append("</tr>");
+//
+// //regex
+// if (!keywordQuery.isLiteral()) {
+// attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
+// if (attr != null) {
+// detailsSb.append("<tr>");
+// detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
+// detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
+// detailsSb.append("</tr>");
+//
+// }
+// }
+// detailsSb.append("</table>
"); +// +// services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); +// } +// } //for each file hit +// +// ++unitProgress; +// +// }//for each hit term +// +// //update artifact browser +// if (!newArtifacts.isEmpty()) { +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts)); +// } +// } //if has results +// +// //reset the status text before it goes away +// subProgresses[keywordsSearched].progress(""); +// +// ++keywordsSearched; +// +// } //for each keyword +// +// } //end try block +// catch (Exception ex) { +// logger.log(Level.WARNING, "searcher exception occurred", ex); +// } finally { +// try { +// finalizeSearcher(); +// stopWatch.stop(); +// logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); +// } finally { +// searcherLock.unlock(); +// } +// } +// +// return null; +// } +// +// /** +// * Sync-up the updated keywords from the currently used lists in the XML +// */ +// private void updateKeywords() { +// KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); +// +// this.keywords.clear(); +// this.keywordToList.clear(); +// +// for (String name : this.keywordLists) { +// KeywordSearchListsAbstract.KeywordSearchList list = loader.getList(name); +// for (Keyword k : list.getKeywords()) { +// this.keywords.add(k); +// this.keywordToList.put(k.getQuery(), list); +// } +// } +// +// +// } +// +// //perform all essential cleanup that needs to be done right AFTER doInBackground() returns +// //without relying on done() method that is not guaranteed to run after background thread completes +// //NEED to call this method always right before doInBackground() returns +// /** +// * Performs the cleanup that needs to be done right AFTER +// * doInBackground() returns without relying on done() method that is not +// * guaranteed to run after background thread completes REQUIRED to call +// * this method always right before doInBackground() returns +// */ +// private void finalizeSearcher() { +// logger.log(Level.INFO, "Searcher finalizing"); +// SwingUtilities.invokeLater(new Runnable() { +// @Override +// public void run() { +// progressGroup.finish(); +// } +// }); +// searcherDone = true; //next currentSearcher can start +// +// if (finalRun) { +// //this is the final searcher +// logger.log(Level.INFO, "The final searcher in this ingest done."); +// finalSearcherDone = true; +// +// //run module cleanup +// cleanup(); +// } else { +// //start counting time for a new searcher to start +// //unless final searcher is pending +// if (finalSearcher == null) { +// //we need a new Timer object, because restarting previus will not cause firing of the action +// final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; +// searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); +// searchTimer.start(); +// } +// } +// } +// +// //calculate new results but substracting results already obtained in this ingest +// //update currentResults map with the new results +// private Map> filterResults(Map> queryResult, boolean isRegex) { +// Map> newResults = new HashMap>(); +// +// for (String termResult : queryResult.keySet()) { +// List queryTermResults = queryResult.get(termResult); +// +// //translate to list of IDs that we keep track of +// List queryTermResultsIDs = new ArrayList(); +// for (ContentHit ch : queryTermResults) { +// 
queryTermResultsIDs.add(ch.getId()); +// } +// +// Keyword termResultK = new Keyword(termResult, !isRegex); +// List curTermResults = currentResults.get(termResultK); +// if (curTermResults == null) { +// currentResults.put(termResultK, queryTermResultsIDs); +// newResults.put(termResultK, queryTermResults); +// } else { +// //some AbstractFile hits already exist for this keyword +// for (ContentHit res : queryTermResults) { +// if (!curTermResults.contains(res.getId())) { +// //add to new results +// List newResultsFs = newResults.get(termResultK); +// if (newResultsFs == null) { +// newResultsFs = new ArrayList(); +// newResults.put(termResultK, newResultsFs); +// } +// newResultsFs.add(res); +// curTermResults.add(res.getId()); +// } +// } +// } +// } +// +// return newResults; +// +// } +// } +//} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java index 3444d19d16..6aa3e48967 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java @@ -115,7 +115,7 @@ class KeywordSearchListsViewerPanel extends AbstractKeywordSearchPerformer { }); final KeywordSearchIngestModule module = KeywordSearchIngestModule.getDefault(); - if (IngestManager.getDefault().isModuleRunning(module)) { + if (IngestManager.getDefault().isIngestRunning()) { initIngest(true); } else { initIngest(false); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 100d3a9322..8bb393b617 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -20,520 +20,519 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.recentactivity; - -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonIOException; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; -import com.google.gson.JsonSyntaxException; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import java.util.*; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; +//package org.sleuthkit.autopsy.recentactivity; +// +//import com.google.gson.JsonArray; +//import com.google.gson.JsonElement; +//import com.google.gson.JsonIOException; +//import com.google.gson.JsonObject; +//import com.google.gson.JsonParser; +//import com.google.gson.JsonSyntaxException; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.datamodel.ContentUtils; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import java.util.*; +//import java.io.File; +//import java.io.FileNotFoundException; +//import java.io.FileReader; +//import java.io.IOException; +//import org.sleuthkit.autopsy.casemodule.services.FileManager; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; /** * Chrome recent activity extraction */ -class Chrome extends Extract { - - private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " - + "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; - private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; - private static final String bookmarkQuery = "SELECT starred.title, urls.url, starred.date_added, starred.date_modified, urls.typed_count,urls._last_visit_time FROM starred INNER JOIN urls ON urls.id = starred.url_id"; - private static final String downloadQuery = "select 
full_path, url, start_time, received_bytes from downloads"; - private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; - private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; - private final Logger logger = Logger.getLogger(this.getClass().getName()); - private int ChromeCount = 0; - final private static String MODULE_VERSION = "1.0"; - private IngestServices services; - - //hide public constructor to prevent from instantiation by ingest module loader - Chrome() { - moduleName = "Chrome"; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - dataFound = false; - this.getHistory(dataSource, controller); - this.getBookmark(dataSource, controller); - this.getCookie(dataSource, controller); - this.getLogin(dataSource, controller); - this.getDownload(dataSource, controller); - } - - /** - * Query for history databases and add artifacts - * @param dataSource - * @param controller - */ - private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List historyFiles = null; - try { - historyFiles = fileManager.findFiles(dataSource, "History", "Chrome"); - } catch (TskCoreException ex) { - String msg = "Error when trying to get Chrome history files."; - logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - // get only the allocated ones, for now - List allocatedHistoryFiles = new ArrayList<>(); - for (AbstractFile historyFile : historyFiles) { - if (historyFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { - allocatedHistoryFiles.add(historyFile); - } - } - - // log a message if we don't have any allocated history files - if (allocatedHistoryFiles.isEmpty()) { - String msg = "Could not find any allocated Chrome history files."; - logger.log(Level.INFO, msg); - return; - } - - dataFound = true; - int j = 0; - while (j < historyFiles.size()) { - String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + historyFiles.get(j).getName().toString() + j + ".db"; - final AbstractFile historyFile = historyFiles.get(j++); - if (historyFile.getSize() == 0) { - continue; - } - try { - ContentUtils.writeToFile(historyFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome web history artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + historyFile.getName()); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - List> tempList = null; - tempList = this.dbConnect(temps, historyQuery); - logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? 
result.get("url").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_visit_time").toString())) / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "Recent Activity", ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); - - } - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); - } - - /** - * Search for bookmark files and make artifacts. - * @param dataSource - * @param controller - */ - private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List bookmarkFiles = null; - try { - bookmarkFiles = fileManager.findFiles(dataSource, "Bookmarks", "Chrome"); - } catch (TskCoreException ex) { - String msg = "Error when trying to get Chrome Bookmark files."; - logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (bookmarkFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any Chrome bookmark files."); - return; - } - - dataFound = true; - int j = 0; - - while (j < bookmarkFiles.size()) { - AbstractFile bookmarkFile = bookmarkFiles.get(j++); - if (bookmarkFile.getSize() == 0) { - continue; - } - String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + bookmarkFile.getName().toString() + j + ".db"; - try { - ContentUtils.writeToFile(bookmarkFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome bookmark artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + bookmarkFile.getName()); - continue; - } - - logger.log(Level.INFO, moduleName + "- Now getting Bookmarks from " + temps); - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - - FileReader tempReader; - try { - tempReader = new FileReader(temps); - } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "Error while trying to read into the Bookmarks for Chrome.", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + bookmarkFile.getName()); - continue; - } - - final JsonParser parser = new JsonParser(); - JsonElement jsonElement; - JsonObject jElement, jRoot, jBookmark; - JsonArray jBookmarkArray; - - try { - jsonElement = parser.parse(tempReader); - jElement = jsonElement.getAsJsonObject(); - jRoot = jElement.get("roots").getAsJsonObject(); - jBookmark = jRoot.get("bookmark_bar").getAsJsonObject(); - jBookmarkArray = jBookmark.getAsJsonArray("children"); - } catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) { - logger.log(Level.WARNING, "Error parsing Json from 
Chrome Bookmark.", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + bookmarkFile.getName()); - continue; - } - - for (JsonElement result : jBookmarkArray) { - JsonObject address = result.getAsJsonObject(); - if (address == null) { - continue; - } - JsonElement urlEl = address.get("url"); - String url = null; - if (urlEl != null) { - url = urlEl.getAsString(); - } - else { - url = ""; - } - String name = null; - JsonElement nameEl = address.get("name"); - if (nameEl != null) { - name = nameEl.getAsString(); - } - else { - name = ""; - } - Long date = null; - JsonElement dateEl = address.get("date_added"); - if (dateEl != null) { - date = dateEl.getAsLong(); - } - else { - date = Long.valueOf(0); - } - String domain = Util.extractDomain(url); - try { - BlackboardArtifact bbart = bookmarkFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK); - Collection bbattributes = new ArrayList(); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", (date / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "Recent Activity", (date / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error while trying to insert Chrome bookmark artifact{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + bookmarkFile.getName()); - } - } - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); - } - - /** - * Queries for cookie files and adds artifacts - * @param dataSource - * @param controller - */ - private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List cookiesFiles = null; - try { - cookiesFiles = fileManager.findFiles(dataSource, "Cookies", "Chrome"); - } catch (TskCoreException ex) { - String msg = "Error when trying to get Chrome history files."; - logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (cookiesFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any Chrome cookies files."); - return; - } - - dataFound = true; - int j = 0; - while (j < cookiesFiles.size()) { - AbstractFile cookiesFile = cookiesFiles.get(j++); - if (cookiesFile.getSize() == 0) { - continue; - } - String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + cookiesFile.getName().toString() + j + ".db"; - try { - ContentUtils.writeToFile(cookiesFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome cookie artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + cookiesFile.getName()); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); 
- break; - } - - List> tempList = this.dbConnect(temps, cookieQuery); - logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("host_key").toString() != null) ? result.get("host_key").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_access_utc").toString())) / 10000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "Recent Activity", ((result.get("value").toString() != null) ? result.get("value").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - String domain = result.get("host_key").toString(); - domain = domain.replaceFirst("^\\.+(?!$)", ""); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - } - - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); - } - - /** - * Queries for download files and adds artifacts - * @param dataSource - * @param controller - */ - private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadFiles = null; - try { - downloadFiles = fileManager.findFiles(dataSource, "History", "Chrome"); - } catch (TskCoreException ex) { - String msg = "Error when trying to get Chrome history files."; - logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (downloadFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any Chrome download files."); - return; - } - - dataFound = true; - int j = 0; - while (j < downloadFiles.size()) { - AbstractFile downloadFile = downloadFiles.get(j++); - if (downloadFile.getSize() == 0) { - continue; - } - String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + downloadFile.getName().toString() + j + ".db"; - try { - ContentUtils.writeToFile(downloadFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome download artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + downloadFile.getName()); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - - List> tempList = null; - - if (isChromePreVersion30(temps)) { - tempList = this.dbConnect(temps, downloadQuery); - } else { - tempList = this.dbConnect(temps, downloadQueryVersion30); - } - - logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "Recent Activity", 
(result.get("full_path").toString()))); - long pathID = Util.findID(dataSource, (result.get("full_path").toString())); - if (pathID != -1) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "Recent Activity", pathID)); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); - Long time = (Long.valueOf(result.get("start_time").toString())); - String Tempdate = time.toString(); - time = Long.valueOf(Tempdate) / 10000000; - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", time)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", time)); - String domain = Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : ""); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); - } - - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); - } - - /** - * Queries for login files and adds artifacts - * @param dataSource - * @param controller - */ - private void getLogin(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List signonFiles = null; - try { - signonFiles = fileManager.findFiles(dataSource, "signons.sqlite", "Chrome"); - } catch (TskCoreException ex) { - String msg = "Error when trying to get Chrome history files."; - logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (signonFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any Chrome signon files."); - return; - } - - dataFound = true; - int j = 0; - while (j < signonFiles.size()) { - AbstractFile signonFile = signonFiles.get(j++); - if (signonFile.getSize() == 0) { - continue; - } - String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + signonFile.getName().toString() + j + ".db"; - try { - ContentUtils.writeToFile(signonFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome login artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + signonFile.getName()); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - List> tempList = this.dbConnect(temps, loginQuery); - logger.log(Level.INFO, moduleName + "- Now getting login information from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("origin_url").toString() != null) ? 
result.get("origin_url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("origin_url").toString() != null) ? EscapeUtil.decodeURL(result.get("origin_url").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", ((Long.valueOf(result.get("last_visit_time").toString())) / 1000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_visit_time").toString())) / 1000000))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "Recent Activity", ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", (Util.extractDomain((result.get("origin_url").toString() != null) ? result.get("url").toString() : "")))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "Recent Activity", ((result.get("username_value").toString() != null) ? result.get("username_value").toString().replaceAll("'", "''") : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", result.get("signon_realm").toString())); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, signonFile, bbattributes); - } - - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - } - - @Override - public void complete() { - } - - @Override - public void stop() { - } - - @Override - public String getDescription() { - return "Extracts activity from the Google Chrome browser."; - } - - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } - - private boolean isChromePreVersion30(String temps) { - String query = "PRAGMA table_info(downloads)"; - List> columns = this.dbConnect(temps, query); - for (HashMap col : columns) { - if (col.get("name").equals("url")) { - return true; - } - } - - return false; - } -} +//class Chrome extends Extract { +// +// private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " +// + "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; +// private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies"; +// private static final String bookmarkQuery = "SELECT starred.title, urls.url, starred.date_added, starred.date_modified, urls.typed_count,urls._last_visit_time FROM starred INNER JOIN urls ON urls.id = starred.url_id"; +// private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads"; +// private static final String 
downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; +// private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; +// private final Logger logger = Logger.getLogger(this.getClass().getName()); +// private int ChromeCount = 0; +// final private static String MODULE_VERSION = "1.0"; +// private IngestServices services; +// +// //hide public constructor to prevent from instantiation by ingest module loader +// Chrome() { +// moduleName = "Chrome"; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// dataFound = false; +// this.getHistory(dataSource, controller); +// this.getBookmark(dataSource, controller); +// this.getCookie(dataSource, controller); +// this.getLogin(dataSource, controller); +// this.getDownload(dataSource, controller); +// } +// +// /** +// * Query for history databases and add artifacts +// * @param dataSource +// * @param controller +// */ +// private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List historyFiles = null; +// try { +// historyFiles = fileManager.findFiles(dataSource, "History", "Chrome"); +// } catch (TskCoreException ex) { +// String msg = "Error when trying to get Chrome history files."; +// logger.log(Level.SEVERE, msg, ex); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// // get only the allocated ones, for now +// List allocatedHistoryFiles = new ArrayList<>(); +// for (AbstractFile historyFile : historyFiles) { +// if (historyFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) { +// allocatedHistoryFiles.add(historyFile); +// } +// } +// +// // log a message if we don't have any allocated history files +// if (allocatedHistoryFiles.isEmpty()) { +// String msg = "Could not find any allocated Chrome history files."; +// logger.log(Level.INFO, msg); +// return; +// } +// +// dataFound = true; +// int j = 0; +// while (j < historyFiles.size()) { +// String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + historyFiles.get(j).getName().toString() + j + ".db"; +// final AbstractFile historyFile = historyFiles.get(j++); +// if (historyFile.getSize() == 0) { +// continue; +// } +// try { +// ContentUtils.writeToFile(historyFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome web history artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + historyFile.getName()); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// List> tempList = null; +// tempList = this.dbConnect(temps, historyQuery); +// logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? 
result.get("url").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_visit_time").toString())) / 10000000))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "Recent Activity", ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); +// +// } +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); +// } +// +// /** +// * Search for bookmark files and make artifacts. +// * @param dataSource +// * @param controller +// */ +// private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List bookmarkFiles = null; +// try { +// bookmarkFiles = fileManager.findFiles(dataSource, "Bookmarks", "Chrome"); +// } catch (TskCoreException ex) { +// String msg = "Error when trying to get Chrome Bookmark files."; +// logger.log(Level.SEVERE, msg, ex); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (bookmarkFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any Chrome bookmark files."); +// return; +// } +// +// dataFound = true; +// int j = 0; +// +// while (j < bookmarkFiles.size()) { +// AbstractFile bookmarkFile = bookmarkFiles.get(j++); +// if (bookmarkFile.getSize() == 0) { +// continue; +// } +// String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + bookmarkFile.getName().toString() + j + ".db"; +// try { +// ContentUtils.writeToFile(bookmarkFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome bookmark artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + bookmarkFile.getName()); +// continue; +// } +// +// logger.log(Level.INFO, moduleName + "- Now getting Bookmarks from " + temps); +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// +// FileReader tempReader; +// try { +// tempReader = new FileReader(temps); +// } catch (FileNotFoundException ex) { +// logger.log(Level.SEVERE, "Error while trying to read into the Bookmarks for Chrome.", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + bookmarkFile.getName()); +// continue; +// } +// +// final JsonParser parser = new JsonParser(); +// JsonElement jsonElement; +// JsonObject jElement, jRoot, jBookmark; +// JsonArray jBookmarkArray; +// +// try { +// jsonElement = parser.parse(tempReader); +// jElement = jsonElement.getAsJsonObject(); +// jRoot = jElement.get("roots").getAsJsonObject(); +// jBookmark = jRoot.get("bookmark_bar").getAsJsonObject(); +// jBookmarkArray = 
jBookmark.getAsJsonArray("children"); +// } catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) { +// logger.log(Level.WARNING, "Error parsing Json from Chrome Bookmark.", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + bookmarkFile.getName()); +// continue; +// } +// +// for (JsonElement result : jBookmarkArray) { +// JsonObject address = result.getAsJsonObject(); +// if (address == null) { +// continue; +// } +// JsonElement urlEl = address.get("url"); +// String url = null; +// if (urlEl != null) { +// url = urlEl.getAsString(); +// } +// else { +// url = ""; +// } +// String name = null; +// JsonElement nameEl = address.get("name"); +// if (nameEl != null) { +// name = nameEl.getAsString(); +// } +// else { +// name = ""; +// } +// Long date = null; +// JsonElement dateEl = address.get("date_added"); +// if (dateEl != null) { +// date = dateEl.getAsLong(); +// } +// else { +// date = Long.valueOf(0); +// } +// String domain = Util.extractDomain(url); +// try { +// BlackboardArtifact bbart = bookmarkFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK); +// Collection bbattributes = new ArrayList(); +// //TODO Revisit usage of deprecated constructor as per TSK-583 +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", (date / 10000000))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", url)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "Recent Activity", name)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "Recent Activity", (date / 10000000))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain)); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error while trying to insert Chrome bookmark artifact{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + bookmarkFile.getName()); +// } +// } +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); +// } +// +// /** +// * Queries for cookie files and adds artifacts +// * @param dataSource +// * @param controller +// */ +// private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List cookiesFiles = null; +// try { +// cookiesFiles = fileManager.findFiles(dataSource, "Cookies", "Chrome"); +// } catch (TskCoreException ex) { +// String msg = "Error when trying to get Chrome history files."; +// logger.log(Level.SEVERE, msg, ex); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (cookiesFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any Chrome cookies files."); +// return; +// } +// +// dataFound = true; +// int j = 0; +// while (j < cookiesFiles.size()) { +// AbstractFile cookiesFile = cookiesFiles.get(j++); +// if (cookiesFile.getSize() == 0) { +// continue; +// } +// String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + cookiesFile.getName().toString() + j + ".db"; +// try { +// ContentUtils.writeToFile(cookiesFile, 
+//            } catch (IOException ex) {
+//                logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome cookie artifacts.", ex);
+//                this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + cookiesFile.getName());
+//                continue;
+//            }
+//            File dbFile = new File(temps);
+//            if (controller.isCancelled()) {
+//                dbFile.delete();
+//                break;
+//            }
+//
+//            List<HashMap<String, Object>> tempList = this.dbConnect(temps, cookieQuery);
+//            logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + " artifacts identified.");
+//            for (HashMap<String, Object> result : tempList) {
+//                Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("host_key").toString() != null) ? result.get("host_key").toString() : "")));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_access_utc").toString())) / 10000000)));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("name").toString() != null) ? result.get("name").toString() : "")));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "Recent Activity", ((result.get("value").toString() != null) ? result.get("value").toString() : "")));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome"));
+//                String domain = result.get("host_key").toString();
+//                domain = domain.replaceFirst("^\\.+(?!$)", "");
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain));
+//                this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes);
+//            }
+//
+//            dbFile.delete();
+//        }
+//
+//        services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE));
+//    }
+//
+//    /**
+//     * Queries for download files and adds artifacts
+//     * @param dataSource
+//     * @param controller
+//     */
+//    private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) {
+//
+//        FileManager fileManager = currentCase.getServices().getFileManager();
+//        List<AbstractFile> downloadFiles = null;
+//        try {
+//            downloadFiles = fileManager.findFiles(dataSource, "History", "Chrome");
+//        } catch (TskCoreException ex) {
+//            String msg = "Error when trying to get Chrome history files.";
+//            logger.log(Level.SEVERE, msg, ex);
+//            this.addErrorMessage(this.getName() + ": " + msg);
+//            return;
+//        }
+//
+//        if (downloadFiles.isEmpty()) {
+//            logger.log(Level.INFO, "Didn't find any Chrome download files.");
+//            return;
+//        }
+//
+//        dataFound = true;
+//        int j = 0;
+//        while (j < downloadFiles.size()) {
+//            AbstractFile downloadFile = downloadFiles.get(j++);
+//            if (downloadFile.getSize() == 0) {
+//                continue;
+//            }
+//            String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + downloadFile.getName().toString() + j + ".db";
+//            try {
+//                ContentUtils.writeToFile(downloadFile, new File(temps));
+//            } catch (IOException ex) {
+//                logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome download artifacts.", ex);
+//                this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + downloadFile.getName());
+//                continue;
+//            }
+//            File dbFile = new File(temps);
+//            if (controller.isCancelled()) {
+//                dbFile.delete();
+//                break;
+//            }
+//
+//            List<HashMap<String, Object>> tempList = null;
+//
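+//            // (Editor's aside: an illustrative sketch, not part of the original
+//            // module.) Most Chrome history timestamps are stored as microseconds
+//            // since 1601-01-01 UTC (the WebKit epoch), so a conventional
+//            // conversion of a value such as result.get("start_time") to Unix
+//            // seconds would be:
+//            //
+//            //     long webkitMicros = Long.valueOf(result.get("start_time").toString());
+//            //     long unixSeconds = (webkitMicros / 1000000L) - 11644473600L;
+//            //
+//            // (Older schema versions used other units, so each column is worth
+//            // verifying.) The divisions by 10000000 below are reproduced exactly
+//            // as they appear in the original source.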
+//            if (isChromePreVersion30(temps)) {
+//                tempList = this.dbConnect(temps, downloadQuery);
+//            } else {
+//                tempList = this.dbConnect(temps, downloadQueryVersion30);
+//            }
+//
+//            logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + " artifacts identified.");
+//            for (HashMap<String, Object> result : tempList) {
+//                Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "Recent Activity", (result.get("full_path").toString())));
+//                long pathID = Util.findID(dataSource, (result.get("full_path").toString()));
+//                if (pathID != -1) {
+//                    bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "Recent Activity", pathID));
+//                }
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? result.get("url").toString() : "")));
+//                //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : "")));
+//                Long time = (Long.valueOf(result.get("start_time").toString()));
+//                String Tempdate = time.toString();
+//                time = Long.valueOf(Tempdate) / 10000000;
+//                //TODO Revisit usage of deprecated constructor as per TSK-583
+//                //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", time));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", time));
+//                String domain = Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "");
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", domain));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome"));
+//                this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes);
+//            }
+//
+//            dbFile.delete();
+//        }
+//
+//        services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD));
+//    }
+//
+//    /**
+//     * Queries for login files and adds artifacts
+//     * @param dataSource
+//     * @param controller
+//     */
+//    private void getLogin(Content dataSource, IngestDataSourceWorkerController controller) {
+//
+//        FileManager fileManager = currentCase.getServices().getFileManager();
+//        List<AbstractFile> signonFiles = null;
+//        try {
+//            signonFiles = fileManager.findFiles(dataSource, "signons.sqlite", "Chrome");
+//        } catch (TskCoreException ex) {
+//            String msg = "Error when trying to get Chrome signon files.";
+//            logger.log(Level.SEVERE, msg, ex);
+//            this.addErrorMessage(this.getName() + ": " + msg);
+//            return;
+//        }
+//
+//        if (signonFiles.isEmpty()) {
+//            logger.log(Level.INFO, "Didn't find any Chrome signon files.");
+//            return;
+//        }
+//
+//        dataFound = true;
+//        int j = 0;
+//        while (j < signonFiles.size()) {
+//            AbstractFile signonFile = signonFiles.get(j++);
+//            if (signonFile.getSize() == 0) {
+//                continue;
+//            }
+//            String temps = RAImageIngestModule.getRATempPath(currentCase, "chrome") + File.separator + signonFile.getName().toString() + j + ".db";
+//            try {
+//                ContentUtils.writeToFile(signonFile, new File(temps));
+//            } catch (IOException ex) {
+//                logger.log(Level.SEVERE, "Error writing temp sqlite db for Chrome login artifacts.", ex);
+//                this.addErrorMessage(this.getName() + ": Error while trying to analyze file: " + signonFile.getName());
+//                continue;
+//            }
+//            File dbFile = new File(temps);
+//            if (controller.isCancelled()) {
+//                dbFile.delete();
+//                break;
+//            }
+//            List<HashMap<String, Object>> tempList = this.dbConnect(temps, loginQuery);
+//            logger.log(Level.INFO, moduleName + "- Now getting login information from " + temps + " with " + tempList.size() + " artifacts identified.");
+//            for (HashMap<String, Object> result : tempList) {
+//                Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("origin_url").toString() != null) ? result.get("origin_url").toString() : "")));
+//                //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("origin_url").toString() != null) ? EscapeUtil.decodeURL(result.get("origin_url").toString()) : "")));
+//                //TODO Revisit usage of deprecated constructor as per TSK-583
+//                //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", ((Long.valueOf(result.get("last_visit_time").toString())) / 1000000)));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "Recent Activity", ((Long.valueOf(result.get("last_visit_time").toString())) / 1000000)));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "Recent Activity", ((result.get("from_visit").toString() != null) ? result.get("from_visit").toString() : "")));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "Recent Activity", ((result.get("title").toString() != null) ? result.get("title").toString() : "")));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "Recent Activity", "Chrome"));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", (Util.extractDomain((result.get("origin_url").toString() != null) ? result.get("origin_url").toString() : ""))));
+//                bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "Recent Activity", ((result.get("username_value").toString() != null) ?
result.get("username_value").toString().replaceAll("'", "''") : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "Recent Activity", result.get("signon_realm").toString())); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, signonFile, bbattributes); +// } +// +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public void stop() { +// } +// +// @Override +// public String getDescription() { +// return "Extracts activity from the Google Chrome browser."; +// } +// +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +// +// private boolean isChromePreVersion30(String temps) { +// String query = "PRAGMA table_info(downloads)"; +// List> columns = this.dbConnect(temps, query); +// for (HashMap col : columns) { +// if (col.get("name").equals("url")) { +// return true; +// } +// } +// +// return false; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index 66a628f389..a6c20304bd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -22,130 +22,131 @@ */ package org.sleuthkit.autopsy.recentactivity; -import java.sql.ResultSet; -import java.sql.ResultSetMetaData; -import java.sql.SQLException; -import java.util.*; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.report.SQLiteDBConnect; -import org.sleuthkit.datamodel.*; +// RJCTODO +//import java.sql.ResultSet; +//import java.sql.ResultSetMetaData; +//import java.sql.SQLException; +//import java.util.*; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.report.SQLiteDBConnect; +//import org.sleuthkit.datamodel.*; -abstract class Extract extends IngestModuleDataSource{ - - protected Case currentCase = Case.getCurrentCase(); // get the most updated case - protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); - public final Logger logger = Logger.getLogger(this.getClass().getName()); - private final ArrayList errorMessages = new ArrayList<>(); - String moduleName = ""; - boolean dataFound = false; - - //hide public constructor to prevent from instantiation by ingest module loader - Extract() { - dataFound = false; - } - - /** - * Returns a List of string error messages from the inheriting class - * @return errorMessages returns all error messages logged - */ - List getErrorMessages() { - return errorMessages; - } - - /** - * Adds a string to the error message list - * - * @param message is an error message represented as a string - */ - protected void addErrorMessage(String message) { - errorMessages.add(message); - } - - - /** - * Generic method for adding a blackboard artifact to the blackboard - * - * @param type is a blackboard.artifact_type enum to determine which type - * the artifact should be 
- * @param content is the AbstractFile object that needs to have the artifact - * added for it - * @param bbattributes is the collection of blackboard attributes that need - * to be added to the artifact after the artifact has been created - */ - public void addArtifact(BlackboardArtifact.ARTIFACT_TYPE type, AbstractFile content, Collection bbattributes) { - - try { - BlackboardArtifact bbart = content.newArtifact(type); - bbart.addAttributes(bbattributes); - } catch (TskException ex) { - logger.log(Level.SEVERE, "Error while trying to add an artifact: " + ex); - } - } - - /** - * Returns a List from a result set based on sql query. - * This is used to query sqlite databases storing user recent activity data, such as in firefox sqlite db - * - * @param path is the string path to the sqlite db file - * @param query is a sql string query that is to be run - * @return list is the ArrayList that contains the resultset information in it that the query obtained - */ - public List> dbConnect(String path, String query) { - ResultSet temprs = null; - List> list = new ArrayList>(); - String connectionString = "jdbc:sqlite:" + path; - try { - SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); - temprs = tempdbconnect.executeQry(query); - list = this.resultSetToArrayList(temprs); - tempdbconnect.closeConnection(); - } catch (SQLException ex) { - logger.log(Level.SEVERE, "Error while trying to read into a sqlite db." + connectionString, ex); - errorMessages.add(getName() + ": Failed to query database."); - return Collections.>emptyList(); - } - return list; - } - - /** - * Returns a List of AbstractFile objects from TSK based on sql query. - * - * @param rs is the resultset that needs to be converted to an arraylist - * @return list returns the arraylist built from the converted resultset - */ - private List> resultSetToArrayList(ResultSet rs) throws SQLException { - ResultSetMetaData md = rs.getMetaData(); - int columns = md.getColumnCount(); - List> list = new ArrayList>(50); - while (rs.next()) { - HashMap row = new HashMap(columns); - for (int i = 1; i <= columns; ++i) { - if (rs.getObject(i) == null) { - row.put(md.getColumnName(i), ""); - } else { - row.put(md.getColumnName(i), rs.getObject(i)); - } - } - list.add(row); - } - - return list; - } - - - - /** - * Returns the name of the inheriting class - * @return Gets the moduleName set in the moduleName data member - */ - public String getName() { - return moduleName; - } - - public boolean foundData() { - return dataFound; - } -} \ No newline at end of file +//abstract class Extract extends IngestModuleDataSource{ +// +// protected Case currentCase = Case.getCurrentCase(); // get the most updated case +// protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); +// public final Logger logger = Logger.getLogger(this.getClass().getName()); +// private final ArrayList errorMessages = new ArrayList<>(); +// String moduleName = ""; +// boolean dataFound = false; +// +// //hide public constructor to prevent from instantiation by ingest module loader +// Extract() { +// dataFound = false; +// } +// +// /** +// * Returns a List of string error messages from the inheriting class +// * @return errorMessages returns all error messages logged +// */ +// List getErrorMessages() { +// return errorMessages; +// } +// +// /** +// * Adds a string to the error message list +// * +// * @param message is an error message represented as a string +// */ +// protected void addErrorMessage(String message) { +// 
errorMessages.add(message); +// } +// +// +// /** +// * Generic method for adding a blackboard artifact to the blackboard +// * +// * @param type is a blackboard.artifact_type enum to determine which type +// * the artifact should be +// * @param content is the AbstractFile object that needs to have the artifact +// * added for it +// * @param bbattributes is the collection of blackboard attributes that need +// * to be added to the artifact after the artifact has been created +// */ +// public void addArtifact(BlackboardArtifact.ARTIFACT_TYPE type, AbstractFile content, Collection bbattributes) { +// +// try { +// BlackboardArtifact bbart = content.newArtifact(type); +// bbart.addAttributes(bbattributes); +// } catch (TskException ex) { +// logger.log(Level.SEVERE, "Error while trying to add an artifact: " + ex); +// } +// } +// +// /** +// * Returns a List from a result set based on sql query. +// * This is used to query sqlite databases storing user recent activity data, such as in firefox sqlite db +// * +// * @param path is the string path to the sqlite db file +// * @param query is a sql string query that is to be run +// * @return list is the ArrayList that contains the resultset information in it that the query obtained +// */ +// public List> dbConnect(String path, String query) { +// ResultSet temprs = null; +// List> list = new ArrayList>(); +// String connectionString = "jdbc:sqlite:" + path; +// try { +// SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); +// temprs = tempdbconnect.executeQry(query); +// list = this.resultSetToArrayList(temprs); +// tempdbconnect.closeConnection(); +// } catch (SQLException ex) { +// logger.log(Level.SEVERE, "Error while trying to read into a sqlite db." + connectionString, ex); +// errorMessages.add(getName() + ": Failed to query database."); +// return Collections.>emptyList(); +// } +// return list; +// } +// +// /** +// * Returns a List of AbstractFile objects from TSK based on sql query. +// * +// * @param rs is the resultset that needs to be converted to an arraylist +// * @return list returns the arraylist built from the converted resultset +// */ +// private List> resultSetToArrayList(ResultSet rs) throws SQLException { +// ResultSetMetaData md = rs.getMetaData(); +// int columns = md.getColumnCount(); +// List> list = new ArrayList>(50); +// while (rs.next()) { +// HashMap row = new HashMap(columns); +// for (int i = 1; i <= columns; ++i) { +// if (rs.getObject(i) == null) { +// row.put(md.getColumnName(i), ""); +// } else { +// row.put(md.getColumnName(i), rs.getObject(i)); +// } +// } +// list.add(row); +// } +// +// return list; +// } +// +// +// +// /** +// * Returns the name of the inheriting class +// * @return Gets the moduleName set in the moduleName data member +// */ +// public String getName() { +// return moduleName; +// } +// +// public boolean foundData() { +// return dataFound; +// } +//} \ No newline at end of file diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 1af2322b1c..52d7581fe9 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -20,492 +20,491 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.recentactivity; +//package org.sleuthkit.autopsy.recentactivity; +// +////IO imports +//import java.io.BufferedReader; +//import org.sleuthkit.autopsy.coreutils.ExecUtil; +//import java.io.File; +//import java.io.FileInputStream; +//import java.io.FileNotFoundException; +//import java.io.FileWriter; +//import java.io.IOException; +//import java.io.InputStream; +//import java.io.InputStreamReader; +//import java.io.Reader; +//import java.io.Writer; +// +////Util Imports +//import java.text.ParseException; +//import java.text.SimpleDateFormat; +//import java.util.ArrayList; +//import java.util.List; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import java.util.Collection; +//import java.util.Scanner; +//import java.util.regex.Matcher; +//import java.util.regex.Pattern; +// +//// TSK Imports +//import org.openide.modules.InstalledFileLocator; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.coreutils.JLNK; +//import org.sleuthkit.autopsy.coreutils.JLnkParser; +//import org.sleuthkit.autopsy.coreutils.JLnkParserException; +//import org.sleuthkit.autopsy.datamodel.ContentUtils; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.datamodel.*; -//IO imports -import java.io.BufferedReader; -import org.sleuthkit.autopsy.coreutils.ExecUtil; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; -import java.io.Writer; - -//Util Imports -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import java.util.Collection; -import java.util.Scanner; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -// TSK Imports -import org.openide.modules.InstalledFileLocator; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.JLNK; -import org.sleuthkit.autopsy.coreutils.JLnkParser; -import org.sleuthkit.autopsy.coreutils.JLnkParserException; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import 
org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.datamodel.*; - -class ExtractIE extends Extract { - private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); - private IngestServices services; - - //paths set in init() - private String moduleTempResultsDir; - private String PASCO_LIB_PATH; - private String JAVA_PATH; - - final private static String MODULE_VERSION = "1.0"; - private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); - - private ExecUtil execPasco; - - //hide public constructor to prevent from instantiation by ingest module loader - ExtractIE() { - moduleName = "Internet Explorer"; - moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), "IE") + File.separator + "results"; - JAVA_PATH = PlatformUtil.getJavaPath(); - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - dataFound = false; - this.getBookmark(dataSource, controller); - this.getCookie(dataSource, controller); - this.getHistory(dataSource, controller); - } - - /** - * Finds the files storing bookmarks and creates artifacts - * @param dataSource - * @param controller - */ - private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List favoritesFiles = null; - try { - favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites"); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); - this.addErrorMessage(this.getName() + ": Error getting Internet Explorer Bookmarks."); - return; - } - - if (favoritesFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any IE bookmark files."); - return; - } - - dataFound = true; - for (AbstractFile fav : favoritesFiles) { - if (fav.getSize() == 0) { - continue; - } - - if (controller.isCancelled()) { - break; - } - - String url = getURLFromIEBookmarkFile(fav); - - String name = fav.getName(); - Long datetime = fav.getCrtime(); - String Tempdate = datetime.toString(); - datetime = Long.valueOf(Tempdate); - String domain = Util.extractDomain(url); - - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", datetime)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes); - } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); - } - - private String getURLFromIEBookmarkFile(AbstractFile fav) { - BufferedReader reader = new BufferedReader(new InputStreamReader(new ReadContentInputStream(fav))); - String line, url = ""; - try { - while ((line = reader.readLine()) != null) { - // The actual shortcut line we are interested in is of the - // form 
URL=http://path/to/website - if (line.startsWith("URL")) { - url = line.substring(line.indexOf("=") + 1); - break; - } - } - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); - this.addErrorMessage(this.getName() + ": Error parsing IE bookmark File " + fav.getName()); - } catch (IndexOutOfBoundsException ex) { - logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. Unexpected format of the bookmark file: " + fav.getName(), ex); - this.addErrorMessage(this.getName() + ": Error parsing IE bookmark File " + fav.getName()); - } finally { - try { - reader.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to close reader.", ex); - } - } - - return url; - } - - /** - * Finds files that store cookies and adds artifacts for them. - * @param dataSource - * @param controller - */ - private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List cookiesFiles = null; - try { - cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies"); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error getting cookie files for IE"); - this.addErrorMessage(this.getName() + ": " + "Error getting Internet Explorer cookie files."); - return; - } - - if (cookiesFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any IE cookies files."); - return; - } - - dataFound = true; - for (AbstractFile cookiesFile : cookiesFiles) { - if (controller.isCancelled()) { - break; - } - if (cookiesFile.getSize() == 0) { - continue; - } - - byte[] t = new byte[(int) cookiesFile.getSize()]; - try { - final int bytesRead = cookiesFile.read(t, 0, cookiesFile.getSize()); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error reading bytes of Internet Explorer cookie.", ex); - this.addErrorMessage(this.getName() + ": Error reading Internet Explorer cookie " + cookiesFile.getName()); - continue; - } - String cookieString = new String(t); - String[] values = cookieString.split("\n"); - String url = values.length > 2 ? values[2] : ""; - String value = values.length > 1 ? values[1] : ""; - String name = values.length > 0 ? values[0] : ""; - Long datetime = cookiesFile.getCrtime(); - String tempDate = datetime.toString(); - datetime = Long.valueOf(tempDate); - String domain = Util.extractDomain(url); - - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : "")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); - } - - /** - * Locates index.dat files, runs Pasco on them, and creates artifacts. 
- * @param dataSource - * @param controller - */ - private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { - logger.log(Level.INFO, "Pasco results path: " + moduleTempResultsDir); - boolean foundHistory = false; - - final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); - if (pascoRoot == null) { - this.addErrorMessage(this.getName() + ": Unable to get IE History: pasco not found"); - logger.log(Level.SEVERE, "Error finding pasco program "); - return; - } - - final String pascoHome = pascoRoot.getAbsolutePath(); - logger.log(Level.INFO, "Pasco2 home: " + pascoHome); - - PASCO_LIB_PATH = pascoHome + File.separator + "pasco2.jar" + File.pathSeparator - + pascoHome + File.separator + "*"; - - File resultsDir = new File(moduleTempResultsDir); - resultsDir.mkdirs(); - - // get index.dat files - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List indexFiles = null; - try { - indexFiles = fileManager.findFiles(dataSource, "index.dat"); - } catch (TskCoreException ex) { - this.addErrorMessage(this.getName() + ": Error getting Internet Explorer history files"); - logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); - return; - } - - if (indexFiles.isEmpty()) { - String msg = "No InternetExplorer history files found."; - logger.log(Level.INFO, msg); - return; - } - - dataFound = true; - String temps; - String indexFileName; - for (AbstractFile indexFile : indexFiles) { - // Since each result represent an index.dat file, - // just create these files with the following notation: - // index.dat (i.e. index0.dat, index1.dat,..., indexN.dat) - // Write each index.dat file to a temp directory. - //BlackboardArtifact bbart = fsc.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); - indexFileName = "index" + Integer.toString((int) indexFile.getId()) + ".dat"; - //indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat"; - temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; - File datFile = new File(temps); - if (controller.isCancelled()) { - break; - } - try { - ContentUtils.writeToFile(indexFile, datFile); - } catch (IOException e) { - logger.log(Level.SEVERE, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e); - this.addErrorMessage(this.getName() + ": Error while trying to write file:" + datFile.getAbsolutePath()); - continue; - } - - String filename = "pasco2Result." + indexFile.getId() + ".txt"; - boolean bPascProcSuccess = executePasco(temps, filename); - - //At this point pasco2 proccessed the index files. - //Now fetch the results, parse them and the delete the files. - if (bPascProcSuccess) { - parsePascoOutput(indexFile, filename); - foundHistory = true; - - //Delete index.dat file since it was succcessfully by Pasco - datFile.delete(); - } else { - logger.log(Level.WARNING, "pasco execution failed on: " + this.getName()); - this.addErrorMessage(this.getName() + ": Error processing Internet Explorer history."); - } - } - - if (foundHistory) { - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); - } - } - - /** - * Execute pasco on a single file that has been saved to disk. 
- * @param indexFilePath Path to local index.dat file to analyze - * @param outputFileName Name of file to save output to - * @return false on error - */ - private boolean executePasco(String indexFilePath, String outputFileName) { - boolean success = true; - - Writer writer = null; - try { - final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName; - logger.log(Level.INFO, "Writing pasco results to: " + outputFileFullPath); - writer = new FileWriter(outputFileFullPath); - execPasco = new ExecUtil(); - execPasco.execute(writer, JAVA_PATH, - "-cp", PASCO_LIB_PATH, - "isi.pasco2.Main", "-T", "history", indexFilePath ); - // @@@ Investigate use of history versus cache as type. - } catch (IOException ex) { - success = false; - logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex); - } catch (InterruptedException ex) { - success = false; - logger.log(Level.SEVERE, "Pasco has been interrupted, failed to extract some web history from Internet Explorer.", ex); - } - finally { - if (writer != null) { - try { - writer.flush(); - writer.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Error closing writer stream after for Pasco result", ex); - } - } - } - return success; - } - - /** - * parse Pasco output and create artifacts - * @param origFile Original index.dat file that was analyzed to get this output - * @param pascoOutputFileName name of pasco output file - */ - private void parsePascoOutput(AbstractFile origFile, String pascoOutputFileName) { - - String fnAbs = moduleTempResultsDir + File.separator + pascoOutputFileName; - - File file = new File(fnAbs); - if (file.exists() == false) { - this.addErrorMessage(this.getName() + ": Pasco output not found: " + file.getName()); - logger.log(Level.WARNING, "Pasco Output not found: " + file.getPath()); - return; - } - - // Make sure the file the is not empty or the Scanner will - // throw a "No Line found" Exception - if (file.length() == 0) { - return; - } - - Scanner fileScanner; - try { - fileScanner = new Scanner(new FileInputStream(file.toString())); - } catch (FileNotFoundException ex) { - this.addErrorMessage(this.getName() + ": Error parsing IE history entry " + file.getName()); - logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); - return; - } - - while (fileScanner.hasNext()) { - String line = fileScanner.nextLine(); - if (!line.startsWith("URL")) { - continue; - } - - String[] lineBuff = line.split("\\t"); - - if (lineBuff.length < 4) { - logger.log(Level.INFO, "Found unrecognized IE history format."); - continue; - } - - String ddtime = lineBuff[2]; - String actime = lineBuff[3]; - Long ftime = (long) 0; - String user = ""; - String realurl = ""; - String domain = ""; - - /* We've seen two types of lines: - * URL http://XYZ.com .... - * URL Visited: Joe@http://XYZ.com .... 
- */ - if (lineBuff[1].contains("@")) { - String url[] = lineBuff[1].split("@", 2); - user = url[0]; - user = user.replace("Visited:", ""); - user = user.replace(":Host:", ""); - user = user.replaceAll("(:)(.*?)(:)", ""); - user = user.trim(); - realurl = url[1]; - realurl = realurl.replace("Visited:", ""); - realurl = realurl.replaceAll(":(.*?):", ""); - realurl = realurl.replace(":Host:", ""); - realurl = realurl.trim(); - } else { - user = ""; - realurl = lineBuff[1].trim(); - } - - domain = Util.extractDomain(realurl); - - if (!ddtime.isEmpty()) { - ddtime = ddtime.replace("T", " "); - ddtime = ddtime.substring(ddtime.length() - 5); - } - - if (!actime.isEmpty()) { - try { - Long epochtime = dateFormatter.parse(actime).getTime(); - ftime = epochtime.longValue(); - ftime = ftime / 1000; - } catch (ParseException e) { - this.addErrorMessage(this.getName() + ": Error parsing Internet Explorer History entry."); - logger.log(Level.SEVERE, "Error parsing Pasco results.", e); - } - } - - try { - BlackboardArtifact bbart = origFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); - Collection bbattributes = new ArrayList<>(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", realurl)); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", ftime)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", "")); - // @@@ NOte that other browser modules are adding TITLE in hre for the title - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "RecentActivity", user)); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error writing Internet Explorer web history artifact to the blackboard.", ex); - } - } - fileScanner.close(); - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - } - - @Override - public void complete() { - } - - @Override - public void stop() { - if (execPasco != null) { - execPasco.stop(); - execPasco = null; - } - - //call regular cleanup from complete() method - complete(); - } - - @Override - public String getDescription() { - return "Extracts activity from Internet Explorer browser, as well as recent documents in windows."; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +//class ExtractIE extends Extract { +// private static final Logger logger = Logger.getLogger(ExtractIE.class.getName()); +// private IngestServices services; +// +// //paths set in init() +// private String moduleTempResultsDir; +// private String PASCO_LIB_PATH; +// private String JAVA_PATH; +// +// final private static String MODULE_VERSION = "1.0"; +// private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); +// +// private ExecUtil execPasco; +// +// //hide public constructor to prevent from instantiation by ingest module loader +// ExtractIE() { +// moduleName = "Internet Explorer"; +// moduleTempResultsDir = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), 
"IE") + File.separator + "results"; +// JAVA_PATH = PlatformUtil.getJavaPath(); +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// dataFound = false; +// this.getBookmark(dataSource, controller); +// this.getCookie(dataSource, controller); +// this.getHistory(dataSource, controller); +// } +// +// /** +// * Finds the files storing bookmarks and creates artifacts +// * @param dataSource +// * @param controller +// */ +// private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { +// org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); +// List favoritesFiles = null; +// try { +// favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites"); +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); +// this.addErrorMessage(this.getName() + ": Error getting Internet Explorer Bookmarks."); +// return; +// } +// +// if (favoritesFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any IE bookmark files."); +// return; +// } +// +// dataFound = true; +// for (AbstractFile fav : favoritesFiles) { +// if (fav.getSize() == 0) { +// continue; +// } +// +// if (controller.isCancelled()) { +// break; +// } +// +// String url = getURLFromIEBookmarkFile(fav); +// +// String name = fav.getName(); +// Long datetime = fav.getCrtime(); +// String Tempdate = datetime.toString(); +// datetime = Long.valueOf(Tempdate); +// String domain = Util.extractDomain(url); +// +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", name)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", datetime)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes); +// } +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); +// } +// +// private String getURLFromIEBookmarkFile(AbstractFile fav) { +// BufferedReader reader = new BufferedReader(new InputStreamReader(new ReadContentInputStream(fav))); +// String line, url = ""; +// try { +// while ((line = reader.readLine()) != null) { +// // The actual shortcut line we are interested in is of the +// // form URL=http://path/to/website +// if (line.startsWith("URL")) { +// url = line.substring(line.indexOf("=") + 1); +// break; +// } +// } +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); +// this.addErrorMessage(this.getName() + ": Error parsing IE bookmark File " + fav.getName()); +// } catch (IndexOutOfBoundsException ex) { +// logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. 
Unexpected format of the bookmark file: " + fav.getName(), ex); +// this.addErrorMessage(this.getName() + ": Error parsing IE bookmark File " + fav.getName()); +// } finally { +// try { +// reader.close(); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed to close reader.", ex); +// } +// } +// +// return url; +// } +// +// /** +// * Finds files that store cookies and adds artifacts for them. +// * @param dataSource +// * @param controller +// */ +// private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { +// org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); +// List cookiesFiles = null; +// try { +// cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies"); +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Error getting cookie files for IE"); +// this.addErrorMessage(this.getName() + ": " + "Error getting Internet Explorer cookie files."); +// return; +// } +// +// if (cookiesFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any IE cookies files."); +// return; +// } +// +// dataFound = true; +// for (AbstractFile cookiesFile : cookiesFiles) { +// if (controller.isCancelled()) { +// break; +// } +// if (cookiesFile.getSize() == 0) { +// continue; +// } +// +// byte[] t = new byte[(int) cookiesFile.getSize()]; +// try { +// final int bytesRead = cookiesFile.read(t, 0, cookiesFile.getSize()); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error reading bytes of Internet Explorer cookie.", ex); +// this.addErrorMessage(this.getName() + ": Error reading Internet Explorer cookie " + cookiesFile.getName()); +// continue; +// } +// String cookieString = new String(t); +// String[] values = cookieString.split("\n"); +// String url = values.length > 2 ? values[2] : ""; +// String value = values.length > 1 ? values[1] : ""; +// String name = values.length > 0 ? values[0] : ""; +// Long datetime = cookiesFile.getCrtime(); +// String tempDate = datetime.toString(); +// datetime = Long.valueOf(tempDate); +// String domain = Util.extractDomain(url); +// +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : "")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); +// } +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); +// } +// +// /** +// * Locates index.dat files, runs Pasco on them, and creates artifacts. 
+// * @param dataSource
+// * @param controller
+// */
+//    private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) {
+//        logger.log(Level.INFO, "Pasco results path: " + moduleTempResultsDir);
+//        boolean foundHistory = false;
+//
+//        final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false);
+//        if (pascoRoot == null) {
+//            this.addErrorMessage(this.getName() + ": Unable to get IE History: pasco not found");
+//            logger.log(Level.SEVERE, "Error finding pasco program");
+//            return;
+//        }
+//
+//        final String pascoHome = pascoRoot.getAbsolutePath();
+//        logger.log(Level.INFO, "Pasco2 home: " + pascoHome);
+//
+//        PASCO_LIB_PATH = pascoHome + File.separator + "pasco2.jar" + File.pathSeparator
+//                + pascoHome + File.separator + "*";
+//
+//        File resultsDir = new File(moduleTempResultsDir);
+//        resultsDir.mkdirs();
+//
+//        // get index.dat files
+//        org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
+//        List<AbstractFile> indexFiles = null;
+//        try {
+//            indexFiles = fileManager.findFiles(dataSource, "index.dat");
+//        } catch (TskCoreException ex) {
+//            this.addErrorMessage(this.getName() + ": Error getting Internet Explorer history files");
+//            logger.log(Level.WARNING, "Error fetching 'index.dat' files for Internet Explorer history.");
+//            return;
+//        }
+//
+//        if (indexFiles.isEmpty()) {
+//            String msg = "No Internet Explorer history files found.";
+//            logger.log(Level.INFO, msg);
+//            return;
+//        }
+//
+//        dataFound = true;
+//        String temps;
+//        String indexFileName;
+//        for (AbstractFile indexFile : indexFiles) {
+//            // Since each result represents an index.dat file,
+//            // just create these files with the following notation:
+//            // index<ID>.dat (i.e. index0.dat, index1.dat,..., indexN.dat)
+//            // Write each index.dat file to a temp directory.
+//            //BlackboardArtifact bbart = fsc.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY);
+//            indexFileName = "index" + Integer.toString((int) indexFile.getId()) + ".dat";
+//            //indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat";
+//            temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName;
+//            File datFile = new File(temps);
+//            if (controller.isCancelled()) {
+//                break;
+//            }
+//            try {
+//                ContentUtils.writeToFile(indexFile, datFile);
+//            } catch (IOException e) {
+//                logger.log(Level.SEVERE, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e);
+//                this.addErrorMessage(this.getName() + ": Error while trying to write file: " + datFile.getAbsolutePath());
+//                continue;
+//            }
+//
+//            String filename = "pasco2Result." + indexFile.getId() + ".txt";
+//            boolean bPascProcSuccess = executePasco(temps, filename);
+//
+//            //At this point pasco2 processed the index files.
+//            //Now fetch the results, parse them and then delete the files.
+//            if (bPascProcSuccess) {
+//                parsePascoOutput(indexFile, filename);
+//                foundHistory = true;
+//
+//                //Delete index.dat file since it was successfully processed by Pasco
+//                datFile.delete();
+//            } else {
+//                logger.log(Level.WARNING, "Pasco execution failed on: " + this.getName());
+//                this.addErrorMessage(this.getName() + ": Error processing Internet Explorer history.");
+//            }
+//        }
+//
+//        if (foundHistory) {
+//            services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY));
+//        }
+//    }
+//
+//    /**
+//     * Execute pasco on a single file that has been saved to disk.
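+//     *
+//     * (Editor's aside, reconstructed from the ExecUtil call below rather than
+//     * taken from the original source.) The effective command line is roughly:
+//     *
+//     *     java -cp <pascoHome>/pasco2.jar<pathSep><pascoHome>/* isi.pasco2.Main -T history <indexFilePath>
+//     *
+//     * where <pathSep> is File.pathSeparator; standard output is captured to
+//     * the result file through the supplied Writer.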
+//     * @param indexFilePath Path to local index.dat file to analyze
+//     * @param outputFileName Name of file to save output to
+//     * @return false on error
+//     */
+//    private boolean executePasco(String indexFilePath, String outputFileName) {
+//        boolean success = true;
+//
+//        Writer writer = null;
+//        try {
+//            final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName;
+//            logger.log(Level.INFO, "Writing pasco results to: " + outputFileFullPath);
+//            writer = new FileWriter(outputFileFullPath);
+//            execPasco = new ExecUtil();
+//            execPasco.execute(writer, JAVA_PATH,
+//                    "-cp", PASCO_LIB_PATH,
+//                    "isi.pasco2.Main", "-T", "history", indexFilePath);
+//            // @@@ Investigate use of history versus cache as type.
+//        } catch (IOException ex) {
+//            success = false;
+//            logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex);
+//        } catch (InterruptedException ex) {
+//            success = false;
+//            logger.log(Level.SEVERE, "Pasco has been interrupted, failed to extract some web history from Internet Explorer.", ex);
+//        }
+//        finally {
+//            if (writer != null) {
+//                try {
+//                    writer.flush();
+//                    writer.close();
+//                } catch (IOException ex) {
+//                    logger.log(Level.WARNING, "Error closing writer stream for Pasco result", ex);
+//                }
+//            }
+//        }
+//        return success;
+//    }
+//
+//    /**
+//     * Parse Pasco output and create artifacts.
+//     * @param origFile Original index.dat file that was analyzed to get this output
+//     * @param pascoOutputFileName name of pasco output file
+//     */
+//    private void parsePascoOutput(AbstractFile origFile, String pascoOutputFileName) {
+//
+//        String fnAbs = moduleTempResultsDir + File.separator + pascoOutputFileName;
+//
+//        File file = new File(fnAbs);
+//        if (file.exists() == false) {
+//            this.addErrorMessage(this.getName() + ": Pasco output not found: " + file.getName());
+//            logger.log(Level.WARNING, "Pasco output not found: " + file.getPath());
+//            return;
+//        }
+//
+//        // Make sure the file is not empty or the Scanner will
+//        // throw a "No Line found" Exception
+//        if (file.length() == 0) {
+//            return;
+//        }
+//
+//        Scanner fileScanner;
+//        try {
+//            fileScanner = new Scanner(new FileInputStream(file.toString()));
+//        } catch (FileNotFoundException ex) {
+//            this.addErrorMessage(this.getName() + ": Error parsing IE history entry " + file.getName());
+//            logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex);
+//            return;
+//        }
+//
+//        while (fileScanner.hasNext()) {
+//            String line = fileScanner.nextLine();
+//            if (!line.startsWith("URL")) {
+//                continue;
+//            }
+//
+//            String[] lineBuff = line.split("\\t");
+//
+//            if (lineBuff.length < 4) {
+//                logger.log(Level.INFO, "Found unrecognized IE history format.");
+//                continue;
+//            }
+//
+//            String ddtime = lineBuff[2];
+//            String actime = lineBuff[3];
+//            Long ftime = (long) 0;
+//            String user = "";
+//            String realurl = "";
+//            String domain = "";
+//
+//            /* We've seen two types of lines:
+//             * URL http://XYZ.com ....
+//             * URL Visited: Joe@http://XYZ.com ....
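+//             *
+//             * (Editor's aside) For the second form, the code below splits on the
+//             * first '@', giving "Visited: Joe" and "http://XYZ.com", then strips
+//             * the "Visited:" prefix and the colon-delimited host markers from
+//             * each part.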
+// */ +// if (lineBuff[1].contains("@")) { +// String url[] = lineBuff[1].split("@", 2); +// user = url[0]; +// user = user.replace("Visited:", ""); +// user = user.replace(":Host:", ""); +// user = user.replaceAll("(:)(.*?)(:)", ""); +// user = user.trim(); +// realurl = url[1]; +// realurl = realurl.replace("Visited:", ""); +// realurl = realurl.replaceAll(":(.*?):", ""); +// realurl = realurl.replace(":Host:", ""); +// realurl = realurl.trim(); +// } else { +// user = ""; +// realurl = lineBuff[1].trim(); +// } +// +// domain = Util.extractDomain(realurl); +// +// if (!ddtime.isEmpty()) { +// ddtime = ddtime.replace("T", " "); +// ddtime = ddtime.substring(ddtime.length() - 5); +// } +// +// if (!actime.isEmpty()) { +// try { +// Long epochtime = dateFormatter.parse(actime).getTime(); +// ftime = epochtime.longValue(); +// ftime = ftime / 1000; +// } catch (ParseException e) { +// this.addErrorMessage(this.getName() + ": Error parsing Internet Explorer History entry."); +// logger.log(Level.SEVERE, "Error parsing Pasco results.", e); +// } +// } +// +// try { +// BlackboardArtifact bbart = origFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY); +// Collection bbattributes = new ArrayList<>(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", realurl)); +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl))); +// +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", ftime)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", "")); +// // @@@ NOte that other browser modules are adding TITLE in hre for the title +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "Internet Explorer")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME.getTypeID(), "RecentActivity", user)); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error writing Internet Explorer web history artifact to the blackboard.", ex); +// } +// } +// fileScanner.close(); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public void stop() { +// if (execPasco != null) { +// execPasco.stop(); +// execPasco = null; +// } +// +// //call regular cleanup from complete() method +// complete(); +// } +// +// @Override +// public String getDescription() { +// return "Extracts activity from Internet Explorer browser, as well as recent documents in windows."; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 49da26db4f..545c154a40 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -20,553 +20,552 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.recentactivity; - -import java.io.*; -import java.io.File; -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.logging.Level; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.openide.modules.InstalledFileLocator; -import org.sleuthkit.autopsy.coreutils.ExecUtil; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.recentactivity.ExtractUSB.USBInfo; -import org.sleuthkit.datamodel.*; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.InputSource; -import org.xml.sax.SAXException; - +//package org.sleuthkit.autopsy.recentactivity; +// +//import java.io.*; +//import java.io.File; +//import java.text.ParseException; +//import java.text.SimpleDateFormat; +//import java.util.*; +//import java.util.logging.Level; +//import javax.xml.parsers.DocumentBuilder; +//import javax.xml.parsers.DocumentBuilderFactory; +//import javax.xml.parsers.ParserConfigurationException; +//import org.openide.modules.InstalledFileLocator; +//import org.sleuthkit.autopsy.coreutils.ExecUtil; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.datamodel.ContentUtils; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.recentactivity.ExtractUSB.USBInfo; +//import org.sleuthkit.datamodel.*; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.w3c.dom.Document; +//import org.w3c.dom.Element; +//import org.w3c.dom.Node; +//import org.w3c.dom.NodeList; +//import org.xml.sax.InputSource; +//import org.xml.sax.SAXException; +// /** * Extract windows registry data using regripper. * Runs two versions of regripper. One is the generally available set of plug-ins * and the second is a set that were customized for Autopsy to produce a more structured * output of XML so that we can parse and turn into blackboard artifacts. 
*/ -class ExtractRegistry extends Extract { - - private Logger logger = Logger.getLogger(this.getClass().getName()); - private String RR_PATH; - private String RR_FULL_PATH; - private boolean rrFound = false; // true if we found the Autopsy-specific version of regripper - private boolean rrFullFound = false; // true if we found the full version of regripper - final private static String MODULE_VERSION = "1.0"; - private ExecUtil execRR; - - //hide public constructor to prevent from instantiation by ingest module loader - ExtractRegistry() { - final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); - if (rrRoot == null) { - logger.log(Level.SEVERE, "RegRipper not found"); - rrFound = false; - return; - } else { - rrFound = true; - } - - final String rrHome = rrRoot.getAbsolutePath(); - logger.log(Level.INFO, "RegRipper home: " + rrHome); - - if (PlatformUtil.isWindowsOS()) { - RR_PATH = rrHome + File.separator + "rip.exe"; - } else { - RR_PATH = "perl " + rrHome + File.separator + "rip.pl"; - } - - final File rrFullRoot = InstalledFileLocator.getDefault().locate("rr-full", ExtractRegistry.class.getPackage().getName(), false); - if (rrFullRoot == null) { - logger.log(Level.SEVERE, "RegRipper Full not found"); - rrFullFound = false; - } else { - rrFullFound = true; - } - - final String rrFullHome = rrFullRoot.getAbsolutePath(); - logger.log(Level.INFO, "RegRipper Full home: " + rrFullHome); - - if (PlatformUtil.isWindowsOS()) { - RR_FULL_PATH = rrFullHome + File.separator + "rip.exe"; - } else { - RR_FULL_PATH = "perl " + rrFullHome + File.separator + "rip.pl"; - } - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - /** - * Search for the registry hives on the system. - * @param dataSource Data source to search for hives in. - * @return List of registry hives - */ - private List findRegistryFiles(Content dataSource) { - List allRegistryFiles = new ArrayList<>(); - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - - // find the user-specific ntuser-dat files - try { - allRegistryFiles.addAll(fileManager.findFiles(dataSource, "ntuser.dat")); - } - catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file."); - } - - // find the system hives' - String[] regFileNames = new String[] {"system", "software", "security", "sam"}; - for (String regFileName : regFileNames) { - try { - allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "/system32/config")); - } - catch (TskCoreException ex) { - String msg = "Error fetching registry file: " + regFileName; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - } - } - return allRegistryFiles; - } - - /** - * Identifies registry files in the database by mtimeItem, runs regripper on them, and parses the output. 
- * - * @param dataSource - * @param controller - */ - private void analyzeRegistryFiles(Content dataSource, IngestDataSourceWorkerController controller) { - List allRegistryFiles = findRegistryFiles(dataSource); - - // open the log file - FileWriter logFile = null; - try { - logFile = new FileWriter(RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + "regripper-info.txt"); - } catch (IOException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } - - ExtractUSB extrctr = new ExtractUSB(); - - int j = 0; - for (AbstractFile regFile : allRegistryFiles) { - String regFileName = regFile.getName(); - String regFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg") + File.separator + regFileName; - String outputPathBase = RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + regFileName + "-regripper-" + Integer.toString(j++); - File regFileNameLocalFile = new File(regFileNameLocal); - try { - ContentUtils.writeToFile(regFile, regFileNameLocalFile); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the temp registry file. {0}", ex); - this.addErrorMessage(this.getName() + ": Error analyzing registry file " + regFileName); - continue; - } - - if (controller.isCancelled()) { - break; - } - - try { - if (logFile != null) { - logFile.write(Integer.toString(j-1) + "\t" + regFile.getUniquePath() + "\n"); - } - } - catch (TskCoreException | IOException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } - - logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); - RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); - - if (controller.isCancelled()) { - break; - } - - // parse the autopsy-specific output - if (regOutputFiles.autopsyPlugins.isEmpty() == false) { - if (parseAutopsyPluginOutput(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { - this.addErrorMessage(this.getName() + ": Failed parsing registry file results " + regFileName); - } - } - - // create a RAW_TOOL artifact for the full output - if (regOutputFiles.fullPlugins.isEmpty() == false) { - try { - BlackboardArtifact art = regFile.newArtifact(ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID()); - BlackboardAttribute att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "RegRipper"); - art.addAttribute(att); - - FileReader fread = new FileReader(regOutputFiles.fullPlugins); - BufferedReader input = new BufferedReader(fread); - - StringBuilder sb = new StringBuilder(); - try { - while (true) { - String s = input.readLine(); - if (s == null) { - break; - } - sb.append(s).append("\n"); - } - } catch (IOException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } finally { - try { - input.close(); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed to close reader.", ex); - } - } - att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), "RecentActivity", sb.toString()); - art.addAttribute(att); - } catch (FileNotFoundException ex) { - this.addErrorMessage(this.getName() + ": Error reading registry file - " + regOutputFiles.fullPlugins); - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } catch (TskCoreException ex) { - // TODO - add error message here? 
- java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } - } - - // delete the hive - regFileNameLocalFile.delete(); - } - - try { - if (logFile != null) { - logFile.close(); - } - } catch (IOException ex) { - java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); - } - } - - private class RegOutputFiles { - public String autopsyPlugins = ""; - public String fullPlugins = ""; - } - - /** - * Execute regripper on the given registry. - * @param regFilePath Path to local copy of registry - * @param outFilePathBase Path to location to save output file to. Base mtimeItem that will be extended on - */ - private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) { - String autopsyType = ""; // Type argument for rr for autopsy-specific modules - String fullType = ""; // Type argument for rr for full set of modules - - RegOutputFiles regOutputFiles = new RegOutputFiles(); - - if (regFilePath.toLowerCase().contains("system")) { - autopsyType = "autopsysystem"; - fullType = "system"; - } - else if (regFilePath.toLowerCase().contains("software")) { - autopsyType = "autopsysoftware"; - fullType = "software"; - } - else if (regFilePath.toLowerCase().contains("ntuser")) { - autopsyType = "autopsyntuser"; - fullType = "ntuser"; - } - else if (regFilePath.toLowerCase().contains("sam")) { - fullType = "sam"; - } - else if (regFilePath.toLowerCase().contains("security")) { - fullType = "security"; - } - else { - return regOutputFiles; - } - - // run the autopsy-specific set of modules - if (!autopsyType.isEmpty() && rrFound) { - // TODO - add error messages - Writer writer = null; - try { - regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; - logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins); - writer = new FileWriter(regOutputFiles.autopsyPlugins); - execRR = new ExecUtil(); - execRR.execute(writer, RR_PATH, - "-r", regFilePath, "-f", autopsyType); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Unable to RegRipper and process parse some registry files.", ex); - this.addErrorMessage(this.getName() + ": Failed to analyze registry file"); - } catch (InterruptedException ex) { - logger.log(Level.SEVERE, "RegRipper has been interrupted, failed to parse registry.", ex); - this.addErrorMessage(this.getName() + ": Failed to analyze registry file"); - } finally { - if (writer != null) { - try { - writer.close(); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error closing output writer after running RegRipper", ex); - } - } - } - } - - // run the full set of rr modules - if (!fullType.isEmpty() && rrFullFound) { - Writer writer = null; - try { - regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; - logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins); - writer = new FileWriter(regOutputFiles.fullPlugins); - execRR = new ExecUtil(); - execRR.execute(writer, RR_FULL_PATH, - "-r", regFilePath, "-f", fullType); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Unable to run full RegRipper and process parse some registry files.", ex); - this.addErrorMessage(this.getName() + ": Failed to analyze registry file"); - } catch (InterruptedException ex) { - logger.log(Level.SEVERE, "RegRipper full has been interrupted, failed to parse registry.", ex); - this.addErrorMessage(this.getName() + ": Failed to analyze registry file"); - } finally { - if (writer != null) { - try { - writer.close(); - } 
catch (IOException ex) { - logger.log(Level.SEVERE, "Error closing output writer after running RegRipper full", ex); - } - } - } - } - - return regOutputFiles; - } - - // @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT - private boolean parseAutopsyPluginOutput(String regRecord, long orgId, ExtractUSB extrctr) { - FileInputStream fstream = null; - try { - SleuthkitCase tempDb = currentCase.getSleuthkitCase(); - - // Read the file in and create a Document and elements - File regfile = new File(regRecord); - fstream = new FileInputStream(regfile); - - String regString = new Scanner(fstream, "UTF-8").useDelimiter("\\Z").next(); - String startdoc = ""; - String result = regString.replaceAll("----------------------------------------", ""); - result = result.replaceAll("\\n", ""); - result = result.replaceAll("\\r", ""); - result = result.replaceAll("'", "'"); - result = result.replaceAll("&", "&"); - String enddoc = ""; - String stringdoc = startdoc + result + enddoc; - DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); - Document doc = builder.parse(new InputSource(new StringReader(stringdoc))); - - // cycle through the elements in the doc - Element oroot = doc.getDocumentElement(); - NodeList children = oroot.getChildNodes(); - int len = children.getLength(); - for (int i = 0; i < len; i++) { - Element tempnode = (Element) children.item(i); - - String dataType = tempnode.getNodeName(); - - NodeList timenodes = tempnode.getElementsByTagName("mtime"); - Long mtime = null; - if (timenodes.getLength() > 0) { - Element timenode = (Element) timenodes.item(0); - String etime = timenode.getTextContent(); - try { - Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(etime).getTime(); - mtime = epochtime.longValue(); - String Tempdate = mtime.toString(); - mtime = Long.valueOf(Tempdate) / 1000; - } catch (ParseException ex) { - logger.log(Level.WARNING, "Failed to parse epoch time when parsing the registry."); - } - } - - NodeList artroots = tempnode.getElementsByTagName("artifacts"); - if (artroots.getLength() == 0) { - // If there isn't an artifact node, skip this entry - continue; - } - - Element artroot = (Element) artroots.item(0); - NodeList myartlist = artroot.getChildNodes(); - String winver = ""; - for (int j = 0; j < myartlist.getLength(); j++) { - Node artchild = myartlist.item(j); - // If it has attributes, then it is an Element (based off API) - if (artchild.hasAttributes()) { - Element artnode = (Element) artchild; - - String value = artnode.getTextContent().trim(); - Collection bbattributes = new ArrayList(); - - if ("recentdocs".equals(dataType)) { - // BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", dataType, mtime)); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", dataType, mtimeItem)); - // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", dataType, value)); - // bbart.addAttributes(bbattributes); - // @@@ BC: Why are we ignoring this... 
- } - else if ("usb".equals(dataType)) { - try { - Long usbMtime = Long.parseLong(artnode.getAttribute("mtime")); - usbMtime = Long.valueOf(usbMtime.toString()); - - BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", usbMtime)); - String dev = artnode.getAttribute("dev"); - String model = dev; - if (dev.toLowerCase().contains("vid")) { - USBInfo info = extrctr.get(dev); - if(info.getVendor()!=null) - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), "RecentActivity", info.getVendor())); - if(info.getProduct() != null) - model = info.getProduct(); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", model)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard."); - } - } - else if ("uninstall".equals(dataType)) { - Long itemMtime = null; - try { - Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(artnode.getAttribute("mtime")).getTime(); - itemMtime = epochtime.longValue(); - itemMtime = itemMtime / 1000; - } catch (ParseException e) { - logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact."); - } - - try { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", value)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", itemMtime)); - BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); - } - } - else if ("WinVersion".equals(dataType)) { - String name = artnode.getAttribute("name"); - - if (name.contains("ProductName")) { - winver = value; - } - if (name.contains("CSDVersion")) { - winver = winver + " " + value; - } - if (name.contains("InstallDate")) { - Long installtime = null; - try { - Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(value).getTime(); - installtime = epochtime.longValue(); - String Tempdate = installtime.toString(); - installtime = Long.valueOf(Tempdate) / 1000; - } catch (ParseException e) { - logger.log(Level.SEVERE, "RegRipper::Conversion on DateTime -> ", e); - } - try { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", winver)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", installtime)); - BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); - } - } - } - else if ("office".equals(dataType)) { - String name = artnode.getAttribute("name"); - - try { - BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); - // @@@ BC: Consider removing this after some more testing. 
It looks like an Mtime associated with the root key and not the individual item - if (mtime != null) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", mtime)); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", name)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", artnode.getNodeName())); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard."); - } - } - } - } - } - return true; - } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "Error finding the registry file."); - } catch (SAXException ex) { - logger.log(Level.SEVERE, "Error parsing the registry XML: {0}", ex); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error building the document parser: {0}", ex); - } catch (ParserConfigurationException ex) { - logger.log(Level.SEVERE, "Error configuring the registry parser: {0}", ex); - } finally { - try { - if (fstream != null) { - fstream.close(); - } - } catch (IOException ex) { - } - } - return false; - } - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - analyzeRegistryFiles(dataSource, controller); - } - - @Override - public void init(IngestModuleInit initContext) { - } - - @Override - public void complete() { - } - - @Override - public void stop() { - if (execRR != null) { - execRR.stop(); - execRR = null; - } - } - - @Override - public String getName() { - return "Registry"; - } - - @Override - public String getDescription() { - return "Extracts activity from the Windows registry utilizing RegRipper."; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +//class ExtractRegistry extends Extract { +// +// private Logger logger = Logger.getLogger(this.getClass().getName()); +// private String RR_PATH; +// private String RR_FULL_PATH; +// private boolean rrFound = false; // true if we found the Autopsy-specific version of regripper +// private boolean rrFullFound = false; // true if we found the full version of regripper +// final private static String MODULE_VERSION = "1.0"; +// private ExecUtil execRR; +// +// //hide public constructor to prevent from instantiation by ingest module loader +// ExtractRegistry() { +// final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); +// if (rrRoot == null) { +// logger.log(Level.SEVERE, "RegRipper not found"); +// rrFound = false; +// return; +// } else { +// rrFound = true; +// } +// +// final String rrHome = rrRoot.getAbsolutePath(); +// logger.log(Level.INFO, "RegRipper home: " + rrHome); +// +// if (PlatformUtil.isWindowsOS()) { +// RR_PATH = rrHome + File.separator + "rip.exe"; +// } else { +// RR_PATH = "perl " + rrHome + File.separator + "rip.pl"; +// } +// +// final File rrFullRoot = InstalledFileLocator.getDefault().locate("rr-full", ExtractRegistry.class.getPackage().getName(), false); +// if (rrFullRoot == null) { +// logger.log(Level.SEVERE, "RegRipper Full not found"); +// rrFullFound = false; +// } else { +// rrFullFound = true; +// } +// +// final String rrFullHome = rrFullRoot.getAbsolutePath(); +// logger.log(Level.INFO, "RegRipper Full home: " + rrFullHome); 
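+//        // NOTE: if rrFullRoot is null, rrFullFound is set to false above but
+//        // rrFullRoot.getAbsolutePath() is still called, which would throw a
+//        // NullPointerException. A guard mirroring the rr case (sketch) appears intended:
+//        //     if (rrFullRoot == null) { rrFullFound = false; return; }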
+// +// if (PlatformUtil.isWindowsOS()) { +// RR_FULL_PATH = rrFullHome + File.separator + "rip.exe"; +// } else { +// RR_FULL_PATH = "perl " + rrFullHome + File.separator + "rip.pl"; +// } +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// /** +// * Search for the registry hives on the system. +// * @param dataSource Data source to search for hives in. +// * @return List of registry hives +// */ +// private List findRegistryFiles(Content dataSource) { +// List allRegistryFiles = new ArrayList<>(); +// org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); +// +// // find the user-specific ntuser-dat files +// try { +// allRegistryFiles.addAll(fileManager.findFiles(dataSource, "ntuser.dat")); +// } +// catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file."); +// } +// +// // find the system hives' +// String[] regFileNames = new String[] {"system", "software", "security", "sam"}; +// for (String regFileName : regFileNames) { +// try { +// allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "/system32/config")); +// } +// catch (TskCoreException ex) { +// String msg = "Error fetching registry file: " + regFileName; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// } +// } +// return allRegistryFiles; +// } +// +// /** +// * Identifies registry files in the database by mtimeItem, runs regripper on them, and parses the output. +// * +// * @param dataSource +// * @param controller +// */ +// private void analyzeRegistryFiles(Content dataSource, IngestDataSourceWorkerController controller) { +// List allRegistryFiles = findRegistryFiles(dataSource); +// +// // open the log file +// FileWriter logFile = null; +// try { +// logFile = new FileWriter(RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + "regripper-info.txt"); +// } catch (IOException ex) { +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } +// +// ExtractUSB extrctr = new ExtractUSB(); +// +// int j = 0; +// for (AbstractFile regFile : allRegistryFiles) { +// String regFileName = regFile.getName(); +// String regFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg") + File.separator + regFileName; +// String outputPathBase = RAImageIngestModule.getRAOutputPath(currentCase, "reg") + File.separator + regFileName + "-regripper-" + Integer.toString(j++); +// File regFileNameLocalFile = new File(regFileNameLocal); +// try { +// ContentUtils.writeToFile(regFile, regFileNameLocalFile); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the temp registry file. 
{0}", ex); +// this.addErrorMessage(this.getName() + ": Error analyzing registry file " + regFileName); +// continue; +// } +// +// if (controller.isCancelled()) { +// break; +// } +// +// try { +// if (logFile != null) { +// logFile.write(Integer.toString(j-1) + "\t" + regFile.getUniquePath() + "\n"); +// } +// } +// catch (TskCoreException | IOException ex) { +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } +// +// logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); +// RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); +// +// if (controller.isCancelled()) { +// break; +// } +// +// // parse the autopsy-specific output +// if (regOutputFiles.autopsyPlugins.isEmpty() == false) { +// if (parseAutopsyPluginOutput(regOutputFiles.autopsyPlugins, regFile.getId(), extrctr) == false) { +// this.addErrorMessage(this.getName() + ": Failed parsing registry file results " + regFileName); +// } +// } +// +// // create a RAW_TOOL artifact for the full output +// if (regOutputFiles.fullPlugins.isEmpty() == false) { +// try { +// BlackboardArtifact art = regFile.newArtifact(ARTIFACT_TYPE.TSK_TOOL_OUTPUT.getTypeID()); +// BlackboardAttribute att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "RegRipper"); +// art.addAttribute(att); +// +// FileReader fread = new FileReader(regOutputFiles.fullPlugins); +// BufferedReader input = new BufferedReader(fread); +// +// StringBuilder sb = new StringBuilder(); +// try { +// while (true) { +// String s = input.readLine(); +// if (s == null) { +// break; +// } +// sb.append(s).append("\n"); +// } +// } catch (IOException ex) { +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } finally { +// try { +// input.close(); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed to close reader.", ex); +// } +// } +// att = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), "RecentActivity", sb.toString()); +// art.addAttribute(att); +// } catch (FileNotFoundException ex) { +// this.addErrorMessage(this.getName() + ": Error reading registry file - " + regOutputFiles.fullPlugins); +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } catch (TskCoreException ex) { +// // TODO - add error message here? +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } +// } +// +// // delete the hive +// regFileNameLocalFile.delete(); +// } +// +// try { +// if (logFile != null) { +// logFile.close(); +// } +// } catch (IOException ex) { +// java.util.logging.Logger.getLogger(ExtractRegistry.class.getName()).log(Level.SEVERE, null, ex); +// } +// } +// +// private class RegOutputFiles { +// public String autopsyPlugins = ""; +// public String fullPlugins = ""; +// } +// +// /** +// * Execute regripper on the given registry. +// * @param regFilePath Path to local copy of registry +// * @param outFilePathBase Path to location to save output file to. 
Base name that will be extended on.
+//     */
+//    private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) {
+//        String autopsyType = "";    // Type argument for rr for autopsy-specific modules
+//        String fullType = "";       // Type argument for rr for full set of modules
+//
+//        RegOutputFiles regOutputFiles = new RegOutputFiles();
+//
+//        if (regFilePath.toLowerCase().contains("system")) {
+//            autopsyType = "autopsysystem";
+//            fullType = "system";
+//        }
+//        else if (regFilePath.toLowerCase().contains("software")) {
+//            autopsyType = "autopsysoftware";
+//            fullType = "software";
+//        }
+//        else if (regFilePath.toLowerCase().contains("ntuser")) {
+//            autopsyType = "autopsyntuser";
+//            fullType = "ntuser";
+//        }
+//        else if (regFilePath.toLowerCase().contains("sam")) {
+//            fullType = "sam";
+//        }
+//        else if (regFilePath.toLowerCase().contains("security")) {
+//            fullType = "security";
+//        }
+//        else {
+//            return regOutputFiles;
+//        }
+//
+//        // run the autopsy-specific set of modules
+//        if (!autopsyType.isEmpty() && rrFound) {
+//            // TODO - add error messages
+//            Writer writer = null;
+//            try {
+//                regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt";
+//                logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins);
+//                writer = new FileWriter(regOutputFiles.autopsyPlugins);
+//                execRR = new ExecUtil();
+//                execRR.execute(writer, RR_PATH,
+//                        "-r", regFilePath, "-f", autopsyType);
+//            } catch (IOException ex) {
+//                logger.log(Level.SEVERE, "Unable to run RegRipper and parse some registry files.", ex);
+//                this.addErrorMessage(this.getName() + ": Failed to analyze registry file");
+//            } catch (InterruptedException ex) {
+//                logger.log(Level.SEVERE, "RegRipper has been interrupted, failed to parse registry.", ex);
+//                this.addErrorMessage(this.getName() + ": Failed to analyze registry file");
+//            } finally {
+//                if (writer != null) {
+//                    try {
+//                        writer.close();
+//                    } catch (IOException ex) {
+//                        logger.log(Level.SEVERE, "Error closing output writer after running RegRipper", ex);
+//                    }
+//                }
+//            }
+//        }
+//
+//        // run the full set of rr modules
+//        if (!fullType.isEmpty() && rrFullFound) {
+//            Writer writer = null;
+//            try {
+//                regOutputFiles.fullPlugins = outFilePathBase + "-full.txt";
+//                logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins);
+//                writer = new FileWriter(regOutputFiles.fullPlugins);
+//                execRR = new ExecUtil();
+//                execRR.execute(writer, RR_FULL_PATH,
+//                        "-r", regFilePath, "-f", fullType);
+//            } catch (IOException ex) {
+//                logger.log(Level.SEVERE, "Unable to run full RegRipper and parse some registry files.", ex);
+//                this.addErrorMessage(this.getName() + ": Failed to analyze registry file");
+//            } catch (InterruptedException ex) {
+//                logger.log(Level.SEVERE, "Full RegRipper has been interrupted, failed to parse registry.", ex);
+//                this.addErrorMessage(this.getName() + ": Failed to analyze registry file");
+//            } finally {
+//                if (writer != null) {
+//                    try {
+//                        writer.close();
+//                    } catch (IOException ex) {
+//                        logger.log(Level.SEVERE, "Error closing output writer after running full RegRipper", ex);
+//                    }
+//                }
+//            }
+//        }
+//
+//        return regOutputFiles;
+//    }
+//
+//    // @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT
+//    private boolean parseAutopsyPluginOutput(String regRecord, long orgId, ExtractUSB extrctr) {
+//        FileInputStream fstream = null;
+//        try {
+//            SleuthkitCase tempDb = currentCase.getSleuthkitCase();
+//
+//            // Read
the file in and create a Document and elements +// File regfile = new File(regRecord); +// fstream = new FileInputStream(regfile); +// +// String regString = new Scanner(fstream, "UTF-8").useDelimiter("\\Z").next(); +// String startdoc = ""; +// String result = regString.replaceAll("----------------------------------------", ""); +// result = result.replaceAll("\\n", ""); +// result = result.replaceAll("\\r", ""); +// result = result.replaceAll("'", "'"); +// result = result.replaceAll("&", "&"); +// String enddoc = ""; +// String stringdoc = startdoc + result + enddoc; +// DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder(); +// Document doc = builder.parse(new InputSource(new StringReader(stringdoc))); +// +// // cycle through the elements in the doc +// Element oroot = doc.getDocumentElement(); +// NodeList children = oroot.getChildNodes(); +// int len = children.getLength(); +// for (int i = 0; i < len; i++) { +// Element tempnode = (Element) children.item(i); +// +// String dataType = tempnode.getNodeName(); +// +// NodeList timenodes = tempnode.getElementsByTagName("mtime"); +// Long mtime = null; +// if (timenodes.getLength() > 0) { +// Element timenode = (Element) timenodes.item(0); +// String etime = timenode.getTextContent(); +// try { +// Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(etime).getTime(); +// mtime = epochtime.longValue(); +// String Tempdate = mtime.toString(); +// mtime = Long.valueOf(Tempdate) / 1000; +// } catch (ParseException ex) { +// logger.log(Level.WARNING, "Failed to parse epoch time when parsing the registry."); +// } +// } +// +// NodeList artroots = tempnode.getElementsByTagName("artifacts"); +// if (artroots.getLength() == 0) { +// // If there isn't an artifact node, skip this entry +// continue; +// } +// +// Element artroot = (Element) artroots.item(0); +// NodeList myartlist = artroot.getChildNodes(); +// String winver = ""; +// for (int j = 0; j < myartlist.getLength(); j++) { +// Node artchild = myartlist.item(j); +// // If it has attributes, then it is an Element (based off API) +// if (artchild.hasAttributes()) { +// Element artnode = (Element) artchild; +// +// String value = artnode.getTextContent().trim(); +// Collection bbattributes = new ArrayList(); +// +// if ("recentdocs".equals(dataType)) { +// // BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); +// // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", dataType, mtime)); +// // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", dataType, mtimeItem)); +// // bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", dataType, value)); +// // bbart.addAttributes(bbattributes); +// // @@@ BC: Why are we ignoring this... 
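+//                            // As the @@@ comment above asks: the "recentdocs" branch is currently
+//                            // a no-op. The artifact-posting code is commented out, so recent-document
+//                            // registry entries are parsed here but never posted to the blackboard.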
+// } +// else if ("usb".equals(dataType)) { +// try { +// Long usbMtime = Long.parseLong(artnode.getAttribute("mtime")); +// usbMtime = Long.valueOf(usbMtime.toString()); +// +// BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", usbMtime)); +// String dev = artnode.getAttribute("dev"); +// String model = dev; +// if (dev.toLowerCase().contains("vid")) { +// USBInfo info = extrctr.get(dev); +// if(info.getVendor()!=null) +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), "RecentActivity", info.getVendor())); +// if(info.getProduct() != null) +// model = info.getProduct(); +// } +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), "RecentActivity", model)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID.getTypeID(), "RecentActivity", value)); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error adding device attached artifact to blackboard."); +// } +// } +// else if ("uninstall".equals(dataType)) { +// Long itemMtime = null; +// try { +// Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(artnode.getAttribute("mtime")).getTime(); +// itemMtime = epochtime.longValue(); +// itemMtime = itemMtime / 1000; +// } catch (ParseException e) { +// logger.log(Level.WARNING, "Failed to parse epoch time for installed program artifact."); +// } +// +// try { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", value)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", itemMtime)); +// BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); +// } +// } +// else if ("WinVersion".equals(dataType)) { +// String name = artnode.getAttribute("name"); +// +// if (name.contains("ProductName")) { +// winver = value; +// } +// if (name.contains("CSDVersion")) { +// winver = winver + " " + value; +// } +// if (name.contains("InstallDate")) { +// Long installtime = null; +// try { +// Long epochtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyy").parse(value).getTime(); +// installtime = epochtime.longValue(); +// String Tempdate = installtime.toString(); +// installtime = Long.valueOf(Tempdate) / 1000; +// } catch (ParseException e) { +// logger.log(Level.SEVERE, "RegRipper::Conversion on DateTime -> ", e); +// } +// try { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", winver)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", installtime)); +// BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_INSTALLED_PROG); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard."); +// } +// } +// } +// else if ("office".equals(dataType)) { +// String name = artnode.getAttribute("name"); +// +// try { +// BlackboardArtifact bbart = tempDb.getContentById(orgId).newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT); +// // @@@ BC: Consider 
removing this after some more testing. It looks like an Mtime associated with the root key and not the individual item +// if (mtime != null) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", mtime)); +// } +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", name)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", value)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", artnode.getNodeName())); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard."); +// } +// } +// } +// } +// } +// return true; +// } catch (FileNotFoundException ex) { +// logger.log(Level.SEVERE, "Error finding the registry file."); +// } catch (SAXException ex) { +// logger.log(Level.SEVERE, "Error parsing the registry XML: {0}", ex); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error building the document parser: {0}", ex); +// } catch (ParserConfigurationException ex) { +// logger.log(Level.SEVERE, "Error configuring the registry parser: {0}", ex); +// } finally { +// try { +// if (fstream != null) { +// fstream.close(); +// } +// } catch (IOException ex) { +// } +// } +// return false; +// } +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// analyzeRegistryFiles(dataSource, controller); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public void stop() { +// if (execRR != null) { +// execRR.stop(); +// execRR = null; +// } +// } +// +// @Override +// public String getName() { +// return "Registry"; +// } +// +// @Override +// public String getDescription() { +// return "Extracts activity from the Windows registry utilizing RegRipper."; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index e5e4967525..c3de313c18 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -22,482 +22,481 @@ */ package org.sleuthkit.autopsy.recentactivity; -import java.io.File; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import 
org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskCoreException; +//import java.io.File; +//import java.io.IOException; +//import java.io.UnsupportedEncodingException; +//import java.net.URLDecoder; +//import java.util.ArrayList; +//import java.util.Collection; +//import java.util.HashMap; +//import java.util.List; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.casemodule.services.FileManager; +//import org.sleuthkit.autopsy.datamodel.ContentUtils; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.datamodel.TskCoreException; /** * Firefox recent activity extraction */ -class Firefox extends Extract { - - private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; - private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; - private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; - private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; - private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; - private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; - - final private static String MODULE_VERSION = "1.0"; - private IngestServices services; - - //hide public constructor to prevent from instantiation by ingest module loader - Firefox() { - moduleName = "FireFox"; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - dataFound = false; - this.getHistory(dataSource, controller); - this.getBookmark(dataSource, controller); - this.getDownload(dataSource, controller); - this.getCookie(dataSource, controller); - } - - private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { - //Make these seperate, this is for history - - //List FFSqlitedb = this.extractFiles(dataSource, "select * from tsk_files where name LIKE '%places.sqlite%' and name NOT LIKE '%journal%' and parent_path LIKE '%Firefox%'"); - - FileManager fileManager = currentCase.getServices().getFileManager(); - 
List historyFiles = null; - try { - historyFiles = fileManager.findFiles(dataSource, "%places.sqlite%", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching internet history files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (historyFiles.isEmpty()) { - String msg = "No FireFox history files found."; - logger.log(Level.INFO, msg); - return; - } - - dataFound = true; - - int j = 0; - for (AbstractFile historyFile : historyFiles) { - if (historyFile.getSize() == 0) { - continue; - } - - String fileName = historyFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; - try { - ContentUtils.writeToFile(historyFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the sqlite db for firefox web history artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - List> tempList = this.dbConnect(temps, historyQuery); - logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("visit_date").toString())))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", ((result.get("ref").toString() != null) ? result.get("ref").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); - - } - ++j; - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); - } - - /** - * Queries for bookmark files and adds artifacts - * @param dataSource - * @param controller - */ - private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List bookmarkFiles = null; - try { - bookmarkFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching bookmark files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (bookmarkFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any firefox bookmark files."); - return; - } - - dataFound = true; - - int j = 0; - for (AbstractFile bookmarkFile : bookmarkFiles) { - if (bookmarkFile.getSize() == 0) { - continue; - } - String fileName = bookmarkFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; - try { - ContentUtils.writeToFile(bookmarkFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the sqlite db for firefox bookmark artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - List> tempList = this.dbConnect(temps, bookmarkQuery); - logger.log(Level.INFO, moduleName + "- Now getting bookmarks from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); - if (Long.valueOf(result.get("dateAdded").toString()) > 0) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); - - } - ++j; - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); - } - - /** - * Queries for cookies file and adds artifacts - * @param dataSource - * @param controller - */ - private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { - FileManager fileManager = currentCase.getServices().getFileManager(); - List cookiesFiles = null; - try { - cookiesFiles = fileManager.findFiles(dataSource, "cookies.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching cookies files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (cookiesFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any Firefox cookie files."); - return; - } - - dataFound = true; - int j = 0; - for (AbstractFile cookiesFile : cookiesFiles) { - if (cookiesFile.getSize() == 0) { - continue; - } - String fileName = cookiesFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; - try { - ContentUtils.writeToFile(cookiesFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the sqlite db for firefox cookie artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - boolean checkColumn = Util.checkColumn("creationTime", "moz_cookies", temps); - String query = null; - if (checkColumn) { - query = cookieQuery; - } else { - query = cookieQueryV3; - } - - List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("host").toString() != null) ? result.get("host").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", (Long.valueOf(result.get("lastAccessed").toString())))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - - if (checkColumn == true) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("creationTime").toString())))); - } - String domain = Util.extractDomain(result.get("host").toString()); - domain = domain.replaceFirst("^\\.+(?!$)", ""); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); - } - ++j; - dbFile.delete(); - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); - } - - /** - * Queries for downloads files and adds artifacts - * @param dataSource - * @param controller - */ - private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { - getDownloadPreVersion24(dataSource, controller); - getDownloadVersion24(dataSource, controller); - } - - /** - * Finds downloads artifacts from Firefox data from versions before 24.0. - * - * Downloads were stored in a separate downloads database. - * - * @param dataSource - * @param controller - */ - private void getDownloadPreVersion24(Content dataSource, IngestDataSourceWorkerController controller) { - - FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; - try { - downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching 'downloads' files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (downloadsFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any pre-version-24.0 Firefox download files."); - return; - } - - dataFound = true; - int j = 0; - for (AbstractFile downloadsFile : downloadsFiles) { - if (downloadsFile.getSize() == 0) { - continue; - } - String fileName = downloadsFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; - int errors = 0; - try { - ContentUtils.writeToFile(downloadsFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the sqlite db for firefox download artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - - List> tempList = this.dbConnect(temps, downloadQuery); - logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - - Collection bbattributes = new ArrayList(); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? result.get("source").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
EscapeUtil.decodeURL(result.get("source").toString()) : ""))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString())))); - - String target = result.get("target").toString(); - - if (target != null) { - try { - String decodedTarget = URLDecoder.decode(target.toString().replaceAll("file:///", ""), "UTF-8"); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", decodedTarget)); - long pathID = Util.findID(dataSource, decodedTarget); - if (pathID != -1) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", pathID)); - } - } catch (UnsupportedEncodingException ex) { - logger.log(Level.SEVERE, "Error decoding Firefox download URL in " + temps, ex); - errors++; - } - } - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("source").toString() != null) ? result.get("source").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); - - } - if (errors > 0) { - this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web history artifacts."); - } - j++; - dbFile.delete(); - break; - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); - } - - /** - * Gets download artifacts from Firefox data from version 24. - * - * Downloads are stored in the places database. - * - * @param dataSource - * @param controller - */ - private void getDownloadVersion24(Content dataSource, IngestDataSourceWorkerController controller) { - FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; - try { - downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); - } catch (TskCoreException ex) { - String msg = "Error fetching 'downloads' files for Firefox."; - logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); - return; - } - - if (downloadsFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any version-24.0 Firefox download files."); - return; - } - - dataFound = true; - int j = 0; - for (AbstractFile downloadsFile : downloadsFiles) { - if (downloadsFile.getSize() == 0) { - continue; - } - String fileName = downloadsFile.getName(); - String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + "-downloads" + j + ".db"; - int errors = 0; - try { - ContentUtils.writeToFile(downloadsFile, new File(temps)); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error writing the sqlite db for firefox download artifacts.{0}", ex); - this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); - continue; - } - File dbFile = new File(temps); - if (controller.isCancelled()) { - dbFile.delete(); - break; - } - - List> tempList = this.dbConnect(temps, downloadQueryVersion24); - - logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); - for (HashMap result : tempList) { - - Collection bbattributes = new ArrayList(); - - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? 
result.get("url").toString() : ""))); - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); - //TODO Revisit usage of deprecated constructor as per TSK-583 - //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); - - String target = result.get("target").toString(); - if (target != null) { - try { - String decodedTarget = URLDecoder.decode(target.toString().replaceAll("file:///", ""), "UTF-8"); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", decodedTarget)); - long pathID = Util.findID(dataSource, decodedTarget); - if (pathID != -1) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", pathID)); - } - } catch (UnsupportedEncodingException ex) { - logger.log(Level.SEVERE, "Error decoding Firefox download URL in " + temps, ex); - errors++; - } - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", Long.valueOf(result.get("lastModified").toString()))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); - - } - if (errors > 0) { - this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web download artifacts."); - } - j++; - dbFile.delete(); - break; - } - - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - } - - @Override - public void complete() { - } - - @Override - public void stop() { - } - - @Override - public String getDescription() { - return "Extracts activity from the Mozilla FireFox browser."; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +//class Firefox extends Extract { +// +// private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; +// private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; +// private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; +// private static final String bookmarkQuery = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) as dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; +// private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; +// private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM 
moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; +// +// final private static String MODULE_VERSION = "1.0"; +// private IngestServices services; +// +// //hide public constructor to prevent from instantiation by ingest module loader +// Firefox() { +// moduleName = "FireFox"; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// dataFound = false; +// this.getHistory(dataSource, controller); +// this.getBookmark(dataSource, controller); +// this.getDownload(dataSource, controller); +// this.getCookie(dataSource, controller); +// } +// +// private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) { +// //Make these seperate, this is for history +// +// //List FFSqlitedb = this.extractFiles(dataSource, "select * from tsk_files where name LIKE '%places.sqlite%' and name NOT LIKE '%journal%' and parent_path LIKE '%Firefox%'"); +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List historyFiles = null; +// try { +// historyFiles = fileManager.findFiles(dataSource, "%places.sqlite%", "Firefox"); +// } catch (TskCoreException ex) { +// String msg = "Error fetching internet history files for Firefox."; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (historyFiles.isEmpty()) { +// String msg = "No FireFox history files found."; +// logger.log(Level.INFO, msg); +// return; +// } +// +// dataFound = true; +// +// int j = 0; +// for (AbstractFile historyFile : historyFiles) { +// if (historyFile.getSize() == 0) { +// continue; +// } +// +// String fileName = historyFile.getName(); +// String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; +// try { +// ContentUtils.writeToFile(historyFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the sqlite db for firefox web history artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// List> tempList = this.dbConnect(temps, historyQuery); +// logger.log(Level.INFO, moduleName + "- Now getting history from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("visit_date").toString())))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REFERRER.getTypeID(), "RecentActivity", ((result.get("ref").toString() != null) ? 
result.get("ref").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes); +// +// } +// ++j; +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY)); +// } +// +// /** +// * Queries for bookmark files and adds artifacts +// * @param dataSource +// * @param controller +// */ +// private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) { +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List bookmarkFiles = null; +// try { +// bookmarkFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); +// } catch (TskCoreException ex) { +// String msg = "Error fetching bookmark files for Firefox."; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (bookmarkFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any firefox bookmark files."); +// return; +// } +// +// dataFound = true; +// +// int j = 0; +// for (AbstractFile bookmarkFile : bookmarkFiles) { +// if (bookmarkFile.getSize() == 0) { +// continue; +// } +// String fileName = bookmarkFile.getName(); +// String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; +// try { +// ContentUtils.writeToFile(bookmarkFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the sqlite db for firefox bookmark artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// List> tempList = this.dbConnect(temps, bookmarkQuery); +// logger.log(Level.INFO, moduleName + "- Now getting bookmarks from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", ((result.get("title").toString() != null) ? result.get("title").toString() : ""))); +// if (Long.valueOf(result.get("dateAdded").toString()) > 0) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("dateAdded").toString())))); +// } +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? 
result.get("url").toString() : "")))); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes); +// +// } +// ++j; +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK)); +// } +// +// /** +// * Queries for cookies file and adds artifacts +// * @param dataSource +// * @param controller +// */ +// private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) { +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List cookiesFiles = null; +// try { +// cookiesFiles = fileManager.findFiles(dataSource, "cookies.sqlite", "Firefox"); +// } catch (TskCoreException ex) { +// String msg = "Error fetching cookies files for Firefox."; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (cookiesFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any Firefox cookie files."); +// return; +// } +// +// dataFound = true; +// int j = 0; +// for (AbstractFile cookiesFile : cookiesFiles) { +// if (cookiesFile.getSize() == 0) { +// continue; +// } +// String fileName = cookiesFile.getName(); +// String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; +// try { +// ContentUtils.writeToFile(cookiesFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the sqlite db for firefox cookie artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// boolean checkColumn = Util.checkColumn("creationTime", "moz_cookies", temps); +// String query = null; +// if (checkColumn) { +// query = cookieQuery; +// } else { +// query = cookieQueryV3; +// } +// +// List> tempList = this.dbConnect(temps, query); +// logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("host").toString() != null) ? result.get("host").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", (Long.valueOf(result.get("lastAccessed").toString())))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", ((result.get("name").toString() != null) ? result.get("name").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), "RecentActivity", ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); +// +// if (checkColumn == true) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("creationTime").toString())))); +// } +// String domain = Util.extractDomain(result.get("host").toString()); +// domain = domain.replaceFirst("^\\.+(?!$)", ""); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", domain)); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes); +// } +// ++j; +// dbFile.delete(); +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE)); +// } +// +// /** +// * Queries for downloads files and adds artifacts +// * @param dataSource +// * @param controller +// */ +// private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) { +// getDownloadPreVersion24(dataSource, controller); +// getDownloadVersion24(dataSource, controller); +// } +// +// /** +// * Finds downloads artifacts from Firefox data from versions before 24.0. +// * +// * Downloads were stored in a separate downloads database. +// * +// * @param dataSource +// * @param controller +// */ +// private void getDownloadPreVersion24(Content dataSource, IngestDataSourceWorkerController controller) { +// +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List downloadsFiles = null; +// try { +// downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox"); +// } catch (TskCoreException ex) { +// String msg = "Error fetching 'downloads' files for Firefox."; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (downloadsFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any pre-version-24.0 Firefox download files."); +// return; +// } +// +// dataFound = true; +// int j = 0; +// for (AbstractFile downloadsFile : downloadsFiles) { +// if (downloadsFile.getSize() == 0) { +// continue; +// } +// String fileName = downloadsFile.getName(); +// String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + j + ".db"; +// int errors = 0; +// try { +// ContentUtils.writeToFile(downloadsFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the sqlite db for firefox download artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// +// List> tempList = this.dbConnect(temps, downloadQuery); +// logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// +// Collection bbattributes = new ArrayList(); +// +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? result.get("source").toString() : ""))); +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
EscapeUtil.decodeURL(result.get("source").toString()) : ""))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString())))); +// +// String target = result.get("target").toString(); +// +// if (target != null) { +// try { +// String decodedTarget = URLDecoder.decode(target.toString().replaceAll("file:///", ""), "UTF-8"); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", decodedTarget)); +// long pathID = Util.findID(dataSource, decodedTarget); +// if (pathID != -1) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", pathID)); +// } +// } catch (UnsupportedEncodingException ex) { +// logger.log(Level.SEVERE, "Error decoding Firefox download URL in " + temps, ex); +// errors++; +// } +// } +// +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("source").toString() != null) ? result.get("source").toString() : "")))); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); +// +// } +// if (errors > 0) { +// this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web history artifacts."); +// } +// j++; +// dbFile.delete(); +// break; +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); +// } +// +// /** +// * Gets download artifacts from Firefox data from version 24. +// * +// * Downloads are stored in the places database. 
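// [Editor's note] Illustrative sketch only, not part of this patch: running the
// version-24 download query above against a local temp copy of places.sqlite with
// plain JDBC. Assumes a SQLite JDBC driver (e.g. Xerial sqlite-jdbc) is on the
// classpath; "tempDbPath" is a hypothetical path to the extracted copy, and
// lastModified is converted from microseconds to seconds in SQL, as in the
// module's downloadQueryVersion24 string.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

class FirefoxDownloadQuerySketch {
    static void dumpDownloads(String tempDbPath) throws Exception {
        String query = "SELECT url, content AS target, (lastModified/1000000) AS lastModified "
                + "FROM moz_places, moz_annos "
                + "WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3";
        try (Connection conn = DriverManager.getConnection("jdbc:sqlite:" + tempDbPath);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(query)) {
            while (rs.next()) {
                // url is the download source; target is the local file annotation
                System.out.printf("%s -> %s (%d)%n",
                        rs.getString("url"), rs.getString("target"), rs.getLong("lastModified"));
            }
        }
    }
}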
+// * +// * @param dataSource +// * @param controller +// */ +// private void getDownloadVersion24(Content dataSource, IngestDataSourceWorkerController controller) { +// FileManager fileManager = currentCase.getServices().getFileManager(); +// List downloadsFiles = null; +// try { +// downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); +// } catch (TskCoreException ex) { +// String msg = "Error fetching 'downloads' files for Firefox."; +// logger.log(Level.WARNING, msg); +// this.addErrorMessage(this.getName() + ": " + msg); +// return; +// } +// +// if (downloadsFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any version-24.0 Firefox download files."); +// return; +// } +// +// dataFound = true; +// int j = 0; +// for (AbstractFile downloadsFile : downloadsFiles) { +// if (downloadsFile.getSize() == 0) { +// continue; +// } +// String fileName = downloadsFile.getName(); +// String temps = RAImageIngestModule.getRATempPath(currentCase, "firefox") + File.separator + fileName + "-downloads" + j + ".db"; +// int errors = 0; +// try { +// ContentUtils.writeToFile(downloadsFile, new File(temps)); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error writing the sqlite db for firefox download artifacts.{0}", ex); +// this.addErrorMessage(this.getName() + ": Error while trying to analyze file:" + fileName); +// continue; +// } +// File dbFile = new File(temps); +// if (controller.isCancelled()) { +// dbFile.delete(); +// break; +// } +// +// List> tempList = this.dbConnect(temps, downloadQueryVersion24); +// +// logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); +// for (HashMap result : tempList) { +// +// Collection bbattributes = new ArrayList(); +// +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", ((result.get("url").toString() != null) ? result.get("url").toString() : ""))); +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? 
EscapeUtil.decodeURL(result.get("source").toString()) : ""))); +// //TODO Revisit usage of deprecated constructor as per TSK-583 +// //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString())))); +// +// String target = result.get("target").toString(); +// if (target != null) { +// try { +// String decodedTarget = URLDecoder.decode(target.toString().replaceAll("file:///", ""), "UTF-8"); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", decodedTarget)); +// long pathID = Util.findID(dataSource, decodedTarget); +// if (pathID != -1) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", pathID)); +// } +// } catch (UnsupportedEncodingException ex) { +// logger.log(Level.SEVERE, "Error decoding Firefox download URL in " + temps, ex); +// errors++; +// } +// } +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", Long.valueOf(result.get("lastModified").toString()))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox")); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); +// +// } +// if (errors > 0) { +// this.addErrorMessage(this.getName() + ": Error parsing " + errors + " Firefox web download artifacts."); +// } +// j++; +// dbFile.delete(); +// break; +// } +// +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD)); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public void stop() { +// } +// +// @Override +// public String getDescription() { +// return "Extracts activity from the Mozilla FireFox browser."; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index e403c78117..b14d619e7e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -20,223 +20,223 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + package org.sleuthkit.autopsy.recentactivity; -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.datamodel.Content; +//import java.io.File; +//import java.util.ArrayList; +//import java.util.List; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.datamodel.Content; /** * Recent activity image ingest module * */ -public final class RAImageIngestModule extends IngestModuleDataSource { - - private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName()); - private static RAImageIngestModule defaultInstance = null; - private IngestServices services; - private static int messageId = 0; - private StringBuilder subCompleted = new StringBuilder(); - private ArrayList modules; - private List browserModules; - final private static String MODULE_VERSION = Version.getVersion(); - - //public constructor is required - //as multiple instances are created for processing multiple images simultenously - public RAImageIngestModule() { - } - - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Started " + dataSource.getName())); - - controller.switchToDeterminate(modules.size()); - controller.progress(0); - ArrayList errors = new ArrayList<>(); - - for (int i = 0; i < modules.size(); i++) { - Extract module = modules.get(i); - if (controller.isCancelled()) { - logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", module.getName()); - break; - } - - try { - module.process(pipelineContext, dataSource, controller); - } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception occurred in " + module.getName(), ex); - subCompleted.append(module.getName()).append(" failed - see log for details
"); - errors.add(module.getName() + " had errors -- see log"); - } - controller.progress(i + 1); - errors.addAll(module.getErrorMessages()); - } - - // create the final message for inbox - StringBuilder errorMessage = new StringBuilder(); - String errorMsgSubject; - MessageType msgLevel = MessageType.INFO; - if (errors.isEmpty() == false) { - msgLevel = MessageType.ERROR; - errorMessage.append("

Errors encountered during analysis:

    \n"); - for (String msg : errors) { - errorMessage.append("
  • ").append(msg).append("
  • \n"); - } - errorMessage.append("
\n"); - - if (errors.size() == 1) { - errorMsgSubject = "1 error found"; - } else { - errorMsgSubject = errors.size() + " errors found"; - } - } else { - errorMessage.append("

No errors encountered.

"); - errorMsgSubject = "No errors reported"; - } - final IngestMessage msg = IngestMessage.createMessage(++messageId, msgLevel, this, "Finished " + dataSource.getName()+ " - " + errorMsgSubject, errorMessage.toString()); - services.postMessage(msg); - - StringBuilder historyMsg = new StringBuilder(); - historyMsg.append("

Browser Data on ").append(dataSource.getName()).append(":

    \n"); - for (Extract module : browserModules) { - historyMsg.append("
  • ").append(module.getName()); - historyMsg.append(": ").append((module.foundData()) ? " Found." : " Not Found."); - historyMsg.append("
  • "); - } - historyMsg.append("
"); - final IngestMessage inboxMsg = IngestMessage.createMessage(++messageId, MessageType.INFO, this, dataSource.getName() + " - Browser Results", historyMsg.toString()); - services.postMessage(inboxMsg); - } - - @Override - public void complete() { - logger.log(Level.INFO, "complete() " + this.toString()); - - // close modules - for (int i = 0; i < modules.size(); i++) { - Extract module = modules.get(i); - try { - module.complete(); - } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception occurred when completing " + module.getName(), ex); - subCompleted.append(module.getName()).append(" failed to complete - see log for details
"); - } - } - - //module specific cleanup due to completion here - } - - @Override - public String getName() { - return "Recent Activity"; - } - - @Override - public String getDescription() { - return "Extracts recent user activity, such as Web browsing, recently used documents and installed programs."; - } - - @Override - public void init(IngestModuleInit initContext) { - modules = new ArrayList<>(); - browserModules = new ArrayList(); - logger.log(Level.INFO, "init() {0}", this.toString()); - services = IngestServices.getDefault(); - - final Extract registry = new ExtractRegistry(); - final Extract iexplore = new ExtractIE(); - final Extract recentDocuments= new RecentDocumentsByLnk(); - final Extract chrome = new Chrome(); - final Extract firefox = new Firefox(); - final Extract SEUQA = new SearchEngineURLQueryAnalyzer(); - - modules.add(chrome); - modules.add(firefox); - modules.add(iexplore); - modules.add(recentDocuments); - // this needs to run after the web browser modules - modules.add(SEUQA); - - // this runs last because it is slowest - modules.add(registry); - - browserModules.add(chrome); - browserModules.add(firefox); - browserModules.add(iexplore); - - for (Extract module : modules) { - try { - module.init(initContext); - } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception during init() of " + module.getName(), ex); - } - } - } - - @Override - public void stop() { - logger.log(Level.INFO, "RAImageIngetModule::stop()"); - for (Extract module : modules) { - try { - module.stop(); - } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception during stop() of " + module.getName(), ex); - } - } - logger.log(Level.INFO, "Recent Activity processes has been shutdown."); - } - - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } - - /** - * Get the temp path for a specific sub-module in recent activity. Will create the dir if it doesn't exist. - * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp folder to prevent name collisions - * @return Path to directory - */ - protected static String getRATempPath(Case a_case, String mod) { - String tmpDir = a_case.getTempDirectory() + File.separator + "RecentActivity" + File.separator + mod; - File dir = new File(tmpDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return tmpDir; - } - - /** - * Get the output path for a specific sub-module in recent activity. Will create the dir if it doesn't exist. 
- * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp folder to prevent name collisions - * @return Path to directory - */ - protected static String getRAOutputPath(Case a_case, String mod) { - String tmpDir = a_case.getModulesOutputDirAbsPath() + File.separator + "RecentActivity" + File.separator + mod; - File dir = new File(tmpDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return tmpDir; - } -} +//public final class RAImageIngestModule extends IngestModuleDataSource { +// +// private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName()); +// private static RAImageIngestModule defaultInstance = null; +// private IngestServices services; +// private static int messageId = 0; +// private StringBuilder subCompleted = new StringBuilder(); +// private ArrayList modules; +// private List browserModules; +// final private static String MODULE_VERSION = Version.getVersion(); +// +// //public constructor is required +// //as multiple instances are created for processing multiple images simultenously +// public RAImageIngestModule() { +// } +// +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Started " + dataSource.getName())); +// +// controller.switchToDeterminate(modules.size()); +// controller.progress(0); +// ArrayList errors = new ArrayList<>(); +// +// for (int i = 0; i < modules.size(); i++) { +// Extract module = modules.get(i); +// if (controller.isCancelled()) { +// logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", module.getName()); +// break; +// } +// +// try { +// module.process(pipelineContext, dataSource, controller); +// } catch (Exception ex) { +// logger.log(Level.SEVERE, "Exception occurred in " + module.getName(), ex); +// subCompleted.append(module.getName()).append(" failed - see log for details
"); +// errors.add(module.getName() + " had errors -- see log"); +// } +// controller.progress(i + 1); +// errors.addAll(module.getErrorMessages()); +// } +// +// // create the final message for inbox +// StringBuilder errorMessage = new StringBuilder(); +// String errorMsgSubject; +// MessageType msgLevel = MessageType.INFO; +// if (errors.isEmpty() == false) { +// msgLevel = MessageType.ERROR; +// errorMessage.append("

Errors encountered during analysis:

    \n"); +// for (String msg : errors) { +// errorMessage.append("
  • ").append(msg).append("
  • \n"); +// } +// errorMessage.append("
\n"); +// +// if (errors.size() == 1) { +// errorMsgSubject = "1 error found"; +// } else { +// errorMsgSubject = errors.size() + " errors found"; +// } +// } else { +// errorMessage.append("

No errors encountered.

"); +// errorMsgSubject = "No errors reported"; +// } +// final IngestMessage msg = IngestMessage.createMessage(++messageId, msgLevel, this, "Finished " + dataSource.getName()+ " - " + errorMsgSubject, errorMessage.toString()); +// services.postMessage(msg); +// +// StringBuilder historyMsg = new StringBuilder(); +// historyMsg.append("

Browser Data on ").append(dataSource.getName()).append(":

    \n"); +// for (Extract module : browserModules) { +// historyMsg.append("
  • ").append(module.getName()); +// historyMsg.append(": ").append((module.foundData()) ? " Found." : " Not Found."); +// historyMsg.append("
  • "); +// } +// historyMsg.append("
"); +// final IngestMessage inboxMsg = IngestMessage.createMessage(++messageId, MessageType.INFO, this, dataSource.getName() + " - Browser Results", historyMsg.toString()); +// services.postMessage(inboxMsg); +// } +// +// @Override +// public void complete() { +// logger.log(Level.INFO, "complete() " + this.toString()); +// +// // close modules +// for (int i = 0; i < modules.size(); i++) { +// Extract module = modules.get(i); +// try { +// module.complete(); +// } catch (Exception ex) { +// logger.log(Level.SEVERE, "Exception occurred when completing " + module.getName(), ex); +// subCompleted.append(module.getName()).append(" failed to complete - see log for details
"); +// } +// } +// +// //module specific cleanup due to completion here +// } +// +// @Override +// public String getName() { +// return "Recent Activity"; +// } +// +// @Override +// public String getDescription() { +// return "Extracts recent user activity, such as Web browsing, recently used documents and installed programs."; +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// modules = new ArrayList<>(); +// browserModules = new ArrayList(); +// logger.log(Level.INFO, "init() {0}", this.toString()); +// services = IngestServices.getDefault(); +// +// final Extract registry = new ExtractRegistry(); +// final Extract iexplore = new ExtractIE(); +// final Extract recentDocuments= new RecentDocumentsByLnk(); +// final Extract chrome = new Chrome(); +// final Extract firefox = new Firefox(); +// final Extract SEUQA = new SearchEngineURLQueryAnalyzer(); +// +// modules.add(chrome); +// modules.add(firefox); +// modules.add(iexplore); +// modules.add(recentDocuments); +// // this needs to run after the web browser modules +// modules.add(SEUQA); +// +// // this runs last because it is slowest +// modules.add(registry); +// +// browserModules.add(chrome); +// browserModules.add(firefox); +// browserModules.add(iexplore); +// +// for (Extract module : modules) { +// try { +// module.init(initContext); +// } catch (Exception ex) { +// logger.log(Level.SEVERE, "Exception during init() of " + module.getName(), ex); +// } +// } +// } +// +// @Override +// public void stop() { +// logger.log(Level.INFO, "RAImageIngetModule::stop()"); +// for (Extract module : modules) { +// try { +// module.stop(); +// } catch (Exception ex) { +// logger.log(Level.SEVERE, "Exception during stop() of " + module.getName(), ex); +// } +// } +// logger.log(Level.INFO, "Recent Activity processes has been shutdown."); +// } +// +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +// +// /** +// * Get the temp path for a specific sub-module in recent activity. Will create the dir if it doesn't exist. +// * @param a_case Case that directory is for +// * @param mod Module name that will be used for a sub folder in the temp folder to prevent name collisions +// * @return Path to directory +// */ +// protected static String getRATempPath(Case a_case, String mod) { +// String tmpDir = a_case.getTempDirectory() + File.separator + "RecentActivity" + File.separator + mod; +// File dir = new File(tmpDir); +// if (dir.exists() == false) { +// dir.mkdirs(); +// } +// return tmpDir; +// } +// +// /** +// * Get the output path for a specific sub-module in recent activity. Will create the dir if it doesn't exist. 
+// * @param a_case Case that directory is for +// * @param mod Module name that will be used for a sub folder in the temp folder to prevent name collisions +// * @return Path to directory +// */ +// protected static String getRAOutputPath(Case a_case, String mod) { +// String tmpDir = a_case.getModulesOutputDirAbsPath() + File.separator + "RecentActivity" + File.separator + mod; +// File dir = new File(tmpDir); +// if (dir.exists() == false) { +// dir.mkdirs(); +// } +// return tmpDir; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index 880427e832..37479ab65b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -23,125 +23,124 @@ package org.sleuthkit.autopsy.recentactivity; // imports -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.coreutils.Logger; -import java.util.Collection; -import org.sleuthkit.autopsy.coreutils.JLNK; -import org.sleuthkit.autopsy.coreutils.JLnkParser; -import org.sleuthkit.autopsy.coreutils.JLnkParserException; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.datamodel.*; +//import java.util.ArrayList; +//import java.util.List; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import java.util.Collection; +//import org.sleuthkit.autopsy.coreutils.JLNK; +//import org.sleuthkit.autopsy.coreutils.JLnkParser; +//import org.sleuthkit.autopsy.coreutils.JLnkParserException; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.datamodel.*; /** * Recent documents class that will extract recent documents in the form of *.lnk files */ -class RecentDocumentsByLnk extends Extract { - private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName()); - private IngestServices services; - final private static String MODULE_VERSION = "1.0"; - - /** - * Find the documents that Windows stores about recent documents and make artifacts. 
- * @param dataSource - * @param controller - */ - private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) { - - org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List recentFiles = null; - try { - recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent"); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, "Error searching for .lnk files."); - this.addErrorMessage(this.getName() + ": Error getting lnk Files."); - return; - } - - if (recentFiles.isEmpty()) { - logger.log(Level.INFO, "Didn't find any recent files."); - return; - } - - dataFound = true; - for (AbstractFile recentFile : recentFiles) { - if (controller.isCancelled()) { - break; - } - - if (recentFile.getSize() == 0) { - continue; - } - JLNK lnk = null; - JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize()); - try { - lnk = lnkParser.parse(); - } catch (JLnkParserException e) { - //TODO should throw a specific checked exception - boolean unalloc = recentFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC) - || recentFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC); - if (unalloc == false) { - logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e); - this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName()); - } - continue; - } - - Collection bbattributes = new ArrayList(); - String path = lnk.getBestPath(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, path))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", recentFile.getCrtime())); - this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); - } - services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT)); - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - dataFound = false; - this.getRecentDocuments(dataSource, controller); - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - } - - @Override - public void complete() { - } - - @Override - public void stop() { - //call regular cleanup from complete() method - complete(); - } - - @Override - public String getDescription() { - return "Extracts recent documents in windows."; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +//class RecentDocumentsByLnk extends Extract { +// private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName()); +// private IngestServices services; +// final private static String MODULE_VERSION = "1.0"; +// +// /** +// * Find the documents that Windows stores about recent documents and make artifacts. 
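// [Editor's note] Illustrative sketch, not part of this patch: the core of what
// getRecentDocuments() does with each *.lnk hit, isolated into one method. It uses the
// same JLnkParser calls that appear in the code above; only the helper name "bestPathOf"
// is hypothetical. The returned target path is what becomes the TSK_PATH attribute of
// the TSK_RECENT_OBJECT artifact.
import org.sleuthkit.autopsy.coreutils.JLNK;
import org.sleuthkit.autopsy.coreutils.JLnkParser;
import org.sleuthkit.autopsy.coreutils.JLnkParserException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;

class LnkSketch {
    static String bestPathOf(AbstractFile recentFile) throws JLnkParserException {
        // Parse the shortcut directly from the image; no temp copy is needed because
        // the parser reads from a content stream rather than a local file.
        JLnkParser parser = new JLnkParser(new ReadContentInputStream(recentFile),
                                           (int) recentFile.getSize());
        JLNK lnk = parser.parse();
        return lnk.getBestPath();
    }
}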
+// * @param dataSource +// * @param controller +// */ +// private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) { +// +// org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); +// List recentFiles = null; +// try { +// recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent"); +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, "Error searching for .lnk files."); +// this.addErrorMessage(this.getName() + ": Error getting lnk Files."); +// return; +// } +// +// if (recentFiles.isEmpty()) { +// logger.log(Level.INFO, "Didn't find any recent files."); +// return; +// } +// +// dataFound = true; +// for (AbstractFile recentFile : recentFiles) { +// if (controller.isCancelled()) { +// break; +// } +// +// if (recentFile.getSize() == 0) { +// continue; +// } +// JLNK lnk = null; +// JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize()); +// try { +// lnk = lnkParser.parse(); +// } catch (JLnkParserException e) { +// //TODO should throw a specific checked exception +// boolean unalloc = recentFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC) +// || recentFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC); +// if (unalloc == false) { +// logger.log(Level.SEVERE, "Error lnk parsing the file to get recent files" + recentFile, e); +// this.addErrorMessage(this.getName() + ": Error parsing Recent File " + recentFile.getName()); +// } +// continue; +// } +// +// Collection bbattributes = new ArrayList(); +// String path = lnk.getBestPath(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, path))); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", recentFile.getCrtime())); +// this.addArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); +// } +// services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT)); +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// dataFound = false; +// this.getRecentDocuments(dataSource, controller); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public void stop() { +// //call regular cleanup from complete() method +// complete(); +// } +// +// @Override +// public String getDescription() { +// return "Extracts recent documents in windows."; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index 4477271dd9..3b44db0412 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -19,40 +19,38 @@ package org.sleuthkit.autopsy.recentactivity; - -import 
java.io.File; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.net.URLDecoder; -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.logging.Level; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.XMLUtil; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.TskException; -import org.w3c.dom.Document; -import org.w3c.dom.NamedNodeMap; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXException; - +//import java.io.File; +//import java.io.IOException; +//import java.io.UnsupportedEncodingException; +//import java.net.URLDecoder; +//import java.util.ArrayList; +//import java.util.Collection; +//import java.util.HashMap; +//import java.util.Map; +//import java.util.Set; +//import java.util.logging.Level; +//import javax.xml.parsers.DocumentBuilder; +//import javax.xml.parsers.DocumentBuilderFactory; +//import javax.xml.parsers.ParserConfigurationException; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.coreutils.XMLUtil; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.AbstractFile; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.datamodel.TskException; +//import org.w3c.dom.Document; +//import org.w3c.dom.NamedNodeMap; +//import org.w3c.dom.NodeList; +//import org.xml.sax.SAXException; +// /** * This module attempts to extract web queries from major search engines by * querying the blackboard for web history and bookmark artifacts, and @@ -62,350 +60,350 @@ import org.xml.sax.SAXException; * To add search engines, edit SearchEngines.xml under RecentActivity * */ -class SearchEngineURLQueryAnalyzer extends Extract { - - private IngestServices services; - - private static final String MODULE_NAME = "Search Engine URL Query Analyzer"; - private final static String MODULE_VERSION = "1.0"; - - private static final String XMLFILE = "SEUQAMappings.xml"; - private static final String XSDFILE = "SearchEngineSchema.xsd"; - - - private static String[] searchEngineNames; - private static SearchEngineURLQueryAnalyzer.SearchEngine[] engines; - private 
static Document xmlinput; - private static final SearchEngineURLQueryAnalyzer.SearchEngine NullEngine = new SearchEngineURLQueryAnalyzer.SearchEngine("NONE", "NONE", new HashMap()); - - - //hide public constructor to prevent from instantiation by ingest module loader - SearchEngineURLQueryAnalyzer() { - - } - - private static class SearchEngine { - private String _engineName; - private String _domainSubstring; - private Map _splits; - private int _count; - - SearchEngine(String engineName, String domainSubstring, Map splits){ - _engineName = engineName; - _domainSubstring = domainSubstring; - _splits = splits; - _count = 0; - } - - void increment(){ - ++_count; - } - - String getEngineName(){ - return _engineName; - } - - String getDomainSubstring(){ - return _domainSubstring; - } - - int getTotal(){ - return _count; - } - - Set> getSplits(){ - return this._splits.entrySet(); - } - - @Override - public String toString(){ - String split = " "; - for(Map.Entry kvp : getSplits()){ - split = split + "[ " + kvp.getKey() + " :: " + kvp.getValue() + " ]" + ", "; - } - return "Name: " + _engineName + "\n Domain Substring: " + _domainSubstring + "\n count: " + _count + "\n Split Tokens: \n " + split; - } - - } - - private void createEngines(){ - NodeList nlist = xmlinput.getElementsByTagName("SearchEngine"); - SearchEngineURLQueryAnalyzer.SearchEngine[] listEngines = new SearchEngineURLQueryAnalyzer.SearchEngine[nlist.getLength()]; - for(int i = 0;i < nlist.getLength(); i++){ - NamedNodeMap nnm = nlist.item(i).getAttributes(); - - String EngineName = nnm.getNamedItem("engine").getNodeValue(); - String EnginedomainSubstring = nnm.getNamedItem("domainSubstring").getNodeValue(); - Map splits = new HashMap(); - - NodeList listSplits = xmlinput.getElementsByTagName("splitToken"); - for(int k = 0; k kvp : eng.getSplits()){ - if(url.contains(kvp.getKey())){ - x = split2(url, kvp.getValue()); - break; - } - } - try { //try to decode the url - String decoded = URLDecoder.decode(x, "UTF-8"); - return decoded; - } catch (UnsupportedEncodingException uee) { //if it fails, return the encoded string - logger.log(Level.FINE, "Error during URL decoding ", uee); - return x; - } - } - - /** - * Splits URLs based on a delimeter (key). .contains() and .split() - * - * @param url The URL to be split - * @param value the delimeter value used to split the URL into its - * search token, extracted from the xml. 
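// [Editor's note] Not part of this patch: a self-contained sketch of the split logic the
// javadoc above describes. Given a URL and a split token from the XML mapping (e.g. "?q="
// for Google), everything after the token and before the next '&' is the search query,
// which is then URL-decoded, falling back to the raw text if decoding fails. The real
// split2() un-escapes "\\?" by hand before calling String.split(); Pattern.quote() is
// used here as a simpler equivalent.
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.regex.Pattern;

class QuerySplitSketch {
    static String extractQuery(String url, String splitToken) {
        String raw = "NoQuery";
        String[] parts = url.split(Pattern.quote(splitToken));
        if (parts.length >= 2) {
            String tail = parts[parts.length - 1];
            raw = tail.contains("&") ? tail.split("&")[0] : tail;
        }
        try {
            return URLDecoder.decode(raw, "UTF-8");
        } catch (UnsupportedEncodingException uee) {
            return raw; // keep the encoded form rather than dropping the hit
        }
    }

    public static void main(String[] args) {
        // prints "sleuth kit autopsy"
        System.out.println(extractQuery(
                "https://www.google.com/search?q=sleuth+kit+autopsy&hl=en", "?q="));
    }
}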
- * @return The extracted search query - * - */ - private String split2(String url, String value) { - String basereturn = "NoQuery"; - String v = value; - //Want to determine if string contains a string based on splitkey, but we want to split the string on splitKeyConverted due to regex - if (value.contains("\\?")) { - v = value.replace("\\?", "?"); - } - String[] sp = url.split(v); - if (sp.length >= 2) { - if (sp[sp.length - 1].contains("&")) { - basereturn = sp[sp.length - 1].split("&")[0]; - } else { - basereturn = sp[sp.length - 1]; - } - } - return basereturn; - } - - private void getURLs(Content dataSource, IngestDataSourceWorkerController controller) { - int totalQueries = 0; - try { - //from blackboard_artifacts - Collection listArtifacts = currentCase.getSleuthkitCase().getMatchingArtifacts("WHERE (`artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID() - + "' OR `artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID() + "') "); //List of every 'web_history' and 'bookmark' artifact - logger.info("Processing " + listArtifacts.size() + " blackboard artifacts."); - getAll: - for (BlackboardArtifact artifact : listArtifacts) { - //initializing default attributes - String query = ""; - String searchEngineDomain = ""; - String browser = ""; - long last_accessed = -1; - - long fileId = artifact.getObjectID(); - boolean isFromSource = tskCase.isFileFromSource(dataSource, fileId); - if (!isFromSource) { - //File was from a different dataSource. Skipping. - continue; - } - - AbstractFile file = tskCase.getAbstractFileById(fileId); - if (file == null ) { - continue; - } - - SearchEngineURLQueryAnalyzer.SearchEngine se = NullEngine; - //from blackboard_attributes - Collection listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID()); - getAttributes: - for (BlackboardAttribute attribute : listAttributes) { - if (controller.isCancelled()) { - break getAll; //User cancled the process. - } - if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) { - final String urlString = attribute.getValueString(); - se = getSearchEngine(urlString); - if (!se.equals(NullEngine)) { - query = extractSearchEngineQuery(attribute.getValueString()); - if (query.equals("NoQuery") || query.equals("")) { //False positive match, artifact was not a query. - break getAttributes; - } - } else if (se.equals(NullEngine)) { - break getAttributes; //could not determine type. 
Will move onto next artifact - } - } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID()) { - browser = attribute.getValueString(); - } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID()) { - searchEngineDomain = attribute.getValueString(); - } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) { - last_accessed = attribute.getValueLong(); - } - } - - if (!se.equals(NullEngine) && !query.equals("NoQuery") && !query.equals("")) { - Collection bbattributes = new ArrayList(); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), MODULE_NAME, searchEngineDomain)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), MODULE_NAME, query)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), MODULE_NAME, browser)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), MODULE_NAME, last_accessed)); - this.addArtifact(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, file, bbattributes); - se.increment(); - ++totalQueries; - } - } - } catch (TskException e) { - logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); - } finally { - if (controller.isCancelled()) { - logger.info("Operation terminated by user."); - } - services.fireModuleDataEvent(new ModuleDataEvent("RecentActivity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)); - logger.info("Extracted " + totalQueries + " queries from the blackboard"); - } - } - - private String getTotals() { - String total = ""; - if (engines == null) { - return total; - } - for (SearchEngineURLQueryAnalyzer.SearchEngine se : engines) { - total+= se.getEngineName() + " : "+ se.getTotal() + "\n"; - } - return total; - } - - @Override - public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - this.getURLs(dataSource, controller); - logger.info("Search Engine stats: \n" + getTotals()); - } - - @Override - public void init(IngestModuleInit initContext) { - try{ - services = IngestServices.getDefault(); - PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE); - init2(); - } - catch(IOException e){ - logger.log(Level.SEVERE, "Unable to find " + XMLFILE , e); - } - } - - - private void init2() { - try { - String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE; - File f = new File(path); - logger.log(Level.INFO, "Load successful"); - DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); - DocumentBuilder db = dbf.newDocumentBuilder(); - Document xml = db.parse(f); - xmlinput = xml; - - if (!XMLUtil.xmlIsValid(xml, SearchEngineURLQueryAnalyzer.class, XSDFILE)) { - logger.log(Level.WARNING, "Error loading Search Engines: could not validate against [" + XSDFILE + "], results may not be accurate."); - } - createEngines(); - getSearchEngineNames(); - } catch (IOException e) { - logger.log(Level.SEVERE, "Was not able to load SEUQAMappings.xml", e); - } - catch(ParserConfigurationException pce){ - logger.log(Level.SEVERE, "Unable to build XML parser", pce); - } - catch(SAXException sxe){ - logger.log(Level.SEVERE, "Unable to parse XML file", sxe); - } - } - - - - @Override - public void complete() { - logger.info("Search Engine URL Query Analyzer has completed."); - } - - - @Override - public void stop() { - 
logger.info("Attempted to stop Search Engine URL Query Analyzer, but operation is not supported; skipping..."); - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - String total = ""; - for(String name : searchEngineNames){ - total += name + "\n"; - } - return "Extracts search queries on the following search engines: \n" + total; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } -} +//class SearchEngineURLQueryAnalyzer extends Extract { +// +// private IngestServices services; +// +// private static final String MODULE_NAME = "Search Engine URL Query Analyzer"; +// private final static String MODULE_VERSION = "1.0"; +// +// private static final String XMLFILE = "SEUQAMappings.xml"; +// private static final String XSDFILE = "SearchEngineSchema.xsd"; +// +// +// private static String[] searchEngineNames; +// private static SearchEngineURLQueryAnalyzer.SearchEngine[] engines; +// private static Document xmlinput; +// private static final SearchEngineURLQueryAnalyzer.SearchEngine NullEngine = new SearchEngineURLQueryAnalyzer.SearchEngine("NONE", "NONE", new HashMap()); +// +// +// //hide public constructor to prevent from instantiation by ingest module loader +// SearchEngineURLQueryAnalyzer() { +// +// } +// +// private static class SearchEngine { +// private String _engineName; +// private String _domainSubstring; +// private Map _splits; +// private int _count; +// +// SearchEngine(String engineName, String domainSubstring, Map splits){ +// _engineName = engineName; +// _domainSubstring = domainSubstring; +// _splits = splits; +// _count = 0; +// } +// +// void increment(){ +// ++_count; +// } +// +// String getEngineName(){ +// return _engineName; +// } +// +// String getDomainSubstring(){ +// return _domainSubstring; +// } +// +// int getTotal(){ +// return _count; +// } +// +// Set> getSplits(){ +// return this._splits.entrySet(); +// } +// +// @Override +// public String toString(){ +// String split = " "; +// for(Map.Entry kvp : getSplits()){ +// split = split + "[ " + kvp.getKey() + " :: " + kvp.getValue() + " ]" + ", "; +// } +// return "Name: " + _engineName + "\n Domain Substring: " + _domainSubstring + "\n count: " + _count + "\n Split Tokens: \n " + split; +// } +// +// } +// +// private void createEngines(){ +// NodeList nlist = xmlinput.getElementsByTagName("SearchEngine"); +// SearchEngineURLQueryAnalyzer.SearchEngine[] listEngines = new SearchEngineURLQueryAnalyzer.SearchEngine[nlist.getLength()]; +// for(int i = 0;i < nlist.getLength(); i++){ +// NamedNodeMap nnm = nlist.item(i).getAttributes(); +// +// String EngineName = nnm.getNamedItem("engine").getNodeValue(); +// String EnginedomainSubstring = nnm.getNamedItem("domainSubstring").getNodeValue(); +// Map splits = new HashMap(); +// +// NodeList listSplits = xmlinput.getElementsByTagName("splitToken"); +// for(int k = 0; k kvp : eng.getSplits()){ +// if(url.contains(kvp.getKey())){ +// x = split2(url, kvp.getValue()); +// break; +// } +// } +// try { //try to decode the url +// String decoded = URLDecoder.decode(x, "UTF-8"); +// return decoded; +// } catch (UnsupportedEncodingException uee) { //if it fails, return the encoded string +// logger.log(Level.FINE, "Error during URL decoding ", uee); +// return x; +// } +// } +// +// /** +// * Splits URLs based on a delimeter (key). 
.contains() and .split() +// * +// * @param url The URL to be split +// * @param value the delimeter value used to split the URL into its +// * search token, extracted from the xml. +// * @return The extracted search query +// * +// */ +// private String split2(String url, String value) { +// String basereturn = "NoQuery"; +// String v = value; +// //Want to determine if string contains a string based on splitkey, but we want to split the string on splitKeyConverted due to regex +// if (value.contains("\\?")) { +// v = value.replace("\\?", "?"); +// } +// String[] sp = url.split(v); +// if (sp.length >= 2) { +// if (sp[sp.length - 1].contains("&")) { +// basereturn = sp[sp.length - 1].split("&")[0]; +// } else { +// basereturn = sp[sp.length - 1]; +// } +// } +// return basereturn; +// } +// +// private void getURLs(Content dataSource, IngestDataSourceWorkerController controller) { +// int totalQueries = 0; +// try { +// //from blackboard_artifacts +// Collection listArtifacts = currentCase.getSleuthkitCase().getMatchingArtifacts("WHERE (`artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID() +// + "' OR `artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID() + "') "); //List of every 'web_history' and 'bookmark' artifact +// logger.info("Processing " + listArtifacts.size() + " blackboard artifacts."); +// getAll: +// for (BlackboardArtifact artifact : listArtifacts) { +// //initializing default attributes +// String query = ""; +// String searchEngineDomain = ""; +// String browser = ""; +// long last_accessed = -1; +// +// long fileId = artifact.getObjectID(); +// boolean isFromSource = tskCase.isFileFromSource(dataSource, fileId); +// if (!isFromSource) { +// //File was from a different dataSource. Skipping. +// continue; +// } +// +// AbstractFile file = tskCase.getAbstractFileById(fileId); +// if (file == null ) { +// continue; +// } +// +// SearchEngineURLQueryAnalyzer.SearchEngine se = NullEngine; +// //from blackboard_attributes +// Collection listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID()); +// getAttributes: +// for (BlackboardAttribute attribute : listAttributes) { +// if (controller.isCancelled()) { +// break getAll; //User cancled the process. +// } +// if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) { +// final String urlString = attribute.getValueString(); +// se = getSearchEngine(urlString); +// if (!se.equals(NullEngine)) { +// query = extractSearchEngineQuery(attribute.getValueString()); +// if (query.equals("NoQuery") || query.equals("")) { //False positive match, artifact was not a query. +// break getAttributes; +// } +// } else if (se.equals(NullEngine)) { +// break getAttributes; //could not determine type. 
Will move onto next artifact +// } +// } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID()) { +// browser = attribute.getValueString(); +// } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID()) { +// searchEngineDomain = attribute.getValueString(); +// } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID()) { +// last_accessed = attribute.getValueLong(); +// } +// } +// +// if (!se.equals(NullEngine) && !query.equals("NoQuery") && !query.equals("")) { +// Collection bbattributes = new ArrayList(); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), MODULE_NAME, searchEngineDomain)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), MODULE_NAME, query)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), MODULE_NAME, browser)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), MODULE_NAME, last_accessed)); +// this.addArtifact(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, file, bbattributes); +// se.increment(); +// ++totalQueries; +// } +// } +// } catch (TskException e) { +// logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); +// } finally { +// if (controller.isCancelled()) { +// logger.info("Operation terminated by user."); +// } +// services.fireModuleDataEvent(new ModuleDataEvent("RecentActivity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)); +// logger.info("Extracted " + totalQueries + " queries from the blackboard"); +// } +// } +// +// private String getTotals() { +// String total = ""; +// if (engines == null) { +// return total; +// } +// for (SearchEngineURLQueryAnalyzer.SearchEngine se : engines) { +// total+= se.getEngineName() + " : "+ se.getTotal() + "\n"; +// } +// return total; +// } +// +// @Override +// public void process(PipelineContextpipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// this.getURLs(dataSource, controller); +// logger.info("Search Engine stats: \n" + getTotals()); +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// try{ +// services = IngestServices.getDefault(); +// PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE); +// init2(); +// } +// catch(IOException e){ +// logger.log(Level.SEVERE, "Unable to find " + XMLFILE , e); +// } +// } +// +// +// private void init2() { +// try { +// String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE; +// File f = new File(path); +// logger.log(Level.INFO, "Load successful"); +// DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); +// DocumentBuilder db = dbf.newDocumentBuilder(); +// Document xml = db.parse(f); +// xmlinput = xml; +// +// if (!XMLUtil.xmlIsValid(xml, SearchEngineURLQueryAnalyzer.class, XSDFILE)) { +// logger.log(Level.WARNING, "Error loading Search Engines: could not validate against [" + XSDFILE + "], results may not be accurate."); +// } +// createEngines(); +// getSearchEngineNames(); +// } catch (IOException e) { +// logger.log(Level.SEVERE, "Was not able to load SEUQAMappings.xml", e); +// } +// catch(ParserConfigurationException pce){ +// logger.log(Level.SEVERE, "Unable to build XML parser", pce); +// } +// catch(SAXException sxe){ +// logger.log(Level.SEVERE, "Unable to parse XML file", sxe); +// } 
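+// An illustrative SEUQAMappings.xml entry (a sketch: the element names and
+// the engine/domainSubstring attributes follow createEngines() above; the
+// splitToken attribute names and all values are assumptions):
+//
+// <SearchEngines>
+//   <SearchEngine engine="Google" domainSubstring="google.">
+//     <splitToken plainToken="?q=" regexToken="\?q="/>
+//   </SearchEngine>
+// </SearchEngines>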
+// } +// +// +// +// @Override +// public void complete() { +// logger.info("Search Engine URL Query Analyzer has completed."); +// } +// +// +// @Override +// public void stop() { +// logger.info("Attempted to stop Search Engine URL Query Analyzer, but operation is not supported; skipping..."); +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// String total = ""; +// for(String name : searchEngineNames){ +// total += name + "\n"; +// } +// return "Extracts search queries on the following search engines: \n" + total; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +//} diff --git a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java index 98bfbdf29c..958731c5cd 100644 --- a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java +++ b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java @@ -18,269 +18,268 @@ */ package org.sleuthkit.autopsy.scalpel; -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Level; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta; -import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver; -import org.sleuthkit.autopsy.scalpel.jni.ScalpelException; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.FileSystem; -import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.LayoutFile; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; -import org.sleuthkit.datamodel.TskFileRange; -import org.sleuthkit.datamodel.Volume; +//import java.io.File; +//import java.io.IOException; +//import java.util.ArrayList; +//import java.util.List; +//import java.util.logging.Level; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.coreutils.PlatformUtil; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +//import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta; +//import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver; +//import org.sleuthkit.autopsy.scalpel.jni.ScalpelException; +//import org.sleuthkit.datamodel.AbstractFile; +//import 
org.sleuthkit.datamodel.Content; +//import org.sleuthkit.datamodel.FileSystem; +//import org.sleuthkit.datamodel.Image; +//import org.sleuthkit.datamodel.LayoutFile; +//import org.sleuthkit.datamodel.SleuthkitCase; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; +//import org.sleuthkit.datamodel.TskFileRange; +//import org.sleuthkit.datamodel.Volume; /** * Scalpel carving ingest module */ -class ScalpelCarverIngestModule { // extends IngestModuleAbstractFile { // disable autodiscovery for now { - - private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName()); - - private static ScalpelCarverIngestModule instance; - private final String MODULE_NAME = "Scalpel Carver"; - private final String MODULE_DESCRIPTION = "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree."; - private final String MODULE_VERSION = Version.getVersion(); - private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; - private String moduleOutputDirPath; - private String configFileName = "scalpel.conf"; - private String configFilePath; - private boolean initialized = false; - private ScalpelCarver carver; - - private ScalpelCarverIngestModule() { - ScalpelCarver.init(); - } - - // @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - - if (!initialized) { - return ProcessResult.OK; - } - - // only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS - TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType(); - if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) { - return ProcessResult.OK; - } - - // create the output directory for this run - String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId(); - File scalpelOutputDir = new File(scalpelOutputDirPath); - if (!scalpelOutputDir.exists()) { - if (!scalpelOutputDir.mkdir()) { - logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath); - return ProcessResult.OK; - } - } - - // find the ID of the parent FileSystem, Volume or Image - long id = -1; - Content parent = null; - try { - parent = abstractFile.getParent(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex); - } - while (parent != null) { - if (parent instanceof FileSystem || - parent instanceof Volume || - parent instanceof Image) { - id = parent.getId(); - break; - } - try { - parent = parent.getParent(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex); - } - } - - // make sure we have a valid systemID - if (id == -1) { - logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile."); - return ProcessResult.OK; - } - - // carve the AbstractFile - List output = null; - try { - output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath); - } catch (ScalpelException ex) { - logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId()); - return ProcessResult.OK; - } - - - // get the image's size - long imageSize = Long.MAX_VALUE; - try { - - imageSize = abstractFile.getImage().getSize(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Could not obtain the image's size."); - } - - // add a carved file to the DB for each file that scalpel carved - 
SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase(); - List carvedFiles = new ArrayList(output.size()); - for (CarvedFileMeta carvedFileMeta : output) { - - // calculate the byte offset of this carved file - long byteOffset; - try { - byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart()); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")"); - break; - } - - // get the size of the carved file - long size = carvedFileMeta.getByteLength(); - - // create the list of TskFileRange objects - List data = new ArrayList(); - data.add(new TskFileRange(byteOffset, size, 0)); - - // add the carved file - try { - carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data)); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex); - } - } - - // get the IngestServices object - IngestServices is = IngestServices.getDefault(); - - // get the parent directory of the carved files - Content carvedFileDir = null; - if (!carvedFiles.isEmpty()) { - try { - carvedFileDir = carvedFiles.get(0).getParent(); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex); - } - } - - // send a notification about the carved files directory - if (carvedFileDir != null) { - is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir)); - } else { - logger.log(Level.SEVERE, "Could not obtain the carved files directory."); - } - - // reschedule carved files - for (LayoutFile carvedFile : carvedFiles) { - is.scheduleFile(carvedFile, pipelineContext); - } - - return ProcessResult.OK; - } - - - public static ScalpelCarverIngestModule getDefault() { - if (instance == null) { - synchronized (ScalpelCarverIngestModule.class) { - if (instance == null) { - instance = new ScalpelCarverIngestModule(); - } - } - } - return instance; - } - - // @Override - public void init(IngestModuleInit initContext) { - - // make sure this is Windows - String os = System.getProperty("os.name"); - if (!os.startsWith("Windows")) { - logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time."); - return; - } - - - carver = new ScalpelCarver(); - if (! carver.isInitialized()) { - logger.log(Level.SEVERE, "Error initializing scalpel carver. 
"); - return; - } - - // make sure module output directory exists; create it if it doesn't - moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + - File.separator + MODULE_OUTPUT_DIR_NAME; - File moduleOutputDir = new File(moduleOutputDirPath); - if (!moduleOutputDir.exists()) { - if (!moduleOutputDir.mkdir()) { - logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module."); - return; - } - } - - // create path to scalpel config file in user's home directory - configFilePath = PlatformUtil.getUserConfigDirectory() - + File.separator + configFileName; - - // copy the default config file to the user's home directory if one - // is not already there - try { - PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex); - return; - } - - initialized = true; - } - - // @Override - public void complete() { } - - // @Override - public void stop() { } - - // @Override - public String getName() { - return MODULE_NAME; - } - - // @Override - public String getVersion() { - return MODULE_VERSION; - } - - // @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - // @Override - public boolean hasBackgroundJobsRunning() { - return false; - } - - - - - - -} +//class ScalpelCarverIngestModule { // extends IngestModuleAbstractFile { // disable autodiscovery for now { +// +// private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName()); +// +// private static ScalpelCarverIngestModule instance; +// private final String MODULE_NAME = "Scalpel Carver"; +// private final String MODULE_DESCRIPTION = "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree."; +// private final String MODULE_VERSION = Version.getVersion(); +// private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; +// private String moduleOutputDirPath; +// private String configFileName = "scalpel.conf"; +// private String configFilePath; +// private boolean initialized = false; +// private ScalpelCarver carver; +// +// private ScalpelCarverIngestModule() { +// ScalpelCarver.init(); +// } +// +// // @Override +// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { +// +// if (!initialized) { +// return ProcessResult.OK; +// } +// +// // only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS +// TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType(); +// if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) { +// return ProcessResult.OK; +// } +// +// // create the output directory for this run +// String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId(); +// File scalpelOutputDir = new File(scalpelOutputDirPath); +// if (!scalpelOutputDir.exists()) { +// if (!scalpelOutputDir.mkdir()) { +// logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath); +// return ProcessResult.OK; +// } +// } +// +// // find the ID of the parent FileSystem, Volume or Image +// long id = -1; +// Content parent = null; +// try { +// parent = abstractFile.getParent(); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex); +// } +// while (parent != null) { +// if (parent instanceof FileSystem || +// parent instanceof Volume || +// parent instanceof Image) { +// id 
= parent.getId(); +// break; +// } +// try { +// parent = parent.getParent(); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex); +// } +// } +// +// // make sure we have a valid systemID +// if (id == -1) { +// logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile."); +// return ProcessResult.OK; +// } +// +// // carve the AbstractFile +// List output = null; +// try { +// output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath); +// } catch (ScalpelException ex) { +// logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId()); +// return ProcessResult.OK; +// } +// +// +// // get the image's size +// long imageSize = Long.MAX_VALUE; +// try { +// +// imageSize = abstractFile.getImage().getSize(); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Could not obtain the image's size."); +// } +// +// // add a carved file to the DB for each file that scalpel carved +// SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase(); +// List carvedFiles = new ArrayList(output.size()); +// for (CarvedFileMeta carvedFileMeta : output) { +// +// // calculate the byte offset of this carved file +// long byteOffset; +// try { +// byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart()); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")"); +// break; +// } +// +// // get the size of the carved file +// long size = carvedFileMeta.getByteLength(); +// +// // create the list of TskFileRange objects +// List data = new ArrayList(); +// data.add(new TskFileRange(byteOffset, size, 0)); +// +// // add the carved file +// try { +// carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data)); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex); +// } +// } +// +// // get the IngestServices object +// IngestServices is = IngestServices.getDefault(); +// +// // get the parent directory of the carved files +// Content carvedFileDir = null; +// if (!carvedFiles.isEmpty()) { +// try { +// carvedFileDir = carvedFiles.get(0).getParent(); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex); +// } +// } +// +// // send a notification about the carved files directory +// if (carvedFileDir != null) { +// is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir)); +// } else { +// logger.log(Level.SEVERE, "Could not obtain the carved files directory."); +// } +// +// // reschedule carved files +// for (LayoutFile carvedFile : carvedFiles) { +// is.scheduleFile(carvedFile, pipelineContext); +// } +// +// return ProcessResult.OK; +// } +// +// +// public static ScalpelCarverIngestModule getDefault() { +// if (instance == null) { +// synchronized (ScalpelCarverIngestModule.class) { +// if (instance == null) { +// instance = new ScalpelCarverIngestModule(); +// } +// } +// } +// return instance; +// } +// +// // @Override +// public void init(IngestModuleInit initContext) { +// +// // make sure this is Windows +// String os = System.getProperty("os.name"); +// if (!os.startsWith("Windows")) { +// logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's 
at this time."); +// return; +// } +// +// +// carver = new ScalpelCarver(); +// if (! carver.isInitialized()) { +// logger.log(Level.SEVERE, "Error initializing scalpel carver. "); +// return; +// } +// +// // make sure module output directory exists; create it if it doesn't +// moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + +// File.separator + MODULE_OUTPUT_DIR_NAME; +// File moduleOutputDir = new File(moduleOutputDirPath); +// if (!moduleOutputDir.exists()) { +// if (!moduleOutputDir.mkdir()) { +// logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module."); +// return; +// } +// } +// +// // create path to scalpel config file in user's home directory +// configFilePath = PlatformUtil.getUserConfigDirectory() +// + File.separator + configFileName; +// +// // copy the default config file to the user's home directory if one +// // is not already there +// try { +// PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex); +// return; +// } +// +// initialized = true; +// } +// +// // @Override +// public void complete() { } +// +// // @Override +// public void stop() { } +// +// // @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// // @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// // @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// // @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +// +// +// +// +// +// +//} diff --git a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java index 4f79973dcd..e2bb194232 100644 --- a/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java +++ b/SevenZip/src/org/sleuthkit/autopsy/sevenzip/SevenZipIngestModule.java @@ -18,47 +18,46 @@ */ package org.sleuthkit.autopsy.sevenzip; -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.logging.Level; -import net.sf.sevenzipjbinding.ISequentialOutStream; -import net.sf.sevenzipjbinding.ISevenZipInArchive; -import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.datamodel.AbstractFile; -import net.sf.sevenzipjbinding.SevenZip; -import net.sf.sevenzipjbinding.SevenZipException; -import net.sf.sevenzipjbinding.SevenZipNativeInitializationException; -import net.sf.sevenzipjbinding.simple.ISimpleInArchive; -import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem; -import org.netbeans.api.progress.ProgressHandle; -import org.netbeans.api.progress.ProgressHandleFactory; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.services.FileManager; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestMonitor; -import 
org.sleuthkit.autopsy.ingest.ModuleContentEvent; -import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -import org.sleuthkit.datamodel.DerivedFile; -import org.sleuthkit.datamodel.ReadContentInputStream; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; +//import java.io.BufferedOutputStream; +//import java.io.File; +//import java.io.FileNotFoundException; +//import java.io.FileOutputStream; +//import java.io.IOException; +//import java.io.OutputStream; +//import java.nio.ByteBuffer; +//import java.util.ArrayList; +//import java.util.Collections; +//import java.util.Date; +//import java.util.List; +//import java.util.logging.Level; +//import net.sf.sevenzipjbinding.ISequentialOutStream; +//import net.sf.sevenzipjbinding.ISevenZipInArchive; +//import org.sleuthkit.autopsy.coreutils.Logger; +//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.datamodel.AbstractFile; +//import net.sf.sevenzipjbinding.SevenZip; +//import net.sf.sevenzipjbinding.SevenZipException; +//import net.sf.sevenzipjbinding.SevenZipNativeInitializationException; +//import net.sf.sevenzipjbinding.simple.ISimpleInArchive; +//import net.sf.sevenzipjbinding.simple.ISimpleInArchiveItem; +//import org.netbeans.api.progress.ProgressHandle; +//import org.netbeans.api.progress.ProgressHandleFactory; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.casemodule.services.FileManager; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestMonitor; +//import org.sleuthkit.autopsy.ingest.ModuleContentEvent; +//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +//import org.sleuthkit.datamodel.BlackboardArtifact; +//import org.sleuthkit.datamodel.BlackboardAttribute; +//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +//import org.sleuthkit.datamodel.DerivedFile; +//import org.sleuthkit.datamodel.ReadContentInputStream; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; /** * 7Zip ingest module Extracts supported archives, adds extracted DerivedFiles, @@ -66,933 +65,933 @@ import org.sleuthkit.datamodel.TskData; * * Updates datamodel / directory tree with new files. 
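 *
 * Processing outline (summarizing the code below): candidate files are
 * matched by extension or by the 4-byte ZIP signature, opened with
 * 7-Zip-JBinding, screened for zip bombs (archive-depth and
 * compression-ratio checks), unpacked to the module output directory,
 * added to the case as DerivedFiles, and rescheduled for ingest.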
*/ -public final class SevenZipIngestModule extends IngestModuleAbstractFile { - - private static final Logger logger = Logger.getLogger(SevenZipIngestModule.class.getName()); - public static final String MODULE_NAME = "Archive Extractor"; - public static final String MODULE_DESCRIPTION = "Extracts archive files (zip, rar, arj, 7z, gzip, bzip2, tar), reschedules them to current ingest and populates directory tree with new files."; - final public static String MODULE_VERSION = Version.getVersion(); - private IngestServices services; - private volatile int messageID = 0; - private boolean initialized = false; - private static SevenZipIngestModule instance = null; - //TODO use content type detection instead of extensions - static final String[] SUPPORTED_EXTENSIONS = {"zip", "rar", "arj", "7z", "7zip", "gzip", "gz", "bzip2", "tar", "tgz", }; // "iso"}; - private String unpackDir; //relative to the case, to store in db - private String unpackDirPath; //absolute, to extract to - private FileManager fileManager; - //encryption type strings - private static final String ENCRYPTION_FILE_LEVEL = "File-level Encryption"; - private static final String ENCRYPTION_FULL = "Full Encryption"; - //zip bomb detection - private static final int MAX_DEPTH = 4; - private static final int MAX_COMPRESSION_RATIO = 600; - private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L; - private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB - //counts archive depth - private ArchiveDepthCountTree archiveDepthCountTree; - //buffer for checking file headers and signatures - private static final int readHeaderSize = 4; - private final byte[] fileHeaderBuffer = new byte[readHeaderSize]; - private static final int ZIP_SIGNATURE_BE = 0x504B0304; - - //private constructor to ensure singleton instance - private SevenZipIngestModule() { - } - - /** - * Returns singleton instance of the module, creates one if needed - * - * @return instance of the module - */ - public static synchronized SevenZipIngestModule getDefault() { - if (instance == null) { - instance = new SevenZipIngestModule(); - } - return instance; - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - initialized = false; - - final Case currentCase = Case.getCurrentCase(); - - unpackDir = Case.getModulesOutputDirRelPath() + File.separator + MODULE_NAME; - unpackDirPath = currentCase.getModulesOutputDirAbsPath() + File.separator + MODULE_NAME; - - fileManager = currentCase.getServices().getFileManager(); - - File unpackDirPathFile = new File(unpackDirPath); - if (!unpackDirPathFile.exists()) { - try { - unpackDirPathFile.mkdirs(); - } catch (SecurityException e) { - logger.log(Level.SEVERE, "Error initializing output dir: " + unpackDirPath, e); - String msg = "Error initializing " + MODULE_NAME; - String details = "Error initializing output dir: " + unpackDirPath + ": " + e.getMessage(); - //MessageNotifyUtil.Notify.error(msg, details); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - throw e; - } - } - - try { - SevenZip.initSevenZipFromPlatformJAR(); - String platform = SevenZip.getUsedPlatform(); - logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: " + platform); - } catch (SevenZipNativeInitializationException e) { - logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); - String msg = "Error initializing " + MODULE_NAME; - String details = "Could not initialize 7-ZIP 
library: " + e.getMessage(); - //MessageNotifyUtil.Notify.error(msg, details); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - throw new RuntimeException(e); - } - - archiveDepthCountTree = new ArchiveDepthCountTree(); - - initialized = true; - } - - @Override - public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { - - if (initialized == false) { //error initializing the module - logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); - return ProcessResult.OK; - } - - //skip unalloc - if(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { - return IngestModuleAbstractFile.ProcessResult.OK; - } - - // skip known - if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) { - return IngestModuleAbstractFile.ProcessResult.OK; - } - - if (abstractFile.isFile() == false || !isSupported(abstractFile)) { - //do not process dirs and files that are not supported - return ProcessResult.OK; - } - - //check if already has derived files, skip - try { - if (abstractFile.hasChildren()) { - //check if local unpacked dir exists - final String uniqueFileName = getUniqueName(abstractFile); - final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); - if (new File(localRootAbsPath).exists()) { - logger.log(Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: " + abstractFile.getName()); - return ProcessResult.OK; - } - } - } catch (TskCoreException e) { - logger.log(Level.INFO, "Error checking if file already has been processed, skipping: " + abstractFile.getName()); - return ProcessResult.OK; - } - - logger.log(Level.INFO, "Processing with " + MODULE_NAME + ": " + abstractFile.getName()); - - List unpackedFiles = unpack(abstractFile); - if (!unpackedFiles.isEmpty()) { - sendNewFilesEvent(abstractFile, unpackedFiles); - rescheduleNewFiles(pipelineContext, unpackedFiles); - } - - return ProcessResult.OK; - } - - private void sendNewFilesEvent(AbstractFile archive, List unpackedFiles) { - //currently sending a single event for all new files - services.fireModuleContentEvent(new ModuleContentEvent(archive)); - } - - private void rescheduleNewFiles(PipelineContext pipelineContext, List unpackedFiles) { - for (AbstractFile unpackedFile : unpackedFiles) { - services.scheduleFile(unpackedFile, pipelineContext); - } - } - - /** - * Get local relative path to the unpacked archive root - * - * @param archiveFile - * @return - */ - private String getUniqueName(AbstractFile archiveFile) { - return archiveFile.getName() + "_" + archiveFile.getId(); - } - - /** - * Get local abs path to the unpacked archive root - * - * @param localRootRelPath relative path to archive, from - * getUniqueName() - * @return - */ - private String getLocalRootAbsPath(String localRootRelPath) { - return unpackDirPath + File.separator + localRootRelPath; - } - - /** - * Check if the item inside archive is a potential zipbomb - * - * Currently checks compression ratio. 
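- * Concretely, per the constants above: items smaller than
- * MIN_COMPRESSION_RATIO_SIZE (500 MB) are never flagged; larger items are
- * flagged when size / packedSize >= MAX_COMPRESSION_RATIO (600).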
- * - * More heuristics to be added here - * - * @param archiveName the parent archive - * @param archiveFileItem the archive item - * @return true if potential zip bomb, false otherwise - */ - private boolean isZipBombArchiveItemCheck(String archiveName, ISimpleInArchiveItem archiveFileItem) { - try { - final long archiveItemSize = archiveFileItem.getSize(); - - //logger.log(Level.INFO, "ARCHIVE ITEM: " + archiveFileItem.getPath() + ", SIZE: " + archiveItemSize + " AR NAME: " + archiveName); - - //skip the check for small files - if (archiveItemSize < MIN_COMPRESSION_RATIO_SIZE) { - return false; - } - - final long archiveItemPackedSize = archiveFileItem.getPackedSize(); - - if (archiveItemPackedSize <= 0) { - logger.log(Level.WARNING, "Cannot getting compression ratio, cannot detect if zipbomb: " - + archiveName + ", item: " + archiveFileItem.getPath()); - return false; - } - - int cRatio = (int) (archiveItemSize / archiveItemPackedSize); - - if (cRatio >= MAX_COMPRESSION_RATIO) { - String itemName = archiveFileItem.getPath(); - logger.log(Level.INFO, "Possible zip bomb detected, compression ration: " + cRatio - + " for in archive item: " + itemName); - String msg = "Possible ZIP bomb detected in archive: " + archiveName - + ", item: " + itemName; - String details = "The archive item compression ratio is " + cRatio - + ", skipping processing of this archive item. "; - //MessageNotifyUtil.Notify.error(msg, details); - services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); - - return true; - } else { - return false; - } - - } catch (SevenZipException ex) { - logger.log(Level.SEVERE, "Error getting archive item size and cannot detect if zipbomb. ", ex); - return false; - } - } - - /** - * Unpack the file to local folder and return a list of derived files - * - * @param pipelineContext current ingest context - * @param archiveFile file to unpack - * @return list of unpacked derived files - */ - private List unpack(AbstractFile archiveFile) { - List unpackedFiles = Collections.emptyList(); - - //recursion depth check for zip bomb - final long archiveId = archiveFile.getId(); - ArchiveDepthCountTree.Archive parentAr = archiveDepthCountTree.findArchive(archiveId); - if (parentAr == null) { - parentAr = archiveDepthCountTree.addArchive(null, archiveId); - } else if (parentAr.getDepth() == MAX_DEPTH) { - String msg = "Possible ZIP bomb detected: " + archiveFile.getName(); - String details = "The archive is " + parentAr.getDepth() - + " levels deep, skipping processing of this archive and its contents "; - //MessageNotifyUtil.Notify.error(msg, details); - services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); - return unpackedFiles; - } - - boolean hasEncrypted = false; - boolean fullEncryption = true; - - ISevenZipInArchive inArchive = null; - SevenZipContentReadStream stream = null; - - final ProgressHandle progress = ProgressHandleFactory.createHandle(MODULE_NAME); - int processedItems = 0; - - String compressMethod = null; - try { - stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile)); - inArchive = SevenZip.openInArchive(null, // autodetect archive type - stream); - - int numItems = inArchive.getNumberOfItems(); - logger.log(Level.INFO, "Count of items in archive: " + archiveFile.getName() + ": " - + numItems); - progress.start(numItems); - - final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); - - //setup the archive local root folder - final String uniqueFileName = 
getUniqueName(archiveFile); - final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); - final File localRoot = new File(localRootAbsPath); - if (!localRoot.exists()) { - try { - localRoot.mkdirs(); - } catch (SecurityException e) { - logger.log(Level.SEVERE, "Error setting up output path for archive root: " + localRootAbsPath); - //bail - return unpackedFiles; - } - } - - //initialize tree hierarchy to keep track of unpacked file structure - UnpackedTree uTree = new UnpackedTree(unpackDir + "/" + uniqueFileName, archiveFile, fileManager); - - long freeDiskSpace = services.getFreeDiskSpace(); - - //unpack and process every item in archive - int itemNumber = 0; - for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) { - String extractedPath = item.getPath(); - if (extractedPath == null || extractedPath.isEmpty() ) { - //some formats (.tar.gz) may not be handled correctly -- file in archive has no name/path - //handle this for .tar.gz and tgz but assuming the child is tar, - //otherwise, unpack using itemNumber as name - - //TODO this should really be signature based, not extension based - String archName = archiveFile.getName(); - int dotI = archName.lastIndexOf("."); - String useName = null; - if (dotI != -1 ) { - String base = archName.substring(0, dotI); - String ext = archName.substring(dotI); - if (ext.equals(".gz") ) { - useName = base; - } - else if (ext.equals(".tgz")) { - useName = base + ".tar"; - } - } - - if (useName == null) { - extractedPath = "/" + archName + "/" + Integer.toString(itemNumber); - } - else { - extractedPath = "/" + useName; - } - - String msg = "Unknown item path in archive: " + archiveFile.getName() + ", will use: " + extractedPath; - logger.log(Level.WARNING, msg); - - } - ++itemNumber; - logger.log(Level.INFO, "Extracted item path: " + extractedPath); - - //check if possible zip bomb - if (isZipBombArchiveItemCheck(archiveFile.getName(), item)) { - continue; //skip the item - } - - //find this node in the hierarchy, create if needed - UnpackedTree.Data uNode = uTree.find(extractedPath); - - String fileName = uNode.getFileName(); - - //update progress bar - progress.progress(archiveFile.getName() + ": " + fileName, processedItems); - - if (compressMethod == null) { - compressMethod = item.getMethod(); - } - - final boolean isEncrypted = item.isEncrypted(); - final boolean isDir = item.isFolder(); - - if (isEncrypted) { - logger.log(Level.WARNING, "Skipping encrypted file in archive: " + extractedPath); - hasEncrypted = true; - continue; - } else { - fullEncryption = false; - } - - final long size = item.getSize(); - - //check if unpacking this file will result in out of disk space - //this is additional to zip bomb prevention mechanism - if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) { //if known free space and file not empty - long newDiskSpace = freeDiskSpace - size; - if (newDiskSpace < MIN_FREE_DISK_SPACE) { - String msg = "Not enough disk space to unpack archive item: " + archiveFile.getName() + ", " + fileName; - String details = "The archive item is too large to unpack, skipping unpacking this item. "; - //MessageNotifyUtil.Notify.error(msg, details); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - logger.log(Level.INFO, "Skipping archive item due not sufficient disk space for this item: " + archiveFile.getName() + ", " + fileName); - continue; //skip this file - } else { - //update est. 
disk space during this archive, so we don't need to poll for every file extracted - freeDiskSpace = newDiskSpace; - } - } - - final String localFileRelPath = uniqueFileName + File.separator + extractedPath; - //final String localRelPath = unpackDir + File.separator + localFileRelPath; - final String localAbsPath = unpackDirPath + File.separator + localFileRelPath; - - //create local dirs and empty files before extracted - File localFile = new java.io.File(localAbsPath); - //cannot rely on files in top-bottom order - if (!localFile.exists()) { - try { - if (isDir) { - localFile.mkdirs(); - } else { - localFile.getParentFile().mkdirs(); - try { - localFile.createNewFile(); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), ex); - } - } - } catch (SecurityException e) { - logger.log(Level.SEVERE, "Error setting up output path for unpacked file: " + extractedPath); - //TODO consider bail out / msg to the user - } - } - - final Date createTime = item.getCreationTime(); - final Date accessTime = item.getLastAccessTime(); - final Date writeTime = item.getLastWriteTime(); - final long createtime = createTime == null ? 0L : createTime.getTime() / 1000; - final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000; - final long accesstime = accessTime == null ? 0L : accessTime.getTime() / 1000; - - //record derived data in unode, to be traversed later after unpacking the archive - uNode.addDerivedInfo(size, !isDir, - 0L, createtime, accesstime, modtime); - - //unpack locally if a file - if (!isDir) { - UnpackStream unpackStream = null; - try { - unpackStream = new UnpackStream(localAbsPath); - item.extractSlow(unpackStream); - } catch (Exception e) { - //could be something unexpected with this file, move on - logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); - } finally { - if (unpackStream != null) { - unpackStream.close(); - } - } - } - - //update units for progress bar - ++processedItems; - } //for every item in archive - - try { - uTree.createDerivedFiles(); - unpackedFiles = uTree.getAllFileObjects(); - - //check if children are archives, update archive depth tracking - for (AbstractFile unpackedFile : unpackedFiles) { - if (isSupported(unpackedFile)) { - archiveDepthCountTree.addArchive(parentAr, unpackedFile.getId()); - } - } - - } catch (TskCoreException e) { - logger.log(Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure"); - //TODO decide if anything to cleanup, for now bailing - } - - } catch (SevenZipException ex) { - logger.log(Level.SEVERE, "Error unpacking file: " + archiveFile, ex); - //inbox message - String fullName; - try { - fullName = archiveFile.getUniquePath(); - } catch (TskCoreException ex1) { - fullName = archiveFile.getName(); - } - - String msg = "Error unpacking " + archiveFile.getName(); - String details = "Error unpacking (" + - (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "allocated" : "deleted") + ") " + fullName - + ". 
" + ex.getMessage(); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - } finally { - if (inArchive != null) { - try { - inArchive.close(); - } catch (SevenZipException e) { - logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); - } - } - - if (stream != null) { - try { - stream.close(); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); - } - } - - //close progress bar - progress.finish(); - } - - //create artifact and send user message - if (hasEncrypted) { - String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; - try { - BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED); - artifact.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), MODULE_NAME, encryptionType)); - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED)); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFile, ex); - } - - String msg = "Encrypted files in archive detected. "; - String details = "Some files in archive: " + archiveFile.getName() + " are encrypted. " - + MODULE_NAME + " extractor was unable to extract all files from this archive."; - // MessageNotifyUtil.Notify.info(msg, details); - - services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); - } - - return unpackedFiles; - } - - @Override - public void complete() { - if (initialized == false) { - return; - } - archiveDepthCountTree = null; - } - - @Override - public void stop() { - archiveDepthCountTree = null; - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } - - private boolean isSupported(AbstractFile file) { - String fileNameLower = file.getName().toLowerCase(); - int dotI = fileNameLower.lastIndexOf("."); - if (dotI == -1 || dotI == fileNameLower.length() - 1) { - return false; //no extension - } - - final String extension = fileNameLower.substring(dotI + 1); - for (int i = 0; i < SUPPORTED_EXTENSIONS.length; ++i) { - if (extension.equals(SUPPORTED_EXTENSIONS[i])) { - return true; - } - } - - //if no extension match, check for zip signature - //(note, in near future, we will use pre-detected content type) - return isZipFileHeader(file); - } - - /** - * Check if is zip file based on header - * - * @param file - * @return true if zip file, false otherwise - */ - private boolean isZipFileHeader(AbstractFile file) { - if (file.getSize() < readHeaderSize) { - return false; - } - - int bytesRead = 0; - try { - bytesRead = file.read(fileHeaderBuffer, 0, readHeaderSize); - } catch (TskCoreException ex) { - //ignore if can't read the first few bytes, not a ZIP - return false; - } - if (bytesRead != readHeaderSize) { - return false; - } - - ByteBuffer bytes = ByteBuffer.wrap(fileHeaderBuffer); - int signature = bytes.getInt(); - - return signature == ZIP_SIGNATURE_BE; - } - - /** - * Stream used to unpack the archive to local file - */ - private static class UnpackStream implements ISequentialOutStream { - - private OutputStream output; - private String localAbsPath; - - 
UnpackStream(String localAbsPath) { - try { - output = new BufferedOutputStream(new FileOutputStream(localAbsPath)); - } catch (FileNotFoundException ex) { - logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); - } - - } - - @Override - public int write(byte[] bytes) throws SevenZipException { - try { - output.write(bytes); - } catch (IOException ex) { - throw new SevenZipException("Error writing unpacked file to: " + localAbsPath, ex); - } - return bytes.length; - } - - public void close() { - if (output != null) { - try { - output.flush(); - output.close(); - } catch (IOException e) { - logger.log(Level.SEVERE, "Error closing unpack stream for file: " + localAbsPath); - } - } - } - } - - /** - * Representation of local directory tree of unpacked archive. Used to track - * of local tree file hierarchy, archive depth, and files created to easily - * and reliably get parent AbstractFile for unpacked file. So that we don't - * have to depend on type of traversal of unpacked files handed to us by - * 7zip unpacker. - */ - private static class UnpackedTree { - - final String localPathRoot; - final Data root; //dummy root to hold children - final FileManager fileManager; - - UnpackedTree(String localPathRoot, AbstractFile archiveRoot, FileManager fileManager) { - this.localPathRoot = localPathRoot; - this.fileManager = fileManager; - this.root = new Data(); - this.root.setFile(archiveRoot); - this.root.setFileName(archiveRoot.getName()); - this.root.localRelPath = localPathRoot; - } - - /** - * Tokenizes filePath passed in and traverses the dir structure, - * creating data nodes on the path way as needed - * - * @param filePath file path with 1 or more tokens separated by / - * @return child node for the last file token in the filePath - */ - Data find(String filePath) { - String[] toks = filePath.split("[\\/\\\\]"); - List tokens = new ArrayList(); - for (int i = 0; i < toks.length; ++i) { - if (!toks[i].isEmpty()) { - tokens.add(toks[i]); - } - } - return find(root, tokens); - } - - /** - * recursive method that traverses the path - * - * @param tokenPath - * @return - */ - private Data find(Data parent, List tokenPath) { - //base case - if (tokenPath.isEmpty()) { - return parent; - } - - String childName = tokenPath.remove(0); //step towards base case - Data child = parent.getChild(childName); - if (child == null) { - child = new Data(childName, parent); - } - return find(child, tokenPath); - - } - - /** - * Get the root file objects (after createDerivedFiles() ) of this tree, - * so that they can be rescheduled. - * - * @return root objects of this unpacked tree - */ - List getRootFileObjects() { - List ret = new ArrayList(); - for (Data child : root.children) { - ret.add(child.getFile()); - } - return ret; - } - - /** - * Get the all file objects (after createDerivedFiles() ) of this tree, - * so that they can be rescheduled. 
- * - * @return all file objects of this unpacked tree - */ - List getAllFileObjects() { - List ret = new ArrayList(); - for (Data child : root.children) { - getAllFileObjectsRec(ret, child); - } - return ret; - } - - private void getAllFileObjectsRec(List list, Data parent) { - list.add(parent.getFile()); - for (Data child : parent.children) { - getAllFileObjectsRec(list, child); - } - } - - /** - * Traverse the tree top-down after unzipping is done and create derived - * files for the entire hierarchy - */ - void createDerivedFiles() throws TskCoreException { - for (Data child : root.children) { - createDerivedFilesRec(child); - } - - } - - private void createDerivedFilesRec(Data node) throws TskCoreException { - final String fileName = node.getFileName(); - final String localRelPath = node.getLocalRelPath(); - final long size = node.getSize(); - final boolean isFile = node.isIsFile(); - final AbstractFile parent = node.getParent().getFile(); - - try { - DerivedFile df = fileManager.addDerivedFile(fileName, localRelPath, size, - node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), - isFile, parent, "", MODULE_NAME, "", ""); - node.setFile(df); - - - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, "Error adding a derived file to db:" + fileName, ex); - throw new TskCoreException("Error adding a derived file to db:" + fileName, ex); - } - - //recurse - for (Data child : node.children) { - createDerivedFilesRec(child); - } - - - } - - private static class Data { - - private String fileName; - private AbstractFile file; - private List children = new ArrayList(); - private String localRelPath; - private long size; - private long ctime, crtime, atime, mtime; - private boolean isFile; - private Data parent; - - //root constructor - Data() { - } - - //child node constructor - Data(String fileName, Data parent) { - this.fileName = fileName; - this.parent = parent; - this.localRelPath = parent.localRelPath + File.separator + fileName; - //new child derived file will be set by unpack() method - parent.children.add(this); - - } - - public long getCtime() { - return ctime; - } - - public long getCrtime() { - return crtime; - } - - public long getAtime() { - return atime; - } - - public long getMtime() { - return mtime; - } - - public void setFileName(String fileName) { - this.fileName = fileName; - } - - Data getParent() { - return parent; - } - - void addDerivedInfo(long size, - boolean isFile, - long ctime, long crtime, long atime, long mtime) { - this.size = size; - this.isFile = isFile; - this.ctime = ctime; - this.crtime = crtime; - this.atime = atime; - this.mtime = mtime; - } - - void setFile(AbstractFile file) { - this.file = file; - } - - /** - * get child by name or null if it doesn't exist - * - * @param childFileName - * @return - */ - Data getChild(String childFileName) { - Data ret = null; - for (Data child : children) { - if (child.fileName.equals(childFileName)) { - ret = child; - break; - } - } - return ret; - } - - public String getFileName() { - return fileName; - } - - public AbstractFile getFile() { - return file; - } - - public String getLocalRelPath() { - return localRelPath; - } - - public long getSize() { - return size; - } - - public boolean isIsFile() { - return isFile; - } - } - } - - /** - * Tracks archive hierarchy and archive depth - */ - private static class ArchiveDepthCountTree { - - //keeps all nodes refs for easy search - private final List archives = new ArrayList(); - - /** - * Search for previously added parent archive by id - * - * 
@param objectId parent archive object id - * @return the archive node or null if not found - */ - Archive findArchive(long objectId) { - for (Archive ar : archives) { - if (ar.objectId == objectId) { - return ar; - } - } - - return null; - } - - /** - * Add a new archive to track of depth - * - * @param parent parent archive or null - * @param objectId object id of the new archive - * @return the archive added - */ - Archive addArchive(Archive parent, long objectId) { - Archive child = new Archive(parent, objectId); - archives.add(child); - return child; - } - - private static class Archive { - - int depth; - long objectId; - Archive parent; - List children; - - Archive(Archive parent, long objectId) { - this.parent = parent; - this.objectId = objectId; - children = new ArrayList(); - if (parent != null) { - parent.children.add(this); - this.depth = parent.depth + 1; - } else { - this.depth = 0; - } - } - - /** - * get archive depth of this archive - * - * @return - */ - int getDepth() { - return depth; - } - } - } -} +//public final class SevenZipIngestModule extends IngestModuleAbstractFile { +// +// private static final Logger logger = Logger.getLogger(SevenZipIngestModule.class.getName()); +// public static final String MODULE_NAME = "Archive Extractor"; +// public static final String MODULE_DESCRIPTION = "Extracts archive files (zip, rar, arj, 7z, gzip, bzip2, tar), reschedules them to current ingest and populates directory tree with new files."; +// final public static String MODULE_VERSION = Version.getVersion(); +// private IngestServices services; +// private volatile int messageID = 0; +// private boolean initialized = false; +// private static SevenZipIngestModule instance = null; +// //TODO use content type detection instead of extensions +// static final String[] SUPPORTED_EXTENSIONS = {"zip", "rar", "arj", "7z", "7zip", "gzip", "gz", "bzip2", "tar", "tgz", }; // "iso"}; +// private String unpackDir; //relative to the case, to store in db +// private String unpackDirPath; //absolute, to extract to +// private FileManager fileManager; +// //encryption type strings +// private static final String ENCRYPTION_FILE_LEVEL = "File-level Encryption"; +// private static final String ENCRYPTION_FULL = "Full Encryption"; +// //zip bomb detection +// private static final int MAX_DEPTH = 4; +// private static final int MAX_COMPRESSION_RATIO = 600; +// private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L; +// private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB +// //counts archive depth +// private ArchiveDepthCountTree archiveDepthCountTree; +// //buffer for checking file headers and signatures +// private static final int readHeaderSize = 4; +// private final byte[] fileHeaderBuffer = new byte[readHeaderSize]; +// private static final int ZIP_SIGNATURE_BE = 0x504B0304; +// +// //private constructor to ensure singleton instance +// private SevenZipIngestModule() { +// } +// +// /** +// * Returns singleton instance of the module, creates one if needed +// * +// * @return instance of the module +// */ +// public static synchronized SevenZipIngestModule getDefault() { +// if (instance == null) { +// instance = new SevenZipIngestModule(); +// } +// return instance; +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// initialized = false; +// +// final Case currentCase = Case.getCurrentCase(); +// +// unpackDir = Case.getModulesOutputDirRelPath() + File.separator + MODULE_NAME; +// 
unpackDirPath = currentCase.getModulesOutputDirAbsPath() + File.separator + MODULE_NAME; +// +// fileManager = currentCase.getServices().getFileManager(); +// +// File unpackDirPathFile = new File(unpackDirPath); +// if (!unpackDirPathFile.exists()) { +// try { +// unpackDirPathFile.mkdirs(); +// } catch (SecurityException e) { +// logger.log(Level.SEVERE, "Error initializing output dir: " + unpackDirPath, e); +// String msg = "Error initializing " + MODULE_NAME; +// String details = "Error initializing output dir: " + unpackDirPath + ": " + e.getMessage(); +// //MessageNotifyUtil.Notify.error(msg, details); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// throw e; +// } +// } +// +// try { +// SevenZip.initSevenZipFromPlatformJAR(); +// String platform = SevenZip.getUsedPlatform(); +// logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: " + platform); +// } catch (SevenZipNativeInitializationException e) { +// logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); +// String msg = "Error initializing " + MODULE_NAME; +// String details = "Could not initialize 7-ZIP library: " + e.getMessage(); +// //MessageNotifyUtil.Notify.error(msg, details); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// throw new RuntimeException(e); +// } +// +// archiveDepthCountTree = new ArchiveDepthCountTree(); +// +// initialized = true; +// } +// +// @Override +// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { +// +// if (initialized == false) { //error initializing the module +// logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); +// return ProcessResult.OK; +// } +// +// //skip unalloc +// if(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { +// return IngestModuleAbstractFile.ProcessResult.OK; +// } +// +// // skip known +// if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) { +// return IngestModuleAbstractFile.ProcessResult.OK; +// } +// +// if (abstractFile.isFile() == false || !isSupported(abstractFile)) { +// //do not process dirs and files that are not supported +// return ProcessResult.OK; +// } +// +// //check if already has derived files, skip +// try { +// if (abstractFile.hasChildren()) { +// //check if local unpacked dir exists +// final String uniqueFileName = getUniqueName(abstractFile); +// final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); +// if (new File(localRootAbsPath).exists()) { +// logger.log(Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: " + abstractFile.getName()); +// return ProcessResult.OK; +// } +// } +// } catch (TskCoreException e) { +// logger.log(Level.INFO, "Error checking if file already has been processed, skipping: " + abstractFile.getName()); +// return ProcessResult.OK; +// } +// +// logger.log(Level.INFO, "Processing with " + MODULE_NAME + ": " + abstractFile.getName()); +// +// List unpackedFiles = unpack(abstractFile); +// if (!unpackedFiles.isEmpty()) { +// sendNewFilesEvent(abstractFile, unpackedFiles); +// rescheduleNewFiles(pipelineContext, unpackedFiles); +// } +// +// return ProcessResult.OK; +// } +// +// private void sendNewFilesEvent(AbstractFile archive, List unpackedFiles) { +// //currently sending a single event for all new files +// services.fireModuleContentEvent(new 
ModuleContentEvent(archive)); +// } +// +// private void rescheduleNewFiles(PipelineContext pipelineContext, List unpackedFiles) { +// for (AbstractFile unpackedFile : unpackedFiles) { +// services.scheduleFile(unpackedFile, pipelineContext); +// } +// } +// +// /** +// * Get local relative path to the unpacked archive root +// * +// * @param archiveFile +// * @return +// */ +// private String getUniqueName(AbstractFile archiveFile) { +// return archiveFile.getName() + "_" + archiveFile.getId(); +// } +// +// /** +// * Get local abs path to the unpacked archive root +// * +// * @param localRootRelPath relative path to archive, from +// * getUniqueName() +// * @return +// */ +// private String getLocalRootAbsPath(String localRootRelPath) { +// return unpackDirPath + File.separator + localRootRelPath; +// } +// +// /** +// * Check if the item inside archive is a potential zipbomb +// * +// * Currently checks compression ratio. +// * +// * More heuristics to be added here +// * +// * @param archiveName the parent archive +// * @param archiveFileItem the archive item +// * @return true if potential zip bomb, false otherwise +// */ +// private boolean isZipBombArchiveItemCheck(String archiveName, ISimpleInArchiveItem archiveFileItem) { +// try { +// final long archiveItemSize = archiveFileItem.getSize(); +// +// //logger.log(Level.INFO, "ARCHIVE ITEM: " + archiveFileItem.getPath() + ", SIZE: " + archiveItemSize + " AR NAME: " + archiveName); +// +// //skip the check for small files +// if (archiveItemSize < MIN_COMPRESSION_RATIO_SIZE) { +// return false; +// } +// +// final long archiveItemPackedSize = archiveFileItem.getPackedSize(); +// +// if (archiveItemPackedSize <= 0) { +// logger.log(Level.WARNING, "Cannot getting compression ratio, cannot detect if zipbomb: " +// + archiveName + ", item: " + archiveFileItem.getPath()); +// return false; +// } +// +// int cRatio = (int) (archiveItemSize / archiveItemPackedSize); +// +// if (cRatio >= MAX_COMPRESSION_RATIO) { +// String itemName = archiveFileItem.getPath(); +// logger.log(Level.INFO, "Possible zip bomb detected, compression ration: " + cRatio +// + " for in archive item: " + itemName); +// String msg = "Possible ZIP bomb detected in archive: " + archiveName +// + ", item: " + itemName; +// String details = "The archive item compression ratio is " + cRatio +// + ", skipping processing of this archive item. "; +// //MessageNotifyUtil.Notify.error(msg, details); +// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); +// +// return true; +// } else { +// return false; +// } +// +// } catch (SevenZipException ex) { +// logger.log(Level.SEVERE, "Error getting archive item size and cannot detect if zipbomb. 
", ex); +// return false; +// } +// } +// +// /** +// * Unpack the file to local folder and return a list of derived files +// * +// * @param pipelineContext current ingest context +// * @param archiveFile file to unpack +// * @return list of unpacked derived files +// */ +// private List unpack(AbstractFile archiveFile) { +// List unpackedFiles = Collections.emptyList(); +// +// //recursion depth check for zip bomb +// final long archiveId = archiveFile.getId(); +// ArchiveDepthCountTree.Archive parentAr = archiveDepthCountTree.findArchive(archiveId); +// if (parentAr == null) { +// parentAr = archiveDepthCountTree.addArchive(null, archiveId); +// } else if (parentAr.getDepth() == MAX_DEPTH) { +// String msg = "Possible ZIP bomb detected: " + archiveFile.getName(); +// String details = "The archive is " + parentAr.getDepth() +// + " levels deep, skipping processing of this archive and its contents "; +// //MessageNotifyUtil.Notify.error(msg, details); +// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); +// return unpackedFiles; +// } +// +// boolean hasEncrypted = false; +// boolean fullEncryption = true; +// +// ISevenZipInArchive inArchive = null; +// SevenZipContentReadStream stream = null; +// +// final ProgressHandle progress = ProgressHandleFactory.createHandle(MODULE_NAME); +// int processedItems = 0; +// +// String compressMethod = null; +// try { +// stream = new SevenZipContentReadStream(new ReadContentInputStream(archiveFile)); +// inArchive = SevenZip.openInArchive(null, // autodetect archive type +// stream); +// +// int numItems = inArchive.getNumberOfItems(); +// logger.log(Level.INFO, "Count of items in archive: " + archiveFile.getName() + ": " +// + numItems); +// progress.start(numItems); +// +// final ISimpleInArchive simpleInArchive = inArchive.getSimpleInterface(); +// +// //setup the archive local root folder +// final String uniqueFileName = getUniqueName(archiveFile); +// final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName); +// final File localRoot = new File(localRootAbsPath); +// if (!localRoot.exists()) { +// try { +// localRoot.mkdirs(); +// } catch (SecurityException e) { +// logger.log(Level.SEVERE, "Error setting up output path for archive root: " + localRootAbsPath); +// //bail +// return unpackedFiles; +// } +// } +// +// //initialize tree hierarchy to keep track of unpacked file structure +// UnpackedTree uTree = new UnpackedTree(unpackDir + "/" + uniqueFileName, archiveFile, fileManager); +// +// long freeDiskSpace = services.getFreeDiskSpace(); +// +// //unpack and process every item in archive +// int itemNumber = 0; +// for (ISimpleInArchiveItem item : simpleInArchive.getArchiveItems()) { +// String extractedPath = item.getPath(); +// if (extractedPath == null || extractedPath.isEmpty() ) { +// //some formats (.tar.gz) may not be handled correctly -- file in archive has no name/path +// //handle this for .tar.gz and tgz but assuming the child is tar, +// //otherwise, unpack using itemNumber as name +// +// //TODO this should really be signature based, not extension based +// String archName = archiveFile.getName(); +// int dotI = archName.lastIndexOf("."); +// String useName = null; +// if (dotI != -1 ) { +// String base = archName.substring(0, dotI); +// String ext = archName.substring(dotI); +// if (ext.equals(".gz") ) { +// useName = base; +// } +// else if (ext.equals(".tgz")) { +// useName = base + ".tar"; +// } +// } +// +// if (useName == null) { +// extractedPath = "/" + archName + 
"/" + Integer.toString(itemNumber); +// } +// else { +// extractedPath = "/" + useName; +// } +// +// String msg = "Unknown item path in archive: " + archiveFile.getName() + ", will use: " + extractedPath; +// logger.log(Level.WARNING, msg); +// +// } +// ++itemNumber; +// logger.log(Level.INFO, "Extracted item path: " + extractedPath); +// +// //check if possible zip bomb +// if (isZipBombArchiveItemCheck(archiveFile.getName(), item)) { +// continue; //skip the item +// } +// +// //find this node in the hierarchy, create if needed +// UnpackedTree.Data uNode = uTree.find(extractedPath); +// +// String fileName = uNode.getFileName(); +// +// //update progress bar +// progress.progress(archiveFile.getName() + ": " + fileName, processedItems); +// +// if (compressMethod == null) { +// compressMethod = item.getMethod(); +// } +// +// final boolean isEncrypted = item.isEncrypted(); +// final boolean isDir = item.isFolder(); +// +// if (isEncrypted) { +// logger.log(Level.WARNING, "Skipping encrypted file in archive: " + extractedPath); +// hasEncrypted = true; +// continue; +// } else { +// fullEncryption = false; +// } +// +// final long size = item.getSize(); +// +// //check if unpacking this file will result in out of disk space +// //this is additional to zip bomb prevention mechanism +// if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && size > 0) { //if known free space and file not empty +// long newDiskSpace = freeDiskSpace - size; +// if (newDiskSpace < MIN_FREE_DISK_SPACE) { +// String msg = "Not enough disk space to unpack archive item: " + archiveFile.getName() + ", " + fileName; +// String details = "The archive item is too large to unpack, skipping unpacking this item. "; +// //MessageNotifyUtil.Notify.error(msg, details); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// logger.log(Level.INFO, "Skipping archive item due not sufficient disk space for this item: " + archiveFile.getName() + ", " + fileName); +// continue; //skip this file +// } else { +// //update est. disk space during this archive, so we don't need to poll for every file extracted +// freeDiskSpace = newDiskSpace; +// } +// } +// +// final String localFileRelPath = uniqueFileName + File.separator + extractedPath; +// //final String localRelPath = unpackDir + File.separator + localFileRelPath; +// final String localAbsPath = unpackDirPath + File.separator + localFileRelPath; +// +// //create local dirs and empty files before extracted +// File localFile = new java.io.File(localAbsPath); +// //cannot rely on files in top-bottom order +// if (!localFile.exists()) { +// try { +// if (isDir) { +// localFile.mkdirs(); +// } else { +// localFile.getParentFile().mkdirs(); +// try { +// localFile.createNewFile(); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error creating extracted file: " + localFile.getAbsolutePath(), ex); +// } +// } +// } catch (SecurityException e) { +// logger.log(Level.SEVERE, "Error setting up output path for unpacked file: " + extractedPath); +// //TODO consider bail out / msg to the user +// } +// } +// +// final Date createTime = item.getCreationTime(); +// final Date accessTime = item.getLastAccessTime(); +// final Date writeTime = item.getLastWriteTime(); +// final long createtime = createTime == null ? 0L : createTime.getTime() / 1000; +// final long modtime = writeTime == null ? 0L : writeTime.getTime() / 1000; +// final long accesstime = accessTime == null ? 
0L : accessTime.getTime() / 1000; +// +// //record derived data in unode, to be traversed later after unpacking the archive +// uNode.addDerivedInfo(size, !isDir, +// 0L, createtime, accesstime, modtime); +// +// //unpack locally if a file +// if (!isDir) { +// UnpackStream unpackStream = null; +// try { +// unpackStream = new UnpackStream(localAbsPath); +// item.extractSlow(unpackStream); +// } catch (Exception e) { +// //could be something unexpected with this file, move on +// logger.log(Level.WARNING, "Could not extract file from archive: " + localAbsPath, e); +// } finally { +// if (unpackStream != null) { +// unpackStream.close(); +// } +// } +// } +// +// //update units for progress bar +// ++processedItems; +// } //for every item in archive +// +// try { +// uTree.createDerivedFiles(); +// unpackedFiles = uTree.getAllFileObjects(); +// +// //check if children are archives, update archive depth tracking +// for (AbstractFile unpackedFile : unpackedFiles) { +// if (isSupported(unpackedFile)) { +// archiveDepthCountTree.addArchive(parentAr, unpackedFile.getId()); +// } +// } +// +// } catch (TskCoreException e) { +// logger.log(Level.SEVERE, "Error populating complete derived file hierarchy from the unpacked dir structure"); +// //TODO decide if anything to cleanup, for now bailing +// } +// +// } catch (SevenZipException ex) { +// logger.log(Level.SEVERE, "Error unpacking file: " + archiveFile, ex); +// //inbox message +// String fullName; +// try { +// fullName = archiveFile.getUniquePath(); +// } catch (TskCoreException ex1) { +// fullName = archiveFile.getName(); +// } +// +// String msg = "Error unpacking " + archiveFile.getName(); +// String details = "Error unpacking (" + +// (archiveFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "allocated" : "deleted") + ") " + fullName +// + ". " + ex.getMessage(); +// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); +// } finally { +// if (inArchive != null) { +// try { +// inArchive.close(); +// } catch (SevenZipException e) { +// logger.log(Level.SEVERE, "Error closing archive: " + archiveFile, e); +// } +// } +// +// if (stream != null) { +// try { +// stream.close(); +// } catch (IOException ex) { +// logger.log(Level.SEVERE, "Error closing stream after unpacking archive: " + archiveFile, ex); +// } +// } +// +// //close progress bar +// progress.finish(); +// } +// +// //create artifact and send user message +// if (hasEncrypted) { +// String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL; +// try { +// BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED); +// artifact.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), MODULE_NAME, encryptionType)); +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED)); +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFile, ex); +// } +// +// String msg = "Encrypted files in archive detected. "; +// String details = "Some files in archive: " + archiveFile.getName() + " are encrypted. 
" +// + MODULE_NAME + " extractor was unable to extract all files from this archive."; +// // MessageNotifyUtil.Notify.info(msg, details); +// +// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, msg, details)); +// } +// +// return unpackedFiles; +// } +// +// @Override +// public void complete() { +// if (initialized == false) { +// return; +// } +// archiveDepthCountTree = null; +// } +// +// @Override +// public void stop() { +// archiveDepthCountTree = null; +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +// +// private boolean isSupported(AbstractFile file) { +// String fileNameLower = file.getName().toLowerCase(); +// int dotI = fileNameLower.lastIndexOf("."); +// if (dotI == -1 || dotI == fileNameLower.length() - 1) { +// return false; //no extension +// } +// +// final String extension = fileNameLower.substring(dotI + 1); +// for (int i = 0; i < SUPPORTED_EXTENSIONS.length; ++i) { +// if (extension.equals(SUPPORTED_EXTENSIONS[i])) { +// return true; +// } +// } +// +// //if no extension match, check for zip signature +// //(note, in near future, we will use pre-detected content type) +// return isZipFileHeader(file); +// } +// +// /** +// * Check if is zip file based on header +// * +// * @param file +// * @return true if zip file, false otherwise +// */ +// private boolean isZipFileHeader(AbstractFile file) { +// if (file.getSize() < readHeaderSize) { +// return false; +// } +// +// int bytesRead = 0; +// try { +// bytesRead = file.read(fileHeaderBuffer, 0, readHeaderSize); +// } catch (TskCoreException ex) { +// //ignore if can't read the first few bytes, not a ZIP +// return false; +// } +// if (bytesRead != readHeaderSize) { +// return false; +// } +// +// ByteBuffer bytes = ByteBuffer.wrap(fileHeaderBuffer); +// int signature = bytes.getInt(); +// +// return signature == ZIP_SIGNATURE_BE; +// } +// +// /** +// * Stream used to unpack the archive to local file +// */ +// private static class UnpackStream implements ISequentialOutStream { +// +// private OutputStream output; +// private String localAbsPath; +// +// UnpackStream(String localAbsPath) { +// try { +// output = new BufferedOutputStream(new FileOutputStream(localAbsPath)); +// } catch (FileNotFoundException ex) { +// logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); +// } +// +// } +// +// @Override +// public int write(byte[] bytes) throws SevenZipException { +// try { +// output.write(bytes); +// } catch (IOException ex) { +// throw new SevenZipException("Error writing unpacked file to: " + localAbsPath, ex); +// } +// return bytes.length; +// } +// +// public void close() { +// if (output != null) { +// try { +// output.flush(); +// output.close(); +// } catch (IOException e) { +// logger.log(Level.SEVERE, "Error closing unpack stream for file: " + localAbsPath); +// } +// } +// } +// } +// +// /** +// * Representation of local directory tree of unpacked archive. Used to track +// * of local tree file hierarchy, archive depth, and files created to easily +// * and reliably get parent AbstractFile for unpacked file. So that we don't +// * have to depend on type of traversal of unpacked files handed to us by +// * 7zip unpacker. 
+// */ +// private static class UnpackedTree { +// +// final String localPathRoot; +// final Data root; //dummy root to hold children +// final FileManager fileManager; +// +// UnpackedTree(String localPathRoot, AbstractFile archiveRoot, FileManager fileManager) { +// this.localPathRoot = localPathRoot; +// this.fileManager = fileManager; +// this.root = new Data(); +// this.root.setFile(archiveRoot); +// this.root.setFileName(archiveRoot.getName()); +// this.root.localRelPath = localPathRoot; +// } +// +// /** +// * Tokenizes filePath passed in and traverses the dir structure, +// * creating data nodes on the path way as needed +// * +// * @param filePath file path with 1 or more tokens separated by / +// * @return child node for the last file token in the filePath +// */ +// Data find(String filePath) { +// String[] toks = filePath.split("[\\/\\\\]"); +// List tokens = new ArrayList(); +// for (int i = 0; i < toks.length; ++i) { +// if (!toks[i].isEmpty()) { +// tokens.add(toks[i]); +// } +// } +// return find(root, tokens); +// } +// +// /** +// * recursive method that traverses the path +// * +// * @param tokenPath +// * @return +// */ +// private Data find(Data parent, List tokenPath) { +// //base case +// if (tokenPath.isEmpty()) { +// return parent; +// } +// +// String childName = tokenPath.remove(0); //step towards base case +// Data child = parent.getChild(childName); +// if (child == null) { +// child = new Data(childName, parent); +// } +// return find(child, tokenPath); +// +// } +// +// /** +// * Get the root file objects (after createDerivedFiles() ) of this tree, +// * so that they can be rescheduled. +// * +// * @return root objects of this unpacked tree +// */ +// List getRootFileObjects() { +// List ret = new ArrayList(); +// for (Data child : root.children) { +// ret.add(child.getFile()); +// } +// return ret; +// } +// +// /** +// * Get the all file objects (after createDerivedFiles() ) of this tree, +// * so that they can be rescheduled. 
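+//     * (Traversal is pre-order: each node is added before its children are
+//     * visited, so parents always precede their contents in the result.)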
+// * +// * @return all file objects of this unpacked tree +// */ +// List getAllFileObjects() { +// List ret = new ArrayList(); +// for (Data child : root.children) { +// getAllFileObjectsRec(ret, child); +// } +// return ret; +// } +// +// private void getAllFileObjectsRec(List list, Data parent) { +// list.add(parent.getFile()); +// for (Data child : parent.children) { +// getAllFileObjectsRec(list, child); +// } +// } +// +// /** +// * Traverse the tree top-down after unzipping is done and create derived +// * files for the entire hierarchy +// */ +// void createDerivedFiles() throws TskCoreException { +// for (Data child : root.children) { +// createDerivedFilesRec(child); +// } +// +// } +// +// private void createDerivedFilesRec(Data node) throws TskCoreException { +// final String fileName = node.getFileName(); +// final String localRelPath = node.getLocalRelPath(); +// final long size = node.getSize(); +// final boolean isFile = node.isIsFile(); +// final AbstractFile parent = node.getParent().getFile(); +// +// try { +// DerivedFile df = fileManager.addDerivedFile(fileName, localRelPath, size, +// node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(), +// isFile, parent, "", MODULE_NAME, "", ""); +// node.setFile(df); +// +// +// } catch (TskCoreException ex) { +// logger.log(Level.SEVERE, "Error adding a derived file to db:" + fileName, ex); +// throw new TskCoreException("Error adding a derived file to db:" + fileName, ex); +// } +// +// //recurse +// for (Data child : node.children) { +// createDerivedFilesRec(child); +// } +// +// +// } +// +// private static class Data { +// +// private String fileName; +// private AbstractFile file; +// private List children = new ArrayList(); +// private String localRelPath; +// private long size; +// private long ctime, crtime, atime, mtime; +// private boolean isFile; +// private Data parent; +// +// //root constructor +// Data() { +// } +// +// //child node constructor +// Data(String fileName, Data parent) { +// this.fileName = fileName; +// this.parent = parent; +// this.localRelPath = parent.localRelPath + File.separator + fileName; +// //new child derived file will be set by unpack() method +// parent.children.add(this); +// +// } +// +// public long getCtime() { +// return ctime; +// } +// +// public long getCrtime() { +// return crtime; +// } +// +// public long getAtime() { +// return atime; +// } +// +// public long getMtime() { +// return mtime; +// } +// +// public void setFileName(String fileName) { +// this.fileName = fileName; +// } +// +// Data getParent() { +// return parent; +// } +// +// void addDerivedInfo(long size, +// boolean isFile, +// long ctime, long crtime, long atime, long mtime) { +// this.size = size; +// this.isFile = isFile; +// this.ctime = ctime; +// this.crtime = crtime; +// this.atime = atime; +// this.mtime = mtime; +// } +// +// void setFile(AbstractFile file) { +// this.file = file; +// } +// +// /** +// * get child by name or null if it doesn't exist +// * +// * @param childFileName +// * @return +// */ +// Data getChild(String childFileName) { +// Data ret = null; +// for (Data child : children) { +// if (child.fileName.equals(childFileName)) { +// ret = child; +// break; +// } +// } +// return ret; +// } +// +// public String getFileName() { +// return fileName; +// } +// +// public AbstractFile getFile() { +// return file; +// } +// +// public String getLocalRelPath() { +// return localRelPath; +// } +// +// public long getSize() { +// return size; +// } +// +// public boolean 
isIsFile() { +// return isFile; +// } +// } +// } +// +// /** +// * Tracks archive hierarchy and archive depth +// */ +// private static class ArchiveDepthCountTree { +// +// //keeps all nodes refs for easy search +// private final List archives = new ArrayList(); +// +// /** +// * Search for previously added parent archive by id +// * +// * @param objectId parent archive object id +// * @return the archive node or null if not found +// */ +// Archive findArchive(long objectId) { +// for (Archive ar : archives) { +// if (ar.objectId == objectId) { +// return ar; +// } +// } +// +// return null; +// } +// +// /** +// * Add a new archive to track of depth +// * +// * @param parent parent archive or null +// * @param objectId object id of the new archive +// * @return the archive added +// */ +// Archive addArchive(Archive parent, long objectId) { +// Archive child = new Archive(parent, objectId); +// archives.add(child); +// return child; +// } +// +// private static class Archive { +// +// int depth; +// long objectId; +// Archive parent; +// List children; +// +// Archive(Archive parent, long objectId) { +// this.parent = parent; +// this.objectId = objectId; +// children = new ArrayList(); +// if (parent != null) { +// parent.children.add(this); +// this.depth = parent.depth + 1; +// } else { +// this.depth = 0; +// } +// } +// +// /** +// * get archive depth of this archive +// * +// * @return +// */ +// int getDepth() { +// return depth; +// } +// } +// } +//} diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index c6afccff95..b006eb386e 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -16,206 +16,204 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.ewfverify; +//package org.sleuthkit.autopsy.ewfverify; +// +//import java.security.MessageDigest; +//import java.security.NoSuchAlgorithmException; +//import java.util.logging.Level; +//import java.util.logging.Logger; +//import javax.xml.bind.DatatypeConverter; +//import org.sleuthkit.autopsy.casemodule.Case; +//import org.sleuthkit.autopsy.coreutils.Version; +//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; +//import org.sleuthkit.autopsy.ingest.IngestMessage; +//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; +//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; +//import org.sleuthkit.autopsy.ingest.IngestModuleInit; +//import org.sleuthkit.autopsy.ingest.IngestServices; +//import org.sleuthkit.datamodel.Content; +//import org.sleuthkit.datamodel.Image; +//import org.sleuthkit.datamodel.SleuthkitCase; +//import org.sleuthkit.datamodel.TskCoreException; +//import org.sleuthkit.datamodel.TskData; - -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.logging.Level; -import java.util.logging.Logger; -import javax.xml.bind.DatatypeConverter; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.coreutils.Version; -import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; -import org.sleuthkit.autopsy.ingest.IngestServices; -import org.sleuthkit.autopsy.ingest.PipelineContext; -import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.Image; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; -import org.sleuthkit.datamodel.TskData; - -/** - * Data Source Ingest Module that generates a hash of an E01 image file and - * verifies it with the value stored in the image. 
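-     * (EWF/E01 evidence containers embed an MD5 of the acquired data; that
-     * embedded value is the stored hash this module verifies against.)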
- * - * @author jwallace - */ -public class EwfVerifyIngestModule extends IngestModuleDataSource { - private static final String MODULE_NAME = "EWF Verify"; - private static final String MODULE_VERSION = Version.getVersion(); - private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; - private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; - private IngestServices services; - private volatile boolean running = false; - private Image img; - private String imgName; - private MessageDigest messageDigest; - private static Logger logger = null; - private static int messageId = 0; - private boolean verified = false; - private boolean skipped = false; - private String calculatedHash = ""; - private String storedHash = ""; - private SleuthkitCase skCase; - - public EwfVerifyIngestModule() { - } - - @Override - public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { - imgName = dataSource.getName(); - try { - img = dataSource.getImage(); - } catch (TskCoreException ex) { - img = null; - logger.log(Level.SEVERE, "Failed to get image from Content.", ex); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, - "Error processing " + imgName)); - return; - } - - // Skip images that are not E01 - if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { - img = null; - logger.log(Level.INFO, "Skipping non-ewf image " + imgName); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, - "Skipping non-ewf image " + imgName)); - skipped = true; - return; - } - - - if ((img.getMd5()!= null) && !img.getMd5().isEmpty()) - { - storedHash = img.getMd5().toLowerCase(); - logger.info("Hash value stored in " + imgName + ": " + storedHash); - - } - else { - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, - "Image " + imgName + " does not have stored hash.")); - return; - } - - logger.log(Level.INFO, "Starting ewf verification of " + img.getName()); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, - "Starting " + imgName)); - - long size = img.getSize(); - if (size == 0) { - logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried."); - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, - "Error getting size of " + imgName + ". Image will not be processed.")); - } - - // Libewf uses a sector size of 64 times the sector size, which is the - // motivation for using it here. - long chunkSize = 64 * img.getSsize(); - chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize; - - int totalChunks = (int) Math.ceil(size / chunkSize); - logger.log(Level.INFO, "Total chunks = " + totalChunks); - int read; - - byte[] data; - controller.switchToDeterminate(totalChunks); - - running = true; - // Read in byte size chunks and update the hash value with the data. 
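-        // (The loop below is the standard streaming-digest pattern: feed each
-        //  fixed-size chunk to messageDigest.update() and call digest() once
-        //  at the end, so memory stays bounded by chunkSize for any image size.)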
- for (int i = 0; i < totalChunks; i++) { - if (controller.isCancelled()) { - running = false; - return; - } - data = new byte[ (int) chunkSize ]; - try { - read = img.read(data, i * chunkSize, chunkSize); - } catch (TskCoreException ex) { - String msg = "Error reading " + imgName + " at chunk " + i; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg)); - logger.log(Level.SEVERE, msg, ex); - return; - } - messageDigest.update(data); - controller.progress(i); - } - - // Finish generating the hash and get it as a string value - calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase(); - verified = calculatedHash.equals(storedHash); - logger.info("Hash calculated from " + imgName + ": " + calculatedHash); - running = false; - } - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - skCase = Case.getCurrentCase().getSleuthkitCase(); - running = false; - verified = false; - skipped = false; - img = null; - imgName = ""; - storedHash = ""; - calculatedHash = ""; - - if (logger == null) { - logger = services.getLogger(this); - } - - if (messageDigest == null) { - try { - messageDigest = MessageDigest.getInstance("MD5"); - } catch (NoSuchAlgorithmException ex) { - logger.log(Level.WARNING, "Error getting md5 algorithm", ex); - throw new RuntimeException("Failed to get MD5 algorithm"); - } - } else { - messageDigest.reset(); - } - } - - @Override - public void complete() { - logger.info("complete() " + this.getName()); - if (skipped == false) { - String msg = verified ? " verified" : " not verified"; - String extra = "
<p>EWF Verification Results for " + imgName + "</p>";
-            extra += "<li>Result:" + msg + "</li>";
-            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
-            extra += "<li>Stored hash: " + storedHash + "</li>
  • "; - services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra)); - logger.info(imgName + msg); - } - } - - @Override - public void stop() { - running = false; - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - @Override - public String getDescription() { - return MODULE_DESCRIPTION; - } - - @Override - public boolean hasBackgroundJobsRunning() { - return running; - } -} +///** +// * Data Source Ingest Module that generates a hash of an E01 image file and +// * verifies it with the value stored in the image. +// * +// * @author jwallace +// */ +//public class EwfVerifyIngestModule extends IngestModuleDataSource { +// private static final String MODULE_NAME = "EWF Verify"; +// private static final String MODULE_VERSION = Version.getVersion(); +// private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; +// private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; +// private IngestServices services; +// private volatile boolean running = false; +// private Image img; +// private String imgName; +// private MessageDigest messageDigest; +// private static Logger logger = null; +// private static int messageId = 0; +// private boolean verified = false; +// private boolean skipped = false; +// private String calculatedHash = ""; +// private String storedHash = ""; +// private SleuthkitCase skCase; +// +// public EwfVerifyIngestModule() { +// } +// +// @Override +// public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { +// imgName = dataSource.getName(); +// try { +// img = dataSource.getImage(); +// } catch (TskCoreException ex) { +// img = null; +// logger.log(Level.SEVERE, "Failed to get image from Content.", ex); +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, +// "Error processing " + imgName)); +// return; +// } +// +// // Skip images that are not E01 +// if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { +// img = null; +// logger.log(Level.INFO, "Skipping non-ewf image " + imgName); +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, +// "Skipping non-ewf image " + imgName)); +// skipped = true; +// return; +// } +// +// +// if ((img.getMd5()!= null) && !img.getMd5().isEmpty()) +// { +// storedHash = img.getMd5().toLowerCase(); +// logger.info("Hash value stored in " + imgName + ": " + storedHash); +// +// } +// else { +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, +// "Image " + imgName + " does not have stored hash.")); +// return; +// } +// +// logger.log(Level.INFO, "Starting ewf verification of " + img.getName()); +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, +// "Starting " + imgName)); +// +// long size = img.getSize(); +// if (size == 0) { +// logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried."); +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, +// "Error getting size of " + imgName + ". Image will not be processed.")); +// } +// +// // Libewf uses a sector size of 64 times the sector size, which is the +// // motivation for using it here. +// long chunkSize = 64 * img.getSsize(); +// chunkSize = (chunkSize == 0) ? 
DEFAULT_CHUNK_SIZE : chunkSize; +// +// int totalChunks = (int) Math.ceil(size / chunkSize); +// logger.log(Level.INFO, "Total chunks = " + totalChunks); +// int read; +// +// byte[] data; +// controller.switchToDeterminate(totalChunks); +// +// running = true; +// // Read in byte size chunks and update the hash value with the data. +// for (int i = 0; i < totalChunks; i++) { +// if (controller.isCancelled()) { +// running = false; +// return; +// } +// data = new byte[ (int) chunkSize ]; +// try { +// read = img.read(data, i * chunkSize, chunkSize); +// } catch (TskCoreException ex) { +// String msg = "Error reading " + imgName + " at chunk " + i; +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg)); +// logger.log(Level.SEVERE, msg, ex); +// return; +// } +// messageDigest.update(data); +// controller.progress(i); +// } +// +// // Finish generating the hash and get it as a string value +// calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase(); +// verified = calculatedHash.equals(storedHash); +// logger.info("Hash calculated from " + imgName + ": " + calculatedHash); +// running = false; +// } +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// skCase = Case.getCurrentCase().getSleuthkitCase(); +// running = false; +// verified = false; +// skipped = false; +// img = null; +// imgName = ""; +// storedHash = ""; +// calculatedHash = ""; +// +// if (logger == null) { +// logger = services.getLogger(this); +// } +// +// if (messageDigest == null) { +// try { +// messageDigest = MessageDigest.getInstance("MD5"); +// } catch (NoSuchAlgorithmException ex) { +// logger.log(Level.WARNING, "Error getting md5 algorithm", ex); +// throw new RuntimeException("Failed to get MD5 algorithm"); +// } +// } else { +// messageDigest.reset(); +// } +// } +// +// @Override +// public void complete() { +// logger.info("complete() " + this.getName()); +// if (skipped == false) { +// String msg = verified ? " verified" : " not verified"; +// String extra = "
<p>EWF Verification Results for " + imgName + "</p>";
+//            extra += "<li>Result:" + msg + "</li>";
+//            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
+//            extra += "<li>Stored hash: " + storedHash + "</li>
  • "; +// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra)); +// logger.info(imgName + msg); +// } +// } +// +// @Override +// public void stop() { +// running = false; +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// @Override +// public String getDescription() { +// return MODULE_DESCRIPTION; +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return running; +// } +//} diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index 4527cf737b..d4371f8f05 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -34,7 +34,6 @@ import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleContentEvent; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -import org.sleuthkit.autopsy.ingest.PipelineContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; @@ -48,382 +47,382 @@ import org.sleuthkit.datamodel.TskException; * File-level ingest module that detects MBOX files based on signature. * Understands Thunderbird folder layout to provide additional structure and metadata. */ -public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { - - private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()); - private static ThunderbirdMboxFileIngestModule instance = null; - private IngestServices services; - private static final String MODULE_NAME = "Email Parser"; - private final String hashDBModuleName = "Hash Lookup"; - final public static String MODULE_VERSION = Version.getVersion(); - private int messageId = 0; - private FileManager fileManager; - - public static synchronized ThunderbirdMboxFileIngestModule getDefault() { - if (instance == null) { - instance = new ThunderbirdMboxFileIngestModule(); - } - return instance; - } - - @Override - public ProcessResult process(PipelineContextingestContext, AbstractFile abstractFile) { - - // skip known - if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) { - return ProcessResult.OK; - } - - //skip unalloc - if(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { - return ProcessResult.OK; - } - - //file has read error, stop processing it - // @@@ I don't really like this - // we don't know if Hash was run or if it had lookup errors - IngestModuleAbstractFile.ProcessResult hashDBResult = - services.getAbstractFileModuleResult(hashDBModuleName); - if (hashDBResult == IngestModuleAbstractFile.ProcessResult.ERROR) { - return ProcessResult.ERROR; - } - - if (abstractFile.isVirtual()) { - return ProcessResult.OK; - } - - // check its signature - boolean isMbox = false; - try { - byte[] t = new byte[64]; - if (abstractFile.getSize() > 64) { - int byteRead = abstractFile.read(t, 0, 64); - if (byteRead > 0) { - isMbox = MboxParser.isValidMimeTypeMbox(t); - } - } - } catch (TskException ex) { - logger.log(Level.WARNING, null, ex); - } - - if (isMbox) { - 
return processMBox(abstractFile, ingestContext); - } - - int extIndex = abstractFile.getName().lastIndexOf("."); - String ext = (extIndex == -1 ? "" : abstractFile.getName().substring(extIndex)); - if (PstParser.isPstFile(abstractFile)) { - return processPst(ingestContext, abstractFile); - } - - return ProcessResult.OK; - } - - /** - * Processes a pst/ost data file and extracts and adds email artifacts. - * - * @param abstractFile The pst/ost data file to process. - * @return - */ - private ProcessResult processPst(PipelineContextingestContext, AbstractFile abstractFile) { - String fileName = getTempPath() + File.separator + abstractFile.getName() - + "-" + String.valueOf(abstractFile.getId()); - File file = new File(fileName); - - if (abstractFile.getSize() >= services.getFreeDiskSpace()) { - logger.log(Level.WARNING, "Not enough disk space to write file to disk."); - IngestMessage msg = IngestMessage.createErrorMessage(messageId++, this, getName(), "Out of disk space. Can't copy " + abstractFile.getName() + " to parse."); - services.postMessage(msg); - return ProcessResult.OK; - } - - try { - ContentUtils.writeToFile(abstractFile, file); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed writing pst file to disk.", ex); - return ProcessResult.OK; - } - - PstParser parser = new PstParser(services); - PstParser.ParseResult result = parser.parse(file); - - if (result == PstParser.ParseResult.OK) { - // parse success: Process email and add artifacts - processEmails(parser.getResults(), abstractFile, ingestContext); - } else if (result == PstParser.ParseResult.ENCRYPT) { - // encrypted pst: Add encrypted file artifact - try { - BlackboardArtifact generalInfo = abstractFile.getGenInfoArtifact(); - generalInfo.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID(), - MODULE_NAME, "File-level Encryption")); - } catch (TskCoreException ex) { - logger.log(Level.INFO, "Failed to add encryption attribute to file: " + abstractFile.getName()); - } - } else { - // parsing error: log message - postErrorMessage("Error while processing " + abstractFile.getName(), - "Only files from Outlook 2003 and later are supported."); - logger.log(Level.INFO, "PSTParser failed to parse " + abstractFile.getName()); - return ProcessResult.ERROR; - } - - if (file.delete() == false) { - logger.log(Level.INFO, "Failed to delete temp file: " + file.getName()); - } - - String errors = parser.getErrors(); - if (errors.isEmpty() == false) { - postErrorMessage("Error while processing " + abstractFile.getName(), errors); - } - - return ProcessResult.OK; - } - - /** - * Parse and extract email messages and attachments from an MBox file. 
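-     * For example (hypothetical path): an MBOX at
-     * ".../ImapMail/imap.example.com/INBOX.sbd/Work" maps to the e-mail
-     * folder "/imap.example.com/INBOX/Work" once ".sbd" segments are removed.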
- * @param abstractFile - * @param ingestContext - * @return - */ - private ProcessResult processMBox(AbstractFile abstractFile, PipelineContextingestContext) { - String mboxFileName = abstractFile.getName(); - String mboxParentDir = abstractFile.getParentPath(); - // use the local path to determine the e-mail folder structure - String emailFolder = ""; - // email folder is everything after "Mail" or ImapMail - if (mboxParentDir.contains("/Mail/")) { - emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/Mail/") + 5); - } - else if (mboxParentDir.contains("/ImapMail/")) { - emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/ImapMail/") + 9); - } - emailFolder = emailFolder + mboxFileName; - emailFolder = emailFolder.replaceAll(".sbd", ""); - - String fileName = getTempPath() + File.separator + abstractFile.getName() - + "-" + String.valueOf(abstractFile.getId()); - File file = new File(fileName); - - if (abstractFile.getSize() >= services.getFreeDiskSpace()) { - logger.log(Level.WARNING, "Not enough disk space to write file to disk."); - postErrorMessage("Error while processing " + abstractFile.getName(), - "Out of disk space. Can't copy file to parse."); - return ProcessResult.OK; - } - - try { - ContentUtils.writeToFile(abstractFile, file); - } catch (IOException ex) { - logger.log(Level.WARNING, "Failed writing mbox file to disk.", ex); - return ProcessResult.OK; - } - - MboxParser parser = new MboxParser(services, emailFolder); - List emails = parser.parse(file); - - processEmails(emails, abstractFile, ingestContext); - - if (file.delete() == false) { - logger.log(Level.INFO, "Failed to delete temp file: " + file.getName()); - } - - String errors = parser.getErrors(); - if (errors.isEmpty() == false) { - postErrorMessage("Error while processing " + abstractFile.getName(), errors); - } - - return ProcessResult.OK; - } - - /** - * Get a path to a temporary folder. - * @return - */ - public static String getTempPath() { - String tmpDir = Case.getCurrentCase().getTempDirectory() + File.separator - + "EmailParser"; - File dir = new File(tmpDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return tmpDir; - } - - public static String getModuleOutputPath() { - String outDir = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator + - MODULE_NAME; - File dir = new File(outDir); - if (dir.exists() == false) { - dir.mkdirs(); - } - return outDir; - } - - public static String getRelModuleOutputPath() { - return Case.getModulesOutputDirRelPath() + File.separator + - MODULE_NAME; - } - - @Override - public void complete() { - } - - @Override - public String getName() { - return MODULE_NAME; - } - - @Override - public String getDescription() { - return "This module detects and parses mbox and pst/ost files and populates email artifacts in the blackboard."; - } - - @Override - public String getVersion() { - return MODULE_VERSION; - } - - - @Override - public void init(IngestModuleInit initContext) { - services = IngestServices.getDefault(); - fileManager = Case.getCurrentCase().getServices().getFileManager(); - } - - @Override - public void stop() { - } - - @Override - public boolean hasBackgroundJobsRunning() { - return false; - } - - /** - * Take the extracted information in the email messages and add the - * appropriate artifacts and derived files. 
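-     * Attachments become derived files that are rescheduled for ingest, and a
-     * single TSK_EMAIL_MSG data event is fired once all messages are added.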
- * @param emails - * @param abstractFile - * @param ingestContext - */ - private void processEmails(List emails, AbstractFile abstractFile, PipelineContextingestContext) { - List derivedFiles = new ArrayList<>(); - for (EmailMessage email : emails) { - if (email.hasAttachment()) { - derivedFiles.addAll(handleAttachments(email.getAttachments(), abstractFile)); - } - addArtifact(email, abstractFile); - } - - if (derivedFiles.isEmpty() == false) { - for (AbstractFile derived : derivedFiles) { - services.fireModuleContentEvent(new ModuleContentEvent(abstractFile)); - services.scheduleFile(derived, ingestContext); - } - } - services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG)); - } - - /** - * Add the given attachments as derived files and reschedule them for ingest. - * @param attachments - * @param abstractFile - * @return - */ - private List handleAttachments(List attachments, AbstractFile abstractFile) { - List files = new ArrayList<>(); - for (Attachment attach : attachments) { - String filename = attach.getName(); - long crTime = attach.getCrTime(); - long mTime = attach.getmTime(); - long aTime = attach.getaTime(); - long cTime = attach.getcTime(); - String relPath = attach.getLocalPath(); - long size = attach.getSize(); - - try { - DerivedFile df = fileManager.addDerivedFile(filename, relPath, - size, cTime, crTime, aTime, mTime, true, abstractFile, "", - MODULE_NAME, MODULE_VERSION, ""); - files.add(df); - } catch (TskCoreException ex) { - postErrorMessage("Error processing " + abstractFile.getName(), - "Failed to add attachment named " + filename + " to the case."); - logger.log(Level.INFO, "", ex); - } - } - return files; - } - - /** - * Add a blackboard artifact for the given email message. 
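- *
- * Sketch of the two-step blackboard pattern used below: attributes are
- * collected for each non-empty field of the message, then attached to a
- * single TSK_EMAIL_MSG artifact:
- * <pre>
- * BlackboardArtifact bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG);
- * bbart.addAttributes(bbattributes);
- * </pre>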
- * @param email - * @param abstractFile - */ - private void addArtifact(EmailMessage email, AbstractFile abstractFile) { - List bbattributes = new ArrayList<>(); - String to = email.getRecipients(); - String cc = email.getCc(); - String bcc = email.getBcc(); - String from = email.getSender(); - long dateL = email.getSentDate(); - String body = email.getTextBody(); - String bodyHTML = email.getHtmlBody(); - String rtf = email.getRtfBody(); - String subject = email.getSubject(); - long id = email.getId(); - String localPath = email.getLocalPath(); - - if (to.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID(), MODULE_NAME, to)); - } - if (cc.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CC.getTypeID(), MODULE_NAME, cc)); - } - if (bcc.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_BCC.getTypeID(), MODULE_NAME, bcc)); - } - if (from.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID(), MODULE_NAME, from)); - } - if (dateL > 0) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_RCVD.getTypeID(), MODULE_NAME, dateL)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_SENT.getTypeID(), MODULE_NAME, dateL)); - } - if (body.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_PLAIN.getTypeID(), MODULE_NAME, body)); - } - if (bodyHTML.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_HTML.getTypeID(), MODULE_NAME, bodyHTML)); - } - if (rtf.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_RTF.getTypeID(), MODULE_NAME, rtf)); - } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_MSG_ID.getTypeID(), MODULE_NAME, ((id < 0L) ? 
"Not available" : String.valueOf(id)))); - if (subject.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID(), MODULE_NAME, subject)); - } - if (localPath.isEmpty() == false) { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, localPath)); - } else { - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, "/foo/bar")); - } - - try { - BlackboardArtifact bbart; - bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG); - bbart.addAttributes(bbattributes); - } catch (TskCoreException ex) { - logger.log(Level.WARNING, null, ex); - } - } - - void postErrorMessage(String subj, String details) { - IngestMessage ingestMessage = IngestMessage.createErrorMessage(messageId++, this, subj, details); - services.postMessage(ingestMessage); - } - - IngestServices getServices() { - return services; - } -} \ No newline at end of file +//public class ThunderbirdMboxFileIngestModule extends IngestModuleAbstractFile { +// +// private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()); +// private static ThunderbirdMboxFileIngestModule instance = null; +// private IngestServices services; +// private static final String MODULE_NAME = "Email Parser"; +// private final String hashDBModuleName = "Hash Lookup"; +// final public static String MODULE_VERSION = Version.getVersion(); +// private int messageId = 0; +// private FileManager fileManager; +// +// public static synchronized ThunderbirdMboxFileIngestModule getDefault() { +// if (instance == null) { +// instance = new ThunderbirdMboxFileIngestModule(); +// } +// return instance; +// } +// +// @Override +// public ProcessResult process(PipelineContextingestContext, AbstractFile abstractFile) { +// +// // skip known +// if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) { +// return ProcessResult.OK; +// } +// +// //skip unalloc +// if(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { +// return ProcessResult.OK; +// } +// +// //file has read error, stop processing it +// // @@@ I don't really like this +// // we don't know if Hash was run or if it had lookup errors +// IngestModuleAbstractFile.ProcessResult hashDBResult = +// services.getAbstractFileModuleResult(hashDBModuleName); +// if (hashDBResult == IngestModuleAbstractFile.ProcessResult.ERROR) { +// return ProcessResult.ERROR; +// } +// +// if (abstractFile.isVirtual()) { +// return ProcessResult.OK; +// } +// +// // check its signature +// boolean isMbox = false; +// try { +// byte[] t = new byte[64]; +// if (abstractFile.getSize() > 64) { +// int byteRead = abstractFile.read(t, 0, 64); +// if (byteRead > 0) { +// isMbox = MboxParser.isValidMimeTypeMbox(t); +// } +// } +// } catch (TskException ex) { +// logger.log(Level.WARNING, null, ex); +// } +// +// if (isMbox) { +// return processMBox(abstractFile, ingestContext); +// } +// +// int extIndex = abstractFile.getName().lastIndexOf("."); +// String ext = (extIndex == -1 ? "" : abstractFile.getName().substring(extIndex)); +// if (PstParser.isPstFile(abstractFile)) { +// return processPst(ingestContext, abstractFile); +// } +// +// return ProcessResult.OK; +// } +// +// /** +// * Processes a pst/ost data file and extracts and adds email artifacts. +// * +// * @param abstractFile The pst/ost data file to process. 
+// * @return +// */ +// private ProcessResult processPst(PipelineContextingestContext, AbstractFile abstractFile) { +// String fileName = getTempPath() + File.separator + abstractFile.getName() +// + "-" + String.valueOf(abstractFile.getId()); +// File file = new File(fileName); +// +// if (abstractFile.getSize() >= services.getFreeDiskSpace()) { +// logger.log(Level.WARNING, "Not enough disk space to write file to disk."); +// IngestMessage msg = IngestMessage.createErrorMessage(messageId++, this, getName(), "Out of disk space. Can't copy " + abstractFile.getName() + " to parse."); +// services.postMessage(msg); +// return ProcessResult.OK; +// } +// +// try { +// ContentUtils.writeToFile(abstractFile, file); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed writing pst file to disk.", ex); +// return ProcessResult.OK; +// } +// +// PstParser parser = new PstParser(services); +// PstParser.ParseResult result = parser.parse(file); +// +// if (result == PstParser.ParseResult.OK) { +// // parse success: Process email and add artifacts +// processEmails(parser.getResults(), abstractFile, ingestContext); +// } else if (result == PstParser.ParseResult.ENCRYPT) { +// // encrypted pst: Add encrypted file artifact +// try { +// BlackboardArtifact generalInfo = abstractFile.getGenInfoArtifact(); +// generalInfo.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID(), +// MODULE_NAME, "File-level Encryption")); +// } catch (TskCoreException ex) { +// logger.log(Level.INFO, "Failed to add encryption attribute to file: " + abstractFile.getName()); +// } +// } else { +// // parsing error: log message +// postErrorMessage("Error while processing " + abstractFile.getName(), +// "Only files from Outlook 2003 and later are supported."); +// logger.log(Level.INFO, "PSTParser failed to parse " + abstractFile.getName()); +// return ProcessResult.ERROR; +// } +// +// if (file.delete() == false) { +// logger.log(Level.INFO, "Failed to delete temp file: " + file.getName()); +// } +// +// String errors = parser.getErrors(); +// if (errors.isEmpty() == false) { +// postErrorMessage("Error while processing " + abstractFile.getName(), errors); +// } +// +// return ProcessResult.OK; +// } +// +// /** +// * Parse and extract email messages and attachments from an MBox file. 
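+// *
+// * The mbox content is first copied out of the case image to a local file
+// * under getTempPath(), since the parser operates on java.io.File; the
+// * temporary copy is deleted once parsing completes.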
+// * @param abstractFile +// * @param ingestContext +// * @return +// */ +// private ProcessResult processMBox(AbstractFile abstractFile, PipelineContextingestContext) { +// String mboxFileName = abstractFile.getName(); +// String mboxParentDir = abstractFile.getParentPath(); +// // use the local path to determine the e-mail folder structure +// String emailFolder = ""; +// // email folder is everything after "Mail" or ImapMail +// if (mboxParentDir.contains("/Mail/")) { +// emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/Mail/") + 5); +// } +// else if (mboxParentDir.contains("/ImapMail/")) { +// emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/ImapMail/") + 9); +// } +// emailFolder = emailFolder + mboxFileName; +// emailFolder = emailFolder.replaceAll(".sbd", ""); +// +// String fileName = getTempPath() + File.separator + abstractFile.getName() +// + "-" + String.valueOf(abstractFile.getId()); +// File file = new File(fileName); +// +// if (abstractFile.getSize() >= services.getFreeDiskSpace()) { +// logger.log(Level.WARNING, "Not enough disk space to write file to disk."); +// postErrorMessage("Error while processing " + abstractFile.getName(), +// "Out of disk space. Can't copy file to parse."); +// return ProcessResult.OK; +// } +// +// try { +// ContentUtils.writeToFile(abstractFile, file); +// } catch (IOException ex) { +// logger.log(Level.WARNING, "Failed writing mbox file to disk.", ex); +// return ProcessResult.OK; +// } +// +// MboxParser parser = new MboxParser(services, emailFolder); +// List emails = parser.parse(file); +// +// processEmails(emails, abstractFile, ingestContext); +// +// if (file.delete() == false) { +// logger.log(Level.INFO, "Failed to delete temp file: " + file.getName()); +// } +// +// String errors = parser.getErrors(); +// if (errors.isEmpty() == false) { +// postErrorMessage("Error while processing " + abstractFile.getName(), errors); +// } +// +// return ProcessResult.OK; +// } +// +// /** +// * Get a path to a temporary folder. 
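+// * The folder is the case temporary directory plus "EmailParser"; it is
+// * created on first use if it does not already exist.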
+// * @return +// */ +// public static String getTempPath() { +// String tmpDir = Case.getCurrentCase().getTempDirectory() + File.separator +// + "EmailParser"; +// File dir = new File(tmpDir); +// if (dir.exists() == false) { +// dir.mkdirs(); +// } +// return tmpDir; +// } +// +// public static String getModuleOutputPath() { +// String outDir = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator + +// MODULE_NAME; +// File dir = new File(outDir); +// if (dir.exists() == false) { +// dir.mkdirs(); +// } +// return outDir; +// } +// +// public static String getRelModuleOutputPath() { +// return Case.getModulesOutputDirRelPath() + File.separator + +// MODULE_NAME; +// } +// +// @Override +// public void complete() { +// } +// +// @Override +// public String getName() { +// return MODULE_NAME; +// } +// +// @Override +// public String getDescription() { +// return "This module detects and parses mbox and pst/ost files and populates email artifacts in the blackboard."; +// } +// +// @Override +// public String getVersion() { +// return MODULE_VERSION; +// } +// +// +// @Override +// public void init(IngestModuleInit initContext) { +// services = IngestServices.getDefault(); +// fileManager = Case.getCurrentCase().getServices().getFileManager(); +// } +// +// @Override +// public void stop() { +// } +// +// @Override +// public boolean hasBackgroundJobsRunning() { +// return false; +// } +// +// /** +// * Take the extracted information in the email messages and add the +// * appropriate artifacts and derived files. +// * @param emails +// * @param abstractFile +// * @param ingestContext +// */ +// private void processEmails(List emails, AbstractFile abstractFile, PipelineContextingestContext) { +// List derivedFiles = new ArrayList<>(); +// for (EmailMessage email : emails) { +// if (email.hasAttachment()) { +// derivedFiles.addAll(handleAttachments(email.getAttachments(), abstractFile)); +// } +// addArtifact(email, abstractFile); +// } +// +// if (derivedFiles.isEmpty() == false) { +// for (AbstractFile derived : derivedFiles) { +// services.fireModuleContentEvent(new ModuleContentEvent(abstractFile)); +// services.scheduleFile(derived, ingestContext); +// } +// } +// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG)); +// } +// +// /** +// * Add the given attachments as derived files and reschedule them for ingest. +// * @param attachments +// * @param abstractFile +// * @return +// */ +// private List handleAttachments(List attachments, AbstractFile abstractFile) { +// List files = new ArrayList<>(); +// for (Attachment attach : attachments) { +// String filename = attach.getName(); +// long crTime = attach.getCrTime(); +// long mTime = attach.getmTime(); +// long aTime = attach.getaTime(); +// long cTime = attach.getcTime(); +// String relPath = attach.getLocalPath(); +// long size = attach.getSize(); +// +// try { +// DerivedFile df = fileManager.addDerivedFile(filename, relPath, +// size, cTime, crTime, aTime, mTime, true, abstractFile, "", +// MODULE_NAME, MODULE_VERSION, ""); +// files.add(df); +// } catch (TskCoreException ex) { +// postErrorMessage("Error processing " + abstractFile.getName(), +// "Failed to add attachment named " + filename + " to the case."); +// logger.log(Level.INFO, "", ex); +// } +// } +// return files; +// } +// +// /** +// * Add a blackboard artifact for the given email message. 
+// * @param email +// * @param abstractFile +// */ +// private void addArtifact(EmailMessage email, AbstractFile abstractFile) { +// List bbattributes = new ArrayList<>(); +// String to = email.getRecipients(); +// String cc = email.getCc(); +// String bcc = email.getBcc(); +// String from = email.getSender(); +// long dateL = email.getSentDate(); +// String body = email.getTextBody(); +// String bodyHTML = email.getHtmlBody(); +// String rtf = email.getRtfBody(); +// String subject = email.getSubject(); +// long id = email.getId(); +// String localPath = email.getLocalPath(); +// +// if (to.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_TO.getTypeID(), MODULE_NAME, to)); +// } +// if (cc.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CC.getTypeID(), MODULE_NAME, cc)); +// } +// if (bcc.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_BCC.getTypeID(), MODULE_NAME, bcc)); +// } +// if (from.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_FROM.getTypeID(), MODULE_NAME, from)); +// } +// if (dateL > 0) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_RCVD.getTypeID(), MODULE_NAME, dateL)); +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_SENT.getTypeID(), MODULE_NAME, dateL)); +// } +// if (body.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_PLAIN.getTypeID(), MODULE_NAME, body)); +// } +// if (bodyHTML.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_HTML.getTypeID(), MODULE_NAME, bodyHTML)); +// } +// if (rtf.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_RTF.getTypeID(), MODULE_NAME, rtf)); +// } +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_MSG_ID.getTypeID(), MODULE_NAME, ((id < 0L) ? 
"Not available" : String.valueOf(id)))); +// if (subject.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SUBJECT.getTypeID(), MODULE_NAME, subject)); +// } +// if (localPath.isEmpty() == false) { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, localPath)); +// } else { +// bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), MODULE_NAME, "/foo/bar")); +// } +// +// try { +// BlackboardArtifact bbart; +// bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG); +// bbart.addAttributes(bbattributes); +// } catch (TskCoreException ex) { +// logger.log(Level.WARNING, null, ex); +// } +// } +// +// void postErrorMessage(String subj, String details) { +// IngestMessage ingestMessage = IngestMessage.createErrorMessage(messageId++, this, subj, details); +// services.postMessage(ingestMessage); +// } +// +// IngestServices getServices() { +// return services; +// } +//} \ No newline at end of file From 274457762cfdb52bdfad7e4c757028563fcef57d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 26 Feb 2014 19:08:53 -0500 Subject: [PATCH 15/48] Complete first minimally working version of new file ingest infrastructure --- .../ingest/IngestConfigurationPanel.java | 3 +- .../AbstractFileTikaTextExtract.java | 3 +- .../KeywordSearchIngestModule.java | 2450 ++++++++--------- 3 files changed, 1221 insertions(+), 1235 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java index 9a8468af3d..44dd491e81 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestConfigurationPanel.java @@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.corecomponents.AdvancedConfigurationDialog; private List modules = new ArrayList<>(); private boolean processUnallocatedSpace = false; private IngestModuleModel selectedModule = null; - private IngestModulesTableModel tableModel = null; IngestConfigurationPanel(List moduleTemplates, boolean processUnallocatedSpace) { for (IngestModuleTemplate moduleTemplate : moduleTemplates) { @@ -80,7 +79,7 @@ import org.sleuthkit.autopsy.corecomponents.AdvancedConfigurationDialog; } private void customizeComponents() { - modulesTable.setModel(tableModel); + modulesTable.setModel(new IngestModulesTableModel()); modulesTable.setTableHeader(null); modulesTable.setSelectionMode(ListSelectionModel.SINGLE_SELECTION); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java index 38c93631ca..2aabfb480e 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java @@ -35,7 +35,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.ReadContentInputStream; import org.apache.tika.Tika; @@ -57,7 +56,7 @@ import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; */ class AbstractFileTikaTextExtract implements AbstractFileExtract { - private static final Logger logger = 
Logger.getLogger(IngestModuleAbstractFile.class.getName()); + private static final Logger logger = Logger.getLogger(AbstractFileTikaTextExtract.class.getName()); private static final Charset OUTPUT_CHARSET = Server.DEFAULT_INDEXED_TEXT_CHARSET; static final int MAX_EXTR_TEXT_CHARS = 512 * 1024; private static final int SINGLE_READ_CHARS = 1024; diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index c25df335d4..fc4679ae0e 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -17,56 +17,56 @@ * limitations under the License. */ -//package org.sleuthkit.autopsy.keywordsearch; -// -//import java.awt.event.ActionEvent; -//import java.awt.event.ActionListener; -//import java.io.IOException; -//import java.io.InputStream; -//import java.util.ArrayList; -//import java.util.Collection; -//import java.util.HashMap; -//import java.util.HashSet; -//import java.util.List; -//import java.util.Map; -//import java.util.Set; -//import java.util.concurrent.CancellationException; -//import java.util.concurrent.locks.Lock; -//import java.util.concurrent.locks.ReentrantReadWriteLock; -//import java.util.logging.Level; -//import org.openide.util.NbBundle; -//import org.sleuthkit.autopsy.coreutils.Logger; -//import javax.swing.SwingUtilities; -//import javax.swing.SwingWorker; -//import javax.swing.Timer; -//import org.apache.tika.Tika; -//import org.netbeans.api.progress.aggregate.AggregateProgressFactory; -//import org.netbeans.api.progress.aggregate.AggregateProgressHandle; -//import org.netbeans.api.progress.aggregate.ProgressContributor; -//import org.openide.util.Cancellable; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.autopsy.coreutils.EscapeUtil; -//import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -//import org.sleuthkit.autopsy.coreutils.StopWatch; -//import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; -//import org.sleuthkit.autopsy.coreutils.Version; -//import org.sleuthkit.autopsy.ingest.IngestServices; -//import org.sleuthkit.autopsy.ingest.IngestMessage; -//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -//import org.sleuthkit.autopsy.ingest.IngestModuleInit; -//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -//import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; -//import org.sleuthkit.datamodel.BlackboardArtifact; -//import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -//import org.sleuthkit.datamodel.BlackboardAttribute; -//import org.sleuthkit.datamodel.AbstractFile; -//import org.sleuthkit.datamodel.ReadContentInputStream; -//import org.sleuthkit.datamodel.SleuthkitCase; -//import org.sleuthkit.datamodel.TskCoreException; -//import org.sleuthkit.datamodel.TskData; -//import org.sleuthkit.datamodel.TskData.FileKnown; +package org.sleuthkit.autopsy.keywordsearch; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CancellationException; +import 
java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.logging.Level; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.coreutils.Logger; +import javax.swing.SwingUtilities; +import javax.swing.SwingWorker; +import javax.swing.Timer; +import org.apache.tika.Tika; +import org.netbeans.api.progress.aggregate.AggregateProgressFactory; +import org.netbeans.api.progress.aggregate.AggregateProgressHandle; +import org.netbeans.api.progress.aggregate.ProgressContributor; +import org.openide.util.Cancellable; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.coreutils.EscapeUtil; +import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.autopsy.coreutils.StopWatch; +import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; +import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; +import org.sleuthkit.autopsy.ingest.IngestModuleInit; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.ReadContentInputStream; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.TskData.FileKnown; + +// RJCTODO: Update for new infrastructure /** * An ingest module on a file level Performs indexing of allocated and Solr * supported files, string extraction and indexing of unallocated and not Solr @@ -77,115 +77,114 @@ * * Registered as a module in layer.xml */ -//public final class KeywordSearchIngestModule extends IngestModuleAbstractFile { -// -// enum UpdateFrequency { -// -// FAST(20), -// AVG(10), -// SLOW(5), -// SLOWEST(1), -// DEFAULT(5); -// private final int time; -// -// UpdateFrequency(int time) { -// this.time = time; -// } -// -// int getTime() { -// return time; -// } -// }; -// private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); -// public static final String MODULE_NAME = NbBundle.getMessage(KeywordSearchIngestModule.class, -// "KeywordSearchIngestModule.moduleName"); -// public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class, -// "KeywordSearchIngestModule.moduleDescription"); -// final public static String MODULE_VERSION = Version.getVersion(); -// private static KeywordSearchIngestModule instance = null; -// private IngestServices services; -// private Ingester ingester = null; -// private volatile boolean commitIndex = false; //whether to commit index next time -// private volatile boolean runSearcher = false; //whether to run searcher next time -// private List keywords; //keywords to search -// private List keywordLists; // lists currently being searched -// private Map keywordToList; //keyword to list name mapping -// private Timer commitTimer; -// private Timer searchTimer; -// private Indexer indexer; -// private Searcher currentSearcher; -// private Searcher finalSearcher; -// private volatile boolean searcherDone = true; //mark as done, until it's inited -// private 
Map> currentResults; -// //only search images from current ingest, not images previously ingested/indexed -// //accessed read-only by searcher thread -// private Set curDataSourceIds; -// private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy -// private static final Lock searcherLock = rwLock.writeLock(); -// private volatile int messageID = 0; -// private boolean processedFiles; -// private volatile boolean finalSearcherDone = true; //mark as done, until it's inited -// private final String hashDBModuleName = NbBundle -// .getMessage(this.getClass(), "KeywordSearchIngestModule.hashDbModuleName"); //NOTE this needs to match the HashDB module getName() -// private SleuthkitCase caseHandle = null; -// private static List textExtractors; -// private static AbstractFileStringExtract stringExtractor; -// private boolean initialized = false; -// private KeywordSearchIngestSimplePanel simpleConfigPanel; -// private KeywordSearchConfigurationPanel advancedConfigPanel; -// private Tika tikaFormatDetector; -// -// -// private enum IngestStatus { -// TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested -// STRINGS_INGESTED, ///< Strings were extracted from file -// METADATA_INGESTED, ///< No content, so we just text_ingested metadata -// SKIPPED_ERROR_INDEXING, ///< File was skipped because index engine had problems -// SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of text extraction issues -// SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it -// }; -// private Map ingestStatus; -// -// //private constructor to ensure singleton instance -// private KeywordSearchIngestModule() { -// } -// -// /** -// * Returns singleton instance of the module, creates one if needed -// * -// * @return instance of the module -// */ -// public static synchronized KeywordSearchIngestModule getDefault() { -// if (instance == null) { -// instance = new KeywordSearchIngestModule(); -// } -// return instance; -// } -// -// @Override -// public ProcessResult process(PipelineContext pipelineContext, AbstractFile abstractFile) { -// -// if (initialized == false) //error initializing indexing/Solr -// { -// logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); -// ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); +public final class KeywordSearchIngestModule { + + enum UpdateFrequency { + + FAST(20), + AVG(10), + SLOW(5), + SLOWEST(1), + DEFAULT(5); + private final int time; + + UpdateFrequency(int time) { + this.time = time; + } + + int getTime() { + return time; + } + }; + private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName()); + public static final String MODULE_NAME = NbBundle.getMessage(KeywordSearchIngestModule.class, + "KeywordSearchIngestModule.moduleName"); + public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class, + "KeywordSearchIngestModule.moduleDescription"); + final public static String MODULE_VERSION = Version.getVersion(); + private static KeywordSearchIngestModule instance = null; + private IngestServices services; + private Ingester ingester = null; + private volatile boolean commitIndex = false; //whether to commit index next time + private volatile boolean runSearcher = false; //whether to run searcher next time + private List keywords; //keywords to search + private List keywordLists; // lists currently being searched + private Map 
keywordToList; //keyword to list name mapping + private Timer commitTimer; + private Timer searchTimer; + private Indexer indexer; + private Searcher currentSearcher; + private Searcher finalSearcher; + private volatile boolean searcherDone = true; //mark as done, until it's inited + private Map> currentResults; + //only search images from current ingest, not images previously ingested/indexed + //accessed read-only by searcher thread + private Set curDataSourceIds; + private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy + private static final Lock searcherLock = rwLock.writeLock(); + private volatile int messageID = 0; + private boolean processedFiles; + private volatile boolean finalSearcherDone = true; //mark as done, until it's inited + private final String hashDBModuleName = NbBundle + .getMessage(this.getClass(), "KeywordSearchIngestModule.hashDbModuleName"); //NOTE this needs to match the HashDB module getName() + private SleuthkitCase caseHandle = null; + private static List textExtractors; + private static AbstractFileStringExtract stringExtractor; + private boolean initialized = false; + private KeywordSearchIngestSimplePanel simpleConfigPanel; + private KeywordSearchConfigurationPanel advancedConfigPanel; + private Tika tikaFormatDetector; + + + private enum IngestStatus { + TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested + STRINGS_INGESTED, ///< Strings were extracted from file + METADATA_INGESTED, ///< No content, so we just text_ingested metadata + SKIPPED_ERROR_INDEXING, ///< File was skipped because index engine had problems + SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of text extraction issues + SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it + }; + private Map ingestStatus; + + //private constructor to ensure singleton instance + private KeywordSearchIngestModule() { + } + + /** + * Returns singleton instance of the module, creates one if needed + * + * @return instance of the module + */ + public static synchronized KeywordSearchIngestModule getDefault() { + if (instance == null) { + instance = new KeywordSearchIngestModule(); + } + return instance; + } + + public void process(AbstractFile abstractFile) { + + if (initialized == false) //error initializing indexing/Solr + { + logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); + ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); // return ProcessResult.OK; -// } -// try { -// //add data source id of the file to the set, keeping track of images being ingested -// final long fileSourceId = caseHandle.getFileDataSource(abstractFile); -// curDataSourceIds.add(fileSourceId); -// -// } catch (TskCoreException ex) { -// logger.log(Level.SEVERE, "Error getting image id of file processed by keyword search: " + abstractFile.getName(), ex); -// } -// -// if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { -// //skip indexing of virtual dirs (no content, no real name) - will index children files + } + try { + //add data source id of the file to the set, keeping track of images being ingested + final long fileSourceId = caseHandle.getFileDataSource(abstractFile); + curDataSourceIds.add(fileSourceId); + + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, "Error getting image id of file processed by keyword search: " + abstractFile.getName(), ex); + } + + if 
(abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { + //skip indexing of virtual dirs (no content, no real name) - will index children files // return ProcessResult.OK; -// } -// -// //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it + } + + //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it // if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { // indexer.indexFile(abstractFile, false); // //notify depending module that keyword search (would) encountered error for this file @@ -193,1086 +192,1075 @@ // return ProcessResult.ERROR; // } // else if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { -// //index meta-data only -// indexer.indexFile(abstractFile, false); + if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { + //index meta-data only + indexer.indexFile(abstractFile, false); // return ProcessResult.OK; -// } -// -// processedFiles = true; -// -// //check if it's time to commit after previous processing -// checkRunCommitSearch(); -// -// //index the file and content (if the content is supported) -// indexer.indexFile(abstractFile, true); -// + } + + processedFiles = true; + + //check if it's time to commit after previous processing + checkRunCommitSearch(); + + //index the file and content (if the content is supported) + indexer.indexFile(abstractFile, true); + // return ProcessResult.OK; -// } -// -// /** -// * After all files are ingested, execute final index commit and final search -// * Cleanup resources, threads, timers -// */ -// @Override -// public void complete() { -// if (initialized == false) { -// return; -// } -// -// //logger.log(Level.INFO, "complete()"); -// commitTimer.stop(); -// -// //NOTE, we let the 1 before last searcher complete fully, and enqueue the last one -// -// //cancel searcher timer, ensure unwanted searcher does not start -// //before we start the final one -// if (searchTimer.isRunning()) { -// searchTimer.stop(); -// } -// runSearcher = false; -// -// logger.log(Level.INFO, "Running final index commit and search"); -// //final commit -// commit(); -// -// postIndexSummary(); -// -// //run one last search as there are probably some new files committed -// if (keywordLists != null && !keywordLists.isEmpty() && processedFiles == true) { -// finalSearcher = new Searcher(keywordLists, true); //final searcher run -// finalSearcher.execute(); -// } else { -// finalSearcherDone = true; -// } -// -// //log number of files / chunks in index -// //signal a potential change in number of text_ingested files -// try { -// final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); -// final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); -// logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); -// logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); -// } catch (NoOpenCoreException ex) { -// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); -// } catch (KeywordSearchModuleException se) { -// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); -// } -// -// //cleanup done in final searcher -// -// //postSummary(); -// } -// -// /** -// * Handle stop event (ingest interrupted) Cleanup resources, threads, timers -// */ -// @Override 
-// public void stop() { -// logger.log(Level.INFO, "stop()"); -// -// //stop timer -// commitTimer.stop(); -// //stop currentSearcher -// if (currentSearcher != null) { -// currentSearcher.cancel(true); -// } -// -// //cancel searcher timer, ensure unwanted searcher does not start -// if (searchTimer.isRunning()) { -// searchTimer.stop(); -// } -// runSearcher = false; -// finalSearcherDone = true; -// -// -// //commit uncommited files, don't search again -// commit(); -// -// //postSummary(); -// -// cleanup(); -// } -// -// /** -// * Common cleanup code when module stops or final searcher completes -// */ -// private void cleanup() { -// ingestStatus.clear(); -// currentResults.clear(); -// curDataSourceIds.clear(); -// currentSearcher = null; -// //finalSearcher = null; //do not collect, might be finalizing -// -// commitTimer.stop(); -// searchTimer.stop(); -// commitTimer = null; -// //searchTimer = null; // do not collect, final searcher might still be running, in which case it throws an exception -// -// textExtractors.clear(); -// textExtractors = null; -// stringExtractor = null; -// -// keywords.clear(); -// keywordLists.clear(); -// keywordToList.clear(); -// -// tikaFormatDetector = null; -// -// initialized = false; -// } -// -// @Override -// public String getName() { -// return MODULE_NAME; -// } -// -// @Override -// public String getDescription() { -// return MODULE_DESCRIPTION; -// } -// -// @Override -// public String getVersion() { -// return MODULE_VERSION; -// } -// -// /** -// * Initializes the module for new ingest run Sets up threads, timers, -// * retrieves settings, keyword lists to run on -// * -// */ -// @Override -// public void init(IngestModuleInit initContext) { -// logger.log(Level.INFO, "init()"); -// services = IngestServices.getDefault(); -// initialized = false; -// -// caseHandle = Case.getCurrentCase().getSleuthkitCase(); -// -// tikaFormatDetector = new Tika(); -// -// ingester = Server.getIngester(); -// -// final Server server = KeywordSearch.getServer(); -// try { -// if (!server.isRunning()) { -// String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); -// logger.log(Level.SEVERE, msg); -// String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + } + + /** + * After all files are ingested, execute final index commit and final search + * Cleanup resources, threads, timers + */ + public void complete() { + if (initialized == false) { + return; + } + + //logger.log(Level.INFO, "complete()"); + commitTimer.stop(); + + //NOTE, we let the 1 before last searcher complete fully, and enqueue the last one + + //cancel searcher timer, ensure unwanted searcher does not start + //before we start the final one + if (searchTimer.isRunning()) { + searchTimer.stop(); + } + runSearcher = false; + + logger.log(Level.INFO, "Running final index commit and search"); + //final commit + commit(); + + postIndexSummary(); + + //run one last search as there are probably some new files committed + if (keywordLists != null && !keywordLists.isEmpty() && processedFiles == true) { + finalSearcher = new Searcher(keywordLists, true); //final searcher run + finalSearcher.execute(); + } else { + finalSearcherDone = true; + } + + //log number of files / chunks in index + //signal a potential change in number of text_ingested files + try { + final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); + final int numIndexedChunks = 
KeywordSearch.getServer().queryNumIndexedChunks(); + logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); + logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); + } catch (NoOpenCoreException ex) { + logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); + } catch (KeywordSearchModuleException se) { + logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); + } + + //cleanup done in final searcher + + //postSummary(); + } + + /** + * Handle stop event (ingest interrupted) Cleanup resources, threads, timers + */ + public void stop() { + logger.log(Level.INFO, "stop()"); + + //stop timer + commitTimer.stop(); + //stop currentSearcher + if (currentSearcher != null) { + currentSearcher.cancel(true); + } + + //cancel searcher timer, ensure unwanted searcher does not start + if (searchTimer.isRunning()) { + searchTimer.stop(); + } + runSearcher = false; + finalSearcherDone = true; + + + //commit uncommited files, don't search again + commit(); + + //postSummary(); + + cleanup(); + } + + /** + * Common cleanup code when module stops or final searcher completes + */ + private void cleanup() { + ingestStatus.clear(); + currentResults.clear(); + curDataSourceIds.clear(); + currentSearcher = null; + //finalSearcher = null; //do not collect, might be finalizing + + commitTimer.stop(); + searchTimer.stop(); + commitTimer = null; + //searchTimer = null; // do not collect, final searcher might still be running, in which case it throws an exception + + textExtractors.clear(); + textExtractors = null; + stringExtractor = null; + + keywords.clear(); + keywordLists.clear(); + keywordToList.clear(); + + tikaFormatDetector = null; + + initialized = false; + } + + public String getName() { + return MODULE_NAME; + } + + public String getDescription() { + return MODULE_DESCRIPTION; + } + + public String getVersion() { + return MODULE_VERSION; + } + + /** + * Initializes the module for new ingest run Sets up threads, timers, + * retrieves settings, keyword lists to run on + * + */ + public void init(IngestModuleInit initContext) { + logger.log(Level.INFO, "init()"); + services = IngestServices.getDefault(); + initialized = false; + + caseHandle = Case.getCurrentCase().getSleuthkitCase(); + + tikaFormatDetector = new Tika(); + + ingester = Server.getIngester(); + + final Server server = KeywordSearch.getServer(); + try { + if (!server.isRunning()) { + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + logger.log(Level.SEVERE, msg); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); // services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); -// return; -// -// } -// } catch (KeywordSearchModuleException ex) { -// logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); -// //this means Solr is not properly initialized -// String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); -// String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + return; + + } + } catch (KeywordSearchModuleException ex) { + logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); + //this means Solr is not properly initialized + String msg = NbBundle.getMessage(this.getClass(), 
"KeywordSearchIngestModule.init.badInitMsg"); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); // services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); -// return; -// } -// -// -// //initialize extractors -// stringExtractor = new AbstractFileStringExtract(); -// stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); -// stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); -// -// -// //log the scripts used for debugging -// final StringBuilder sbScripts = new StringBuilder(); -// for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { -// sbScripts.append(s.name()).append(" "); -// } -// logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); -// -// textExtractors = new ArrayList(); -// //order matters, more specific extractors first -// textExtractors.add(new AbstractFileHtmlExtract()); -// textExtractors.add(new AbstractFileTikaTextExtract()); -// -// -// ingestStatus = new HashMap(); -// -// keywords = new ArrayList(); -// keywordLists = new ArrayList(); -// keywordToList = new HashMap(); -// -// initKeywords(); -// -// if (keywords.isEmpty() || keywordLists.isEmpty()) { + return; + } + + + //initialize extractors + stringExtractor = new AbstractFileStringExtract(); + stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); + stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); + + + //log the scripts used for debugging + final StringBuilder sbScripts = new StringBuilder(); + for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { + sbScripts.append(s.name()).append(" "); + } + logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); + + textExtractors = new ArrayList(); + //order matters, more specific extractors first + textExtractors.add(new AbstractFileHtmlExtract()); + textExtractors.add(new AbstractFileTikaTextExtract()); + + + ingestStatus = new HashMap(); + + keywords = new ArrayList(); + keywordLists = new ArrayList(); + keywordToList = new HashMap(); + + initKeywords(); + + if (keywords.isEmpty() || keywordLists.isEmpty()) { // services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), // NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); -// } -// -// processedFiles = false; -// finalSearcherDone = false; -// searcherDone = true; //make sure to start the initial currentSearcher -// //keeps track of all results per run not to repeat reporting the same hits -// currentResults = new HashMap>(); -// -// curDataSourceIds = new HashSet(); -// -// indexer = new Indexer(); -// -// final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; -// logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); -// logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); -// -// commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); -// searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); -// -// initialized = true; -// -// commitTimer.start(); -// searchTimer.start(); -// } -// -// @Override -// public boolean hasSimpleConfiguration() { -// return true; -// } -// -// @Override -// public boolean hasAdvancedConfiguration() { -// return true; -// } -// -// @Override -// public javax.swing.JPanel 
getSimpleConfiguration(String context) { -// KeywordSearchListsXML.getCurrent().reload(); -// -// if (null == simpleConfigPanel) { -// simpleConfigPanel = new KeywordSearchIngestSimplePanel(); -// } -// else { -// simpleConfigPanel.load(); -// } -// -// return simpleConfigPanel; -// } -// -// @Override -// public javax.swing.JPanel getAdvancedConfiguration(String context) { -// if (advancedConfigPanel == null) { -// advancedConfigPanel = new KeywordSearchConfigurationPanel(); -// } -// -// advancedConfigPanel.load(); -// return advancedConfigPanel; -// } -// -// @Override -// public void saveAdvancedConfiguration() { -// if (advancedConfigPanel != null) { -// advancedConfigPanel.store(); -// } -// -// if (simpleConfigPanel != null) { -// simpleConfigPanel.load(); -// } -// } -// -// @Override -// public void saveSimpleConfiguration() { -// KeywordSearchListsXML.getCurrent().save(); -// } -// -// /** -// * The modules maintains background threads, return true if background -// * threads are running or there are pending tasks to be run in the future, -// * such as the final search post-ingest completion -// * -// * @return -// */ -// @Override -// public boolean hasBackgroundJobsRunning() { -// if ((currentSearcher != null && searcherDone == false) -// || (finalSearcherDone == false)) { -// return true; -// } else { -// return false; -// } -// -// } -// -// /** -// * Commits index and notifies listeners of index update -// */ -// private void commit() { -// if (initialized) { -// logger.log(Level.INFO, "Commiting index"); -// ingester.commit(); -// logger.log(Level.INFO, "Index comitted"); -// //signal a potential change in number of text_ingested files -// indexChangeNotify(); -// } -// } -// -// /** -// * Posts inbox message with summary of text_ingested files -// */ -// private void postIndexSummary() { -// int text_ingested = 0; -// int metadata_ingested = 0; -// int strings_ingested = 0; -// int error_text = 0; -// int error_index = 0; -// int error_io = 0; -// for (IngestStatus s : ingestStatus.values()) { -// switch (s) { -// case TEXT_INGESTED: -// ++text_ingested; -// break; -// case METADATA_INGESTED: -// ++metadata_ingested; -// break; -// case STRINGS_INGESTED: -// ++strings_ingested; -// break; -// case SKIPPED_ERROR_TEXTEXTRACT: -// error_text++; -// break; -// case SKIPPED_ERROR_INDEXING: -// error_index++; -// break; -// case SKIPPED_ERROR_IO: -// error_io++; -// break; -// default: -// ; -// } -// } -// -// StringBuilder msg = new StringBuilder(); -// msg.append(""); -// msg.append(""); -// msg.append(""); -// msg.append(""); -// msg.append(""); -// msg.append(""); -// msg.append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("").append(text_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.fileGenStringsHead")).append("").append(strings_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.mdOnlyLbl")).append("").append(metadata_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrLbl")).append("").append(error_index).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errTxtLbl")).append("").append(error_text).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append("
    "); -// String indexStats = msg.toString(); -// logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); + } + + processedFiles = false; + finalSearcherDone = false; + searcherDone = true; //make sure to start the initial currentSearcher + //keeps track of all results per run not to repeat reporting the same hits + currentResults = new HashMap>(); + + curDataSourceIds = new HashSet(); + + indexer = new Indexer(); + + final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; + logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); + logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); + + commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); + searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); + + initialized = true; + + commitTimer.start(); + searchTimer.start(); + } + + public boolean hasSimpleConfiguration() { + return true; + } + + public boolean hasAdvancedConfiguration() { + return true; + } + + public javax.swing.JPanel getSimpleConfiguration(String context) { + KeywordSearchListsXML.getCurrent().reload(); + + if (null == simpleConfigPanel) { + simpleConfigPanel = new KeywordSearchIngestSimplePanel(); + } + else { + simpleConfigPanel.load(); + } + + return simpleConfigPanel; + } + + public javax.swing.JPanel getAdvancedConfiguration(String context) { + if (advancedConfigPanel == null) { + advancedConfigPanel = new KeywordSearchConfigurationPanel(); + } + + advancedConfigPanel.load(); + return advancedConfigPanel; + } + + public void saveAdvancedConfiguration() { + if (advancedConfigPanel != null) { + advancedConfigPanel.store(); + } + + if (simpleConfigPanel != null) { + simpleConfigPanel.load(); + } + } + + public void saveSimpleConfiguration() { + KeywordSearchListsXML.getCurrent().save(); + } + + /** + * The modules maintains background threads, return true if background + * threads are running or there are pending tasks to be run in the future, + * such as the final search post-ingest completion + * + * @return + */ + public boolean hasBackgroundJobsRunning() { + if ((currentSearcher != null && searcherDone == false) + || (finalSearcherDone == false)) { + return true; + } else { + return false; + } + + } + + /** + * Commits index and notifies listeners of index update + */ + private void commit() { + if (initialized) { + logger.log(Level.INFO, "Commiting index"); + ingester.commit(); + logger.log(Level.INFO, "Index comitted"); + //signal a potential change in number of text_ingested files + indexChangeNotify(); + } + } + + /** + * Posts inbox message with summary of text_ingested files + */ + private void postIndexSummary() { + int text_ingested = 0; + int metadata_ingested = 0; + int strings_ingested = 0; + int error_text = 0; + int error_index = 0; + int error_io = 0; + for (IngestStatus s : ingestStatus.values()) { + switch (s) { + case TEXT_INGESTED: + ++text_ingested; + break; + case METADATA_INGESTED: + ++metadata_ingested; + break; + case STRINGS_INGESTED: + ++strings_ingested; + break; + case SKIPPED_ERROR_TEXTEXTRACT: + error_text++; + break; + case SKIPPED_ERROR_INDEXING: + error_index++; + break; + case SKIPPED_ERROR_IO: + error_io++; + break; + default: + ; + } + } + + StringBuilder msg = new StringBuilder(); + msg.append(""); + msg.append(""); + msg.append(""); + msg.append(""); + msg.append(""); + msg.append(""); + msg.append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("").append(text_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.fileGenStringsHead")).append("").append(strings_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.mdOnlyLbl")).append("").append(metadata_ingested).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrLbl")).append("").append(error_index).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errTxtLbl")).append("").append(error_text).append("
    ").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append("
    "); + String indexStats = msg.toString(); + logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); + // RJCTODO // services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats)); -// if (error_index > 0) { -// MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrsTitle"), -// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrMsgFiles", error_index)); -// } -// else if (error_io + error_text > 0) { -// MessageNotifyUtil.Notify.warn(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxWarnMsgTitle"), -// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrReadFilesMsg")); -// } -// } -// -// /** -// * Helper method to notify listeners on index update -// */ -// private void indexChangeNotify() { -// //signal a potential change in number of text_ingested files -// try { -// final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); -// KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); -// } catch (NoOpenCoreException ex) { -// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); -// } catch (KeywordSearchModuleException se) { -// logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); -// } -// } -// -// /** -// * Initialize the keyword search lists and associated keywords from the XML -// * loader Use the lists to ingest that are set in the permanent XML -// * configuration -// */ -// private void initKeywords() { -// addKeywordLists(null); -// } -// -// /** -// * If ingest is ongoing, this will add additional keyword search lists to -// * the ongoing ingest The lists to add may be temporary and not necessary -// * set to be added to ingest permanently in the XML configuration. The lists -// * will be reset back to original (permanent configuration state) on the -// * next ingest. -// * -// * @param listsToAdd lists to add temporarily to the ongoing ingest -// */ -// void addKeywordLists(List listsToAdd) { -// KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); -// -// keywords.clear(); -// keywordLists.clear(); -// keywordToList.clear(); -// -// StringBuilder sb = new StringBuilder(); -// -// for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) { -// final String listName = list.getName(); -// if (list.getUseForIngest() == true -// || (listsToAdd != null && listsToAdd.contains(listName))) { -// keywordLists.add(listName); -// sb.append(listName).append(" "); -// } -// for (Keyword keyword : list.getKeywords()) { -// if (!keywords.contains(keyword)) { -// keywords.add(keyword); -// keywordToList.put(keyword.getQuery(), list); -// } -// } -// -// } -// -// logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); -// -// } -// -// List getKeywordLists() { -// return keywordLists == null ? new ArrayList() : keywordLists; -// } -// -// /** -// * Check if time to commit, if so, run commit. Then run search if search -// * timer is also set. 
-// */ -// void checkRunCommitSearch() { -// if (commitIndex) { -// logger.log(Level.INFO, "Commiting index"); -// commit(); -// commitIndex = false; -// -// //after commit, check if time to run searcher -// //NOTE commit/searcher timings don't need to align -// //in worst case, we will run search next time after commit timer goes off, or at the end of ingest -// if (searcherDone && runSearcher) { -// //start search if previous not running -// if (keywordLists != null && !keywordLists.isEmpty()) { -// currentSearcher = new Searcher(keywordLists); -// currentSearcher.execute();//searcher will stop timer and restart timer when done -// } -// } -// } -// } -// -// /** -// * CommitTimerAction to run by commitTimer Sets a flag to indicate we are -// * ready for commit -// */ -// private class CommitTimerAction implements ActionListener { -// -// private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName()); -// -// @Override -// public void actionPerformed(ActionEvent e) { -// commitIndex = true; -// logger.log(Level.INFO, "CommitTimer awake"); -// } -// } -// -// /** -// * SearchTimerAction to run by searchTimer Sets a flag to indicate we are -// * ready to search -// */ -// private class SearchTimerAction implements ActionListener { -// -// private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName()); -// -// @Override -// public void actionPerformed(ActionEvent e) { -// runSearcher = true; -// logger.log(Level.INFO, "SearchTimer awake"); -// } -// } -// -// /** -// * File indexer, processes and indexes known/allocated files, -// * unknown/unallocated files and directories accordingly -// */ -// private class Indexer { -// -// private final Logger logger = Logger.getLogger(Indexer.class.getName()); -// -// /** -// * Extract text with Tika or other text extraction modules (by -// * streaming) from the file Divide the file into chunks and index the -// * chunks -// * -// * @param aFile file to extract strings from, divide into chunks and -// * index -// * @param detectedFormat mime-type detected, or null if none detected -// * @return true if the file was text_ingested, false otherwise -// * @throws IngesterException exception thrown if indexing failed -// */ -// private boolean extractTextAndIndex(AbstractFile aFile, String detectedFormat) throws IngesterException { -// AbstractFileExtract fileExtract = null; -// -// //go over available text extractors in order, and pick the first one (most specific one) -// for (AbstractFileExtract fe : textExtractors) { -// if (fe.isSupported(aFile, detectedFormat)) { -// fileExtract = fe; -// break; -// } -// } -// -// if (fileExtract == null) { -// logger.log(Level.INFO, "No text extractor found for file id:" -// + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat); -// return false; -// } -// -// //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName()); -// -// //divide into chunks and index -// return fileExtract.index(aFile); -// } -// -// /** -// * Extract strings using heuristics from the file and add to index. 
-// * -// * @param aFile file to extract strings from, divide into chunks and -// * index -// * @return true if the file was text_ingested, false otherwise -// */ -// private boolean extractStringsAndIndex(AbstractFile aFile) { -// try { -// if (stringExtractor.index(aFile)) { -// ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); -// return true; -// } else { -// logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); -// return false; -// } -// } catch (IngesterException ex) { -// logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); -// return false; -// } -// } -// -// /** -// * Check with every extractor if it supports the file with the detected -// * format -// * -// * @param aFile file to check for -// * @param detectedFormat mime-type with detected format (such as -// * text/plain) or null if not detected -// * @return true if text extraction is supported -// */ -// private boolean isTextExtractSupported(AbstractFile aFile, String detectedFormat) { -// for (AbstractFileExtract extractor : textExtractors) { -// if (extractor.isContentTypeSpecific() == true -// && extractor.isSupported(aFile, detectedFormat)) { -// return true; -// } -// } -// return false; -// } -// -// /** -// * Adds the file to the index. Detects file type, calls extractors, etc. -// * -// * @param aFile File to analyze -// * @param indexContent False if only metadata should be text_ingested. True if -// * content and metadata should be index. -// */ -// private void indexFile(AbstractFile aFile, boolean indexContent) { -// //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); -// -// TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); -// -// // unallocated and unused blocks can only have strings extracted from them. 
-// if ((aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS))) { -// extractStringsAndIndex(aFile); -// } -// -// final long size = aFile.getSize(); -// //if not to index content, or a dir, or 0 content, index meta data only -// if ((indexContent == false || aFile.isDir() || size == 0)) { -// try { -// ingester.ingest(aFile, false); //meta-data only -// ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); -// } -// catch (IngesterException ex) { -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); -// logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); -// } -// return; -// } -// -// //use Tika to detect the format -// String detectedFormat = null; -// InputStream is = null; -// try { -// is = new ReadContentInputStream(aFile); -// detectedFormat = tikaFormatDetector.detect(is, aFile.getName()); -// } -// catch (Exception e) { -// logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e); -// } -// finally { -// if (is != null) { -// try { -// is.close(); -// } catch (IOException ex) { -// logger.log(Level.WARNING, "Could not close stream after detecting format using tika for file: " -// + aFile, ex); -// } -// } -// } -// -// // @@@ Add file type signature to blackboard here -// -// //logger.log(Level.INFO, "Detected format: " + aFile.getName() + " " + detectedFormat); -// -// // we skip archive formats that are opened by the archive module. -// // @@@ We could have a check here to see if the archive module was enabled though... -// if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) { -// try { -// ingester.ingest(aFile, false); //meta-data only -// ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); -// } -// catch (IngesterException ex) { -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); -// logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); -// } -// return; -// } -// -// boolean wasTextAdded = false; -// if (isTextExtractSupported(aFile, detectedFormat)) { -// //extract text with one of the extractors, divide into chunks and index with Solr -// try { -// //logger.log(Level.INFO, "indexing: " + aFile.getName()); -// if (!extractTextAndIndex(aFile, detectedFormat)) { -// logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); -// } else { -// ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); -// wasTextAdded = true; -// } -// -// } catch (IngesterException e) { -// logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", " -// + aFile.getName(), e); -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); -// } catch (Exception e) { -// logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", " -// + aFile.getName(), e); -// ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); -// } -// } -// -// // if it wasn't supported or had an error, default to strings -// if (wasTextAdded == false) { -// extractStringsAndIndex(aFile); -// } -// } -// } -// -// /** -// * Searcher responsible for searching the current index and writing results -// * to blackboard and the inbox. Also, posts results to listeners as Ingest -// * data events. 
Searches entire index, and keeps track of only new results -// * to report and save. Runs as a background thread. -// */ -// private final class Searcher extends SwingWorker { -// -// /** -// * Searcher has private copies/snapshots of the lists and keywords -// */ -// private List keywords; //keywords to search -// private List keywordLists; // lists currently being searched -// private Map keywordToList; //keyword to list name mapping -// private AggregateProgressHandle progressGroup; -// private final Logger logger = Logger.getLogger(Searcher.class.getName()); -// private boolean finalRun = false; -// -// Searcher(List keywordLists) { -// this.keywordLists = new ArrayList(keywordLists); -// this.keywords = new ArrayList(); -// this.keywordToList = new HashMap(); -// //keywords are populated as searcher runs -// } -// -// Searcher(List keywordLists, boolean finalRun) { -// this(keywordLists); -// this.finalRun = finalRun; -// } -// -// @Override -// protected Object doInBackground() throws Exception { -// if (finalRun) { -// logger.log(Level.INFO, "Pending start of new (final) searcher"); -// } else { -// logger.log(Level.INFO, "Pending start of new searcher"); -// } -// -// final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + -// (finalRun ? (" - "+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); -// progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("+ -// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") +")"), null, new Cancellable() { -// @Override -// public boolean cancel() { -// logger.log(Level.INFO, "Cancelling the searcher by user."); -// if (progressGroup != null) { -// progressGroup.setDisplayName(displayName + " ("+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.cancelMsg") +"...)"); -// } -// return Searcher.this.cancel(true); -// } -// }, null); -// -// updateKeywords(); -// -// ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; -// int i = 0; -// for (Keyword keywordQuery : keywords) { -// subProgresses[i] = -// AggregateProgressFactory.createProgressContributor(keywordQuery.getQuery()); -// progressGroup.addContributor(subProgresses[i]); -// i++; -// } -// -// progressGroup.start(); -// -// //block to ensure previous searcher is completely done with doInBackground() -// //even after previous searcher cancellation, we need to check this -// searcherLock.lock(); -// final StopWatch stopWatch = new StopWatch(); -// stopWatch.start(); -// try { -// logger.log(Level.INFO, "Started a new searcher"); -// progressGroup.setDisplayName(displayName); -// //make sure other searchers are not spawned -// searcherDone = false; -// runSearcher = false; -// if (searchTimer.isRunning()) { -// searchTimer.stop(); -// } -// -// int keywordsSearched = 0; -// -// //updateKeywords(); -// -// for (Keyword keywordQuery : keywords) { -// if (this.isCancelled()) { -// logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); -// return null; -// } -// -// final String queryStr = keywordQuery.getQuery(); -// final KeywordSearchListsAbstract.KeywordSearchList list = keywordToList.get(queryStr); -// final String listName = list.getName(); -// -// //new subProgress will be active after the initial query -// //when we know number of hits to start() with -// if (keywordsSearched > 0) { -// 
subProgresses[keywordsSearched - 1].finish(); -// } -// -// -// KeywordSearchQuery del = null; -// -// boolean isRegex = !keywordQuery.isLiteral(); -// if (isRegex) { -// del = new TermComponentQuery(keywordQuery); -// } -// else { -// del = new LuceneQuery(keywordQuery); -// del.escape(); -// } -// -// //limit search to currently ingested data sources -// //set up a filter with 1 or more image ids OR'ed -// final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds); -// del.addFilter(dataSourceFilter); -// -// Map> queryResult = null; -// -// try { -// queryResult = del.performQuery(); -// } catch (NoOpenCoreException ex) { -// logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); -// //no reason to continue with next query if recovery failed -// //or wait for recovery to kick in and run again later -// //likely case has closed and threads are being interrupted -// return null; -// } catch (CancellationException e) { -// logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); -// return null; -// } catch (Exception e) { -// logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); -// continue; -// } -// -// // calculate new results but substracting results already obtained in this ingest -// // this creates a map of each keyword to the list of unique files that have that hit. -// Map> newResults = filterResults(queryResult, isRegex); -// -// if (!newResults.isEmpty()) { -// -// //write results to BB -// -// //new artifacts created, to report to listeners -// Collection newArtifacts = new ArrayList(); -// -// //scale progress bar more more granular, per result sub-progress, within per keyword -// int totalUnits = newResults.size(); -// subProgresses[keywordsSearched].start(totalUnits); -// int unitProgress = 0; -// String queryDisplayStr = keywordQuery.getQuery(); -// if (queryDisplayStr.length() > 50) { -// queryDisplayStr = queryDisplayStr.substring(0, 49) + "..."; -// } -// subProgresses[keywordsSearched].progress(listName + ": " + queryDisplayStr, unitProgress); -// -// -// /* cycle through the keywords returned -- only one unless it was a regexp */ -// for (final Keyword hitTerm : newResults.keySet()) { -// //checking for cancellation between results -// if (this.isCancelled()) { -// logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery()); -// return null; -// } -// -// // update progress display -// String hitDisplayStr = hitTerm.getQuery(); -// if (hitDisplayStr.length() > 50) { -// hitDisplayStr = hitDisplayStr.substring(0, 49) + "..."; -// } -// subProgresses[keywordsSearched].progress(listName + ": " + hitDisplayStr, unitProgress); -// //subProgresses[keywordsSearched].progress(unitProgress); -// -// // this returns the unique files in the set with the first chunk that has a hit -// Map contentHitsFlattened = ContentHit.flattenResults(newResults.get(hitTerm)); -// for (final AbstractFile hitFile : contentHitsFlattened.keySet()) { -// -// // get the snippet for the first hit in the file -// String snippet = null; -// final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery()); -// int chunkId = contentHitsFlattened.get(hitFile); -// try { -// snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true); -// } catch (NoOpenCoreException e) { -// logger.log(Level.WARNING, "Error querying snippet: " + 
snippetQuery, e);
-//                                //no reason to continue
-//                                return null;
-//                            } catch (Exception e) {
-//                                logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
-//                                continue;
-//                            }
-//
-//                            // write the blackboard artifact for this keyword in this file
-//                            KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
-//                            if (written == null) {
-//                                logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
-//                                continue;
-//                            }
-//
-//                            newArtifacts.add(written.getArtifact());
-//
-//                            //generate an ingest inbox message for this keyword in this file
-//                            if (list.getIngestMessages()) {
-//                                StringBuilder subjectSb = new StringBuilder();
-//                                StringBuilder detailsSb = new StringBuilder();
-//                                //final int hitFiles = newResults.size();
-//
-//                                if (!keywordQuery.isLiteral()) {
-//                                    subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
-//                                } else {
-//                                    subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
-//                                }
-//                                //subjectSb.append("<");
-//                                String uniqueKey = null;
-//                                BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
-//                                if (attr != null) {
-//                                    final String keyword = attr.getValueString();
-//                                    subjectSb.append(keyword);
-//                                    uniqueKey = keyword.toLowerCase();
-//                                }
-//
-//                                //subjectSb.append(">");
-//                                //String uniqueKey = queryStr;
-//
-//                                //details
-//                                detailsSb.append("<table border='0' cellpadding='4' width='280'>");
-//                                //hit
-//                                detailsSb.append("<tr>");
-//                                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLThLbl"));
-//                                detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
-//                                detailsSb.append("</tr>");
-//
-//                                //preview
-//                                attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
-//                                if (attr != null) {
-//                                    detailsSb.append("<tr>");
-//                                    detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
-//                                    detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
-//                                    detailsSb.append("</tr>");
-//
-//                                }
-//
-//                                //file
-//                                detailsSb.append("<tr>");
-//                                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
-//                                detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>");
-//
-//                                detailsSb.append("</tr>");
-//
-//
-//                                //list
-//                                attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
-//                                detailsSb.append("<tr>");
-//                                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
-//                                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
-//                                detailsSb.append("</tr>");
-//
-//                                //regex
-//                                if (!keywordQuery.isLiteral()) {
-//                                    attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
-//                                    if (attr != null) {
-//                                        detailsSb.append("<tr>");
-//                                        detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
-//                                        detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
-//                                        detailsSb.append("</tr>");
-//
-//                                    }
-//                                }
-//                                detailsSb.append("</table>");
    "); -// + if (error_index > 0) { + MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrsTitle"), + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrMsgFiles", error_index)); + } + else if (error_io + error_text > 0) { + MessageNotifyUtil.Notify.warn(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxWarnMsgTitle"), + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrReadFilesMsg")); + } + } + + /** + * Helper method to notify listeners on index update + */ + private void indexChangeNotify() { + //signal a potential change in number of text_ingested files + try { + final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); + KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); + } catch (NoOpenCoreException ex) { + logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); + } catch (KeywordSearchModuleException se) { + logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); + } + } + + /** + * Initialize the keyword search lists and associated keywords from the XML + * loader Use the lists to ingest that are set in the permanent XML + * configuration + */ + private void initKeywords() { + addKeywordLists(null); + } + + /** + * If ingest is ongoing, this will add additional keyword search lists to + * the ongoing ingest The lists to add may be temporary and not necessary + * set to be added to ingest permanently in the XML configuration. The lists + * will be reset back to original (permanent configuration state) on the + * next ingest. + * + * @param listsToAdd lists to add temporarily to the ongoing ingest + */ + void addKeywordLists(List listsToAdd) { + KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); + + keywords.clear(); + keywordLists.clear(); + keywordToList.clear(); + + StringBuilder sb = new StringBuilder(); + + for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) { + final String listName = list.getName(); + if (list.getUseForIngest() == true + || (listsToAdd != null && listsToAdd.contains(listName))) { + keywordLists.add(listName); + sb.append(listName).append(" "); + } + for (Keyword keyword : list.getKeywords()) { + if (!keywords.contains(keyword)) { + keywords.add(keyword); + keywordToList.put(keyword.getQuery(), list); + } + } + + } + + logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); + + } + + List getKeywordLists() { + return keywordLists == null ? new ArrayList() : keywordLists; + } + + /** + * Check if time to commit, if so, run commit. Then run search if search + * timer is also set. 
+
+    /**
+     * Check if it is time to commit; if so, run the commit. Then run the
+     * search if the search timer is also set.
+     */
+    void checkRunCommitSearch() {
+        if (commitIndex) {
+            logger.log(Level.INFO, "Committing index");
+            commit();
+            commitIndex = false;
+
+            //after commit, check if time to run searcher
+            //NOTE commit/searcher timings don't need to align
+            //in worst case, we will run search next time after commit timer goes off, or at the end of ingest
+            if (searcherDone && runSearcher) {
+                //start search if previous not running
+                if (keywordLists != null && !keywordLists.isEmpty()) {
+                    currentSearcher = new Searcher(keywordLists);
+                    currentSearcher.execute();//searcher will stop timer and restart timer when done
+                }
+            }
+        }
+    }
+
+    /**
+     * CommitTimerAction, run by commitTimer. Sets a flag to indicate we are
+     * ready for commit.
+     */
+    private class CommitTimerAction implements ActionListener {
+
+        private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName());
+
+        @Override
+        public void actionPerformed(ActionEvent e) {
+            commitIndex = true;
+            logger.log(Level.INFO, "CommitTimer awake");
+        }
+    }
+
+    /**
+     * SearchTimerAction, run by searchTimer. Sets a flag to indicate we are
+     * ready to search.
+     */
+    private class SearchTimerAction implements ActionListener {
+
+        private final Logger logger = Logger.getLogger(SearchTimerAction.class.getName());
+
+        @Override
+        public void actionPerformed(ActionEvent e) {
+            runSearcher = true;
+            logger.log(Level.INFO, "SearchTimer awake");
+        }
+    }
+
+    /**
+     * File indexer. Processes and indexes known/allocated files,
+     * unknown/unallocated files, and directories accordingly.
+     */
+    private class Indexer {
+
+        private final Logger logger = Logger.getLogger(Indexer.class.getName());
+
+        /**
+         * Extract text with Tika or other text extraction modules (by
+         * streaming) from the file. Divide the file into chunks and index the
+         * chunks.
+         *
+         * @param aFile file to extract strings from, divide into chunks and
+         * index
+         * @param detectedFormat mime-type detected, or null if none detected
+         * @return true if the file was text_ingested, false otherwise
+         * @throws IngesterException exception thrown if indexing failed
+         */
+        private boolean extractTextAndIndex(AbstractFile aFile, String detectedFormat) throws IngesterException {
+            AbstractFileExtract fileExtract = null;
+
+            //go over available text extractors in order, and pick the first one (most specific one)
+            for (AbstractFileExtract fe : textExtractors) {
+                if (fe.isSupported(aFile, detectedFormat)) {
+                    fileExtract = fe;
+                    break;
+                }
+            }
+
+            if (fileExtract == null) {
+                logger.log(Level.INFO, "No text extractor found for file id:"
+                        + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat);
+                return false;
+            }
+
+            //logger.log(Level.INFO, "Extractor: " + fileExtract + ", file: " + aFile.getName());
+
+            //divide into chunks and index
+            return fileExtract.index(aFile);
+        }
+
+        /**
+         * Extract strings using heuristics from the file and add to index.
+ * + * @param aFile file to extract strings from, divide into chunks and + * index + * @return true if the file was text_ingested, false otherwise + */ + private boolean extractStringsAndIndex(AbstractFile aFile) { + try { + if (stringExtractor.index(aFile)) { + ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); + return true; + } else { + logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + return false; + } + } catch (IngesterException ex) { + logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + return false; + } + } + + /** + * Check with every extractor if it supports the file with the detected + * format + * + * @param aFile file to check for + * @param detectedFormat mime-type with detected format (such as + * text/plain) or null if not detected + * @return true if text extraction is supported + */ + private boolean isTextExtractSupported(AbstractFile aFile, String detectedFormat) { + for (AbstractFileExtract extractor : textExtractors) { + if (extractor.isContentTypeSpecific() == true + && extractor.isSupported(aFile, detectedFormat)) { + return true; + } + } + return false; + } + + /** + * Adds the file to the index. Detects file type, calls extractors, etc. + * + * @param aFile File to analyze + * @param indexContent False if only metadata should be text_ingested. True if + * content and metadata should be index. + */ + private void indexFile(AbstractFile aFile, boolean indexContent) { + //logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName()); + + TskData.TSK_DB_FILES_TYPE_ENUM aType = aFile.getType(); + + // unallocated and unused blocks can only have strings extracted from them. + if ((aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) || aType.equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS))) { + extractStringsAndIndex(aFile); + } + + final long size = aFile.getSize(); + //if not to index content, or a dir, or 0 content, index meta data only + if ((indexContent == false || aFile.isDir() || size == 0)) { + try { + ingester.ingest(aFile, false); //meta-data only + ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); + } + catch (IngesterException ex) { + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); + } + return; + } + + //use Tika to detect the format + String detectedFormat = null; + InputStream is = null; + try { + is = new ReadContentInputStream(aFile); + detectedFormat = tikaFormatDetector.detect(is, aFile.getName()); + } + catch (Exception e) { + logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e); + } + finally { + if (is != null) { + try { + is.close(); + } catch (IOException ex) { + logger.log(Level.WARNING, "Could not close stream after detecting format using tika for file: " + + aFile, ex); + } + } + } + + // @@@ Add file type signature to blackboard here + + //logger.log(Level.INFO, "Detected format: " + aFile.getName() + " " + detectedFormat); + + // we skip archive formats that are opened by the archive module. + // @@@ We could have a check here to see if the archive module was enabled though... 
+ if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) { + try { + ingester.ingest(aFile, false); //meta-data only + ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); + } + catch (IngesterException ex) { + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); + } + return; + } + + boolean wasTextAdded = false; + if (isTextExtractSupported(aFile, detectedFormat)) { + //extract text with one of the extractors, divide into chunks and index with Solr + try { + //logger.log(Level.INFO, "indexing: " + aFile.getName()); + if (!extractTextAndIndex(aFile, detectedFormat)) { + logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + } else { + ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); + wasTextAdded = true; + } + + } catch (IngesterException e) { + logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", " + + aFile.getName(), e); + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + } catch (Exception e) { + logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", " + + aFile.getName(), e); + ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + } + } + + // if it wasn't supported or had an error, default to strings + if (wasTextAdded == false) { + extractStringsAndIndex(aFile); + } + } + } + + /** + * Searcher responsible for searching the current index and writing results + * to blackboard and the inbox. Also, posts results to listeners as Ingest + * data events. Searches entire index, and keeps track of only new results + * to report and save. Runs as a background thread. + */ + private final class Searcher extends SwingWorker { + + /** + * Searcher has private copies/snapshots of the lists and keywords + */ + private List keywords; //keywords to search + private List keywordLists; // lists currently being searched + private Map keywordToList; //keyword to list name mapping + private AggregateProgressHandle progressGroup; + private final Logger logger = Logger.getLogger(Searcher.class.getName()); + private boolean finalRun = false; + + Searcher(List keywordLists) { + this.keywordLists = new ArrayList(keywordLists); + this.keywords = new ArrayList(); + this.keywordToList = new HashMap(); + //keywords are populated as searcher runs + } + + Searcher(List keywordLists, boolean finalRun) { + this(keywordLists); + this.finalRun = finalRun; + } + + @Override + protected Object doInBackground() throws Exception { + if (finalRun) { + logger.log(Level.INFO, "Pending start of new (final) searcher"); + } else { + logger.log(Level.INFO, "Pending start of new searcher"); + } + + final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") + + (finalRun ? 
(" - "+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : ""); + progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("+ + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") +")"), null, new Cancellable() { + @Override + public boolean cancel() { + logger.log(Level.INFO, "Cancelling the searcher by user."); + if (progressGroup != null) { + progressGroup.setDisplayName(displayName + " ("+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.cancelMsg") +"...)"); + } + return Searcher.this.cancel(true); + } + }, null); + + updateKeywords(); + + ProgressContributor[] subProgresses = new ProgressContributor[keywords.size()]; + int i = 0; + for (Keyword keywordQuery : keywords) { + subProgresses[i] = + AggregateProgressFactory.createProgressContributor(keywordQuery.getQuery()); + progressGroup.addContributor(subProgresses[i]); + i++; + } + + progressGroup.start(); + + //block to ensure previous searcher is completely done with doInBackground() + //even after previous searcher cancellation, we need to check this + searcherLock.lock(); + final StopWatch stopWatch = new StopWatch(); + stopWatch.start(); + try { + logger.log(Level.INFO, "Started a new searcher"); + progressGroup.setDisplayName(displayName); + //make sure other searchers are not spawned + searcherDone = false; + runSearcher = false; + if (searchTimer.isRunning()) { + searchTimer.stop(); + } + + int keywordsSearched = 0; + + //updateKeywords(); + + for (Keyword keywordQuery : keywords) { + if (this.isCancelled()) { + logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); + return null; + } + + final String queryStr = keywordQuery.getQuery(); + final KeywordSearchListsAbstract.KeywordSearchList list = keywordToList.get(queryStr); + final String listName = list.getName(); + + //new subProgress will be active after the initial query + //when we know number of hits to start() with + if (keywordsSearched > 0) { + subProgresses[keywordsSearched - 1].finish(); + } + + + KeywordSearchQuery del = null; + + boolean isRegex = !keywordQuery.isLiteral(); + if (isRegex) { + del = new TermComponentQuery(keywordQuery); + } + else { + del = new LuceneQuery(keywordQuery); + del.escape(); + } + + //limit search to currently ingested data sources + //set up a filter with 1 or more image ids OR'ed + final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds); + del.addFilter(dataSourceFilter); + + Map> queryResult = null; + + try { + queryResult = del.performQuery(); + } catch (NoOpenCoreException ex) { + logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), ex); + //no reason to continue with next query if recovery failed + //or wait for recovery to kick in and run again later + //likely case has closed and threads are being interrupted + return null; + } catch (CancellationException e) { + logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); + return null; + } catch (Exception e) { + logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); + continue; + } + + // calculate new results but substracting results already obtained in this ingest + // this creates a map of each keyword to the list of unique files that have that hit. 
+                        Map<Keyword, List<ContentHit>> newResults = filterResults(queryResult, isRegex);
+
+                        if (!newResults.isEmpty()) {
+
+                            //write results to BB
+
+                            //new artifacts created, to report to listeners
+                            Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
+
+                            //scale progress bar more granularly: per-result sub-progress within each keyword
+                            int totalUnits = newResults.size();
+                            subProgresses[keywordsSearched].start(totalUnits);
+                            int unitProgress = 0;
+                            String queryDisplayStr = keywordQuery.getQuery();
+                            if (queryDisplayStr.length() > 50) {
+                                queryDisplayStr = queryDisplayStr.substring(0, 49) + "...";
+                            }
+                            subProgresses[keywordsSearched].progress(listName + ": " + queryDisplayStr, unitProgress);
+
+
+                            /* cycle through the keywords returned -- only one unless it was a regexp */
+                            for (final Keyword hitTerm : newResults.keySet()) {
+                                //checking for cancellation between results
+                                if (this.isCancelled()) {
+                                    logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery());
+                                    return null;
+                                }
+
+                                // update progress display
+                                String hitDisplayStr = hitTerm.getQuery();
+                                if (hitDisplayStr.length() > 50) {
+                                    hitDisplayStr = hitDisplayStr.substring(0, 49) + "...";
+                                }
+                                subProgresses[keywordsSearched].progress(listName + ": " + hitDisplayStr, unitProgress);
+                                //subProgresses[keywordsSearched].progress(unitProgress);
+
+                                // this returns the unique files in the set with the first chunk that has a hit
+                                Map<AbstractFile, Integer> contentHitsFlattened = ContentHit.flattenResults(newResults.get(hitTerm));
+                                for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
+
+                                    // get the snippet for the first hit in the file
+                                    String snippet = null;
+                                    final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
+                                    int chunkId = contentHitsFlattened.get(hitFile);
+                                    try {
+                                        snippet = LuceneQuery.querySnippet(snippetQuery, hitFile.getId(), chunkId, isRegex, true);
+                                    } catch (NoOpenCoreException e) {
+                                        logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
+                                        //no reason to continue
+                                        return null;
+                                    } catch (Exception e) {
+                                        logger.log(Level.WARNING, "Error querying snippet: " + snippetQuery, e);
+                                        continue;
+                                    }
+
+                                    // write the blackboard artifact for this keyword in this file
+                                    KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
+                                    if (written == null) {
+                                        logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
+                                        continue;
+                                    }
+
+                                    newArtifacts.add(written.getArtifact());
+
+                                    //generate an ingest inbox message for this keyword in this file
+                                    if (list.getIngestMessages()) {
+                                        StringBuilder subjectSb = new StringBuilder();
+                                        StringBuilder detailsSb = new StringBuilder();
+                                        //final int hitFiles = newResults.size();
+
+                                        if (!keywordQuery.isLiteral()) {
+                                            subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExpHitLbl"));
+                                        } else {
+                                            subjectSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLbl"));
+                                        }
+                                        //subjectSb.append("<");
+                                        String uniqueKey = null;
+                                        BlackboardAttribute attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID());
+                                        if (attr != null) {
+                                            final String keyword = attr.getValueString();
+                                            subjectSb.append(keyword);
+                                            uniqueKey = keyword.toLowerCase();
+                                        }
+
+                                        //subjectSb.append(">");
+                                        //String uniqueKey = queryStr;
+
+                                        //details
+                                        detailsSb.append("<table border='0' cellpadding='4' width='280'>");
+                                        //hit
+                                        detailsSb.append("<tr>");
+                                        detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLThLbl"));
+                                        detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
+                                        detailsSb.append("</tr>");
+
+                                        //preview
+                                        attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW.getTypeID());
+                                        if (attr != null) {
+                                            detailsSb.append("<tr>");
+                                            detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.previewThLbl"));
+                                            detailsSb.append("<td>").append(EscapeUtil.escapeHtml(attr.getValueString())).append("</td>");
+                                            detailsSb.append("</tr>");
+
+                                        }
+
+                                        //file
+                                        detailsSb.append("<tr>");
+                                        detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.fileThLbl"));
+                                        detailsSb.append("<td>").append(hitFile.getParentPath()).append(hitFile.getName()).append("</td>");
+
+                                        detailsSb.append("</tr>");
+
+
+                                        //list
+                                        attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID());
+                                        detailsSb.append("<tr>");
+                                        detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.listThLbl"));
+                                        detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
+                                        detailsSb.append("</tr>");
+
+                                        //regex
+                                        if (!keywordQuery.isLiteral()) {
+                                            attr = written.getAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID());
+                                            if (attr != null) {
+                                                detailsSb.append("<tr>");
+                                                detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.regExThLbl"));
+                                                detailsSb.append("<td>").append(attr.getValueString()).append("</td>");
+                                                detailsSb.append("</tr>");
+
+                                            }
+                                        }
+                                        detailsSb.append("</table>");
    "); + // services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); -// } -// } //for each file hit -// -// ++unitProgress; -// -// }//for each hit term -// -// //update artifact browser -// if (!newArtifacts.isEmpty()) { -// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts)); -// } -// } //if has results -// -// //reset the status text before it goes away -// subProgresses[keywordsSearched].progress(""); -// -// ++keywordsSearched; -// -// } //for each keyword -// -// } //end try block -// catch (Exception ex) { -// logger.log(Level.WARNING, "searcher exception occurred", ex); -// } finally { -// try { -// finalizeSearcher(); -// stopWatch.stop(); -// logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); -// } finally { -// searcherLock.unlock(); -// } -// } -// -// return null; -// } -// -// /** -// * Sync-up the updated keywords from the currently used lists in the XML -// */ -// private void updateKeywords() { -// KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent(); -// -// this.keywords.clear(); -// this.keywordToList.clear(); -// -// for (String name : this.keywordLists) { -// KeywordSearchListsAbstract.KeywordSearchList list = loader.getList(name); -// for (Keyword k : list.getKeywords()) { -// this.keywords.add(k); -// this.keywordToList.put(k.getQuery(), list); -// } -// } -// -// -// } -// -// //perform all essential cleanup that needs to be done right AFTER doInBackground() returns -// //without relying on done() method that is not guaranteed to run after background thread completes -// //NEED to call this method always right before doInBackground() returns -// /** -// * Performs the cleanup that needs to be done right AFTER -// * doInBackground() returns without relying on done() method that is not -// * guaranteed to run after background thread completes REQUIRED to call -// * this method always right before doInBackground() returns -// */ -// private void finalizeSearcher() { -// logger.log(Level.INFO, "Searcher finalizing"); -// SwingUtilities.invokeLater(new Runnable() { -// @Override -// public void run() { -// progressGroup.finish(); -// } -// }); -// searcherDone = true; //next currentSearcher can start -// -// if (finalRun) { -// //this is the final searcher -// logger.log(Level.INFO, "The final searcher in this ingest done."); -// finalSearcherDone = true; -// -// //run module cleanup -// cleanup(); -// } else { -// //start counting time for a new searcher to start -// //unless final searcher is pending -// if (finalSearcher == null) { -// //we need a new Timer object, because restarting previus will not cause firing of the action -// final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; -// searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); -// searchTimer.start(); -// } -// } -// } -// -// //calculate new results but substracting results already obtained in this ingest -// //update currentResults map with the new results -// private Map> filterResults(Map> queryResult, boolean isRegex) { -// Map> newResults = new HashMap>(); -// -// for (String termResult : queryResult.keySet()) { -// List queryTermResults = queryResult.get(termResult); -// -// //translate to list of IDs that we keep track of -// List queryTermResultsIDs = new ArrayList(); -// for (ContentHit ch : queryTermResults) { -// 
-//                queryTermResultsIDs.add(ch.getId());
-//            }
-//
-//            Keyword termResultK = new Keyword(termResult, !isRegex);
-//            List<Long> curTermResults = currentResults.get(termResultK);
-//            if (curTermResults == null) {
-//                currentResults.put(termResultK, queryTermResultsIDs);
-//                newResults.put(termResultK, queryTermResults);
-//            } else {
-//                //some AbstractFile hits already exist for this keyword
-//                for (ContentHit res : queryTermResults) {
-//                    if (!curTermResults.contains(res.getId())) {
-//                        //add to new results
-//                        List<ContentHit> newResultsFs = newResults.get(termResultK);
-//                        if (newResultsFs == null) {
-//                            newResultsFs = new ArrayList<>();
-//                            newResults.put(termResultK, newResultsFs);
-//                        }
-//                        newResultsFs.add(res);
-//                        curTermResults.add(res.getId());
-//                    }
-//                }
-//            }
-//        }
-//
-//        return newResults;
-//
-//    }
-//}
+                                    }
+                                } //for each file hit
+
+                                ++unitProgress;
+
+                            }//for each hit term
+
+                            //update artifact browser
+                            if (!newArtifacts.isEmpty()) {
+                                services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
+                            }
+                        } //if has results
+
+                        //reset the status text before it goes away
+                        subProgresses[keywordsSearched].progress("");
+
+                        ++keywordsSearched;
+
+                    } //for each keyword
+
+                } //end try block
+                catch (Exception ex) {
+                    logger.log(Level.WARNING, "searcher exception occurred", ex);
+                } finally {
+                    try {
+                        finalizeSearcher();
+                        stopWatch.stop();
+                        logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs.");
+                    } finally {
+                        searcherLock.unlock();
+                    }
+                }
+
+                return null;
+            }
+
+            /**
+             * Sync-up the updated keywords from the currently used lists in the XML
+             */
+            private void updateKeywords() {
+                KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent();
+
+                this.keywords.clear();
+                this.keywordToList.clear();
+
+                for (String name : this.keywordLists) {
+                    KeywordSearchListsAbstract.KeywordSearchList list = loader.getList(name);
+                    for (Keyword k : list.getKeywords()) {
+                        this.keywords.add(k);
+                        this.keywordToList.put(k.getQuery(), list);
+                    }
+                }
+
+
+            }
+
+            //perform all essential cleanup that needs to be done right AFTER doInBackground() returns
+            //without relying on done() method that is not guaranteed to run after background thread completes
+            //NEED to call this method always right before doInBackground() returns
+            /**
+             * Performs the cleanup that needs to be done right AFTER
+             * doInBackground() returns without relying on done() method that is
+             * not guaranteed to run after the background thread completes.
+             * REQUIRED: always call this method right before doInBackground()
+             * returns.
+             */
+            private void finalizeSearcher() {
+                logger.log(Level.INFO, "Searcher finalizing");
+                SwingUtilities.invokeLater(new Runnable() {
+                    @Override
+                    public void run() {
+                        progressGroup.finish();
+                    }
+                });
+                searcherDone = true; //next currentSearcher can start
+
+                if (finalRun) {
+                    //this is the final searcher
+                    logger.log(Level.INFO, "The final searcher in this ingest done.");
+                    finalSearcherDone = true;
+
+                    //run module cleanup
+                    cleanup();
+                } else {
+                    //start counting time for a new searcher to start
+                    //unless final searcher is pending
+                    if (finalSearcher == null) {
+                        //we need a new Timer object, because restarting the previous one will not cause the action to fire
+                        final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
+                        searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
+                        searchTimer.start();
+                    }
+                }
+            }
+
+            //calculate new results by subtracting results already obtained in
this ingest + //update currentResults map with the new results + private Map> filterResults(Map> queryResult, boolean isRegex) { + Map> newResults = new HashMap>(); + + for (String termResult : queryResult.keySet()) { + List queryTermResults = queryResult.get(termResult); + + //translate to list of IDs that we keep track of + List queryTermResultsIDs = new ArrayList(); + for (ContentHit ch : queryTermResults) { + queryTermResultsIDs.add(ch.getId()); + } + + Keyword termResultK = new Keyword(termResult, !isRegex); + List curTermResults = currentResults.get(termResultK); + if (curTermResults == null) { + currentResults.put(termResultK, queryTermResultsIDs); + newResults.put(termResultK, queryTermResults); + } else { + //some AbstractFile hits already exist for this keyword + for (ContentHit res : queryTermResults) { + if (!curTermResults.contains(res.getId())) { + //add to new results + List newResultsFs = newResults.get(termResultK); + if (newResultsFs == null) { + newResultsFs = new ArrayList(); + newResults.put(termResultK, newResultsFs); + } + newResultsFs.add(res); + curTermResults.add(res.getId()); + } + } + } + } + + return newResults; + + } + } +} From bb2f26d8afcba1a55d0290aec4edfa0516838998 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 27 Feb 2014 06:09:53 -0500 Subject: [PATCH 16/48] Start refactoring to give each DataSourceIngestJob (DataSourceTask) a progress bar --- .../autopsy/ingest/DataSourceIngestJob.java | 211 ++++++++++++++++++ .../autopsy/ingest/DataSourceTask.java | 124 ---------- .../autopsy/ingest/IngestManager.java | 4 +- .../autopsy/ingest/IngestScheduler.java | 67 ++---- 4 files changed, 228 insertions(+), 178 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java new file mode 100644 index 0000000000..3b56dca330 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java @@ -0,0 +1,211 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2013-2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.sleuthkit.autopsy.ingest; + +import java.util.List; +import java.util.Objects; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.netbeans.api.progress.ProgressHandle; +import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.ContentVisitor; +import org.sleuthkit.datamodel.FileSystem; +import org.sleuthkit.datamodel.LayoutFile; +import org.sleuthkit.datamodel.SleuthkitCase; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; + +// RJCTODO: Update comment +/** + * Represents a data source-level task to schedule and analyze. + * Children of the data will also be scheduled. 
+ *
+ * @param T type of Ingest Module / Pipeline (file or data source content) associated with this task
+ */
+class DataSourceIngestJob {
+
+    private final long id;
+    private final Content dataSource;
+    private final IngestPipelines ingestPipelines;
+    private final boolean processUnallocatedSpace;
+    private final Logger logger = Logger.getLogger(IngestScheduler.class.getName());
+    private long fileTasksCount = 0; // RJCTODO: Need additional counters
+    private int filesToIngestEstimate = 0; // RJCTODO: Rename, change to long, may synchronize
+    private int filesDequeued = 0; // RJCTODO: Rename, change to long, synchronize
+    private ProgressHandle progress;
+
+    DataSourceIngestJob(long id, Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) {
+        this.id = id;
+        this.dataSource = dataSource;
+        this.ingestPipelines = new IngestPipelines(id, ingestModuleTemplates);
+        this.processUnallocatedSpace = processUnallocatedSpace;
+    }
+
+    long getTaskId() {
+        return id;
+    }
+
+    Content getDataSource() {
+        return dataSource;
+    }
+
+    IngestPipelines getIngestPipelines() {
+        return ingestPipelines;
+    }
+
+    /**
+     * Returns whether unallocated space should be analyzed (and scheduled).
+     * @return True if the pipeline should process unallocated space.
+     */
+    boolean getProcessUnallocatedSpace() {
+        return processUnallocatedSpace;
+    }
+
+    synchronized void fileTaskScheduled() {
+        // RJCTODO: Implement the counters fully, or do list scanning
+        ++fileTasksCount;
+        ++filesToIngestEstimate;
+    }
+
+    synchronized void fileTaskCompleted() {
+        // RJCTODO: Implement the counters fully, or do list scanning
+        --fileTasksCount;
+        if (0 == fileTasksCount) {
+            // RJCTODO
+        }
+    }
+
+    float getEstimatedPercentComplete() {
+        if (filesToIngestEstimate == 0) {
+            return 0;
+        }
+        return ((100.f) * filesDequeued) / filesToIngestEstimate;
+    }
+
+
+    @Override
+    public String toString() {
+        // RJCTODO: Improve? Is this useful?
+//        return "ScheduledTask{" + "input=" + dataSource + ", modules=" + modules + '}';
+        return "ScheduledTask{ id=" + id + ", dataSource=" + dataSource + '}';
+    }
+
+    /**
+     * Two scheduled tasks are equal when the content and modules are the same.
+     * This prevents enqueuing equal scheduled tasks twice into the queue/set.
+     *
+     * @param obj
+     * @return
+     */
+    @Override
+    public boolean equals(Object obj) {
+        // RJCTODO: Revisit this, probably don't need it
+        if (obj == null) {
+            return false;
+        }
+
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+
+        final DataSourceIngestJob other = (DataSourceIngestJob)obj;
+        if (this.dataSource != other.dataSource && (this.dataSource == null || !this.dataSource.equals(other.dataSource))) {
+            return false;
+        }
+
+        return true;
+    }
+
+    @Override
+    public int hashCode() {
+        // RJCTODO: Probably don't need this
+        int hash = 5;
+        hash = 61 * hash + (int) (this.id ^ (this.id >>> 32));
+        hash = 61 * hash + Objects.hashCode(this.dataSource);
+        hash = 61 * hash + Objects.hashCode(this.ingestPipelines);
+        hash = 61 * hash + (this.processUnallocatedSpace ? 1 : 0);
+        return hash;
+    }
+
+    // RJCTODO: Fix comment
+    /**
+     * Get counts of ingestable files/dirs for the content input source.
+     *
+     * Note, also includes counts of all unalloc children files (for the fs, image, volume) even
+     * if ingest didn't ask for them
+     */
+    private class GetFilesCountVisitor extends ContentVisitor.Default<Long> {
+
+        @Override
+        protected Long defaultVisit(Content content) {
+            // Treat content as a data source (e.g., image) file or volume
+            // system. Look for child file system or layout files.
+            //recursion stops at fs or unalloc file
+            return visitChildren(content);
+        }
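+
+        // Usage sketch (hypothetical; nothing in this draft invokes the
+        // visitor yet): the file count estimate for this job's progress bar
+        // could be seeded with
+        //
+        //     long estimate = dataSource.accept(new GetFilesCountVisitor());
+        //
+        // Content.accept() dispatches to the most specific visit() overload,
+        // so file systems are counted with a single database query below,
+        // while each layout file counts as one file.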
+
+        @Override
+        public Long visit(FileSystem fs) {
+            // Query the case database to get a count of the files in the
+            // file system.
+            try {
+                StringBuilder sqlWhereClause = new StringBuilder();
+                sqlWhereClause.append("( (fs_obj_id = ").append(fs.getId());
+                sqlWhereClause.append(") )");
+                sqlWhereClause.append(" AND ( (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue());
+                sqlWhereClause.append(") OR (meta_type = ").append(TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue());
+                sqlWhereClause.append(" AND (name != '.') AND (name != '..')");
+                sqlWhereClause.append(") )");
+                String query = sqlWhereClause.toString();
+                SleuthkitCase caseDatabase = Case.getCurrentCase().getSleuthkitCase();
+                return caseDatabase.countFilesWhere(query);
+            } catch (TskCoreException ex) {
+                logger.log(Level.SEVERE, "Failed to get count of all files in file system named " + fs.getName(), ex);
+                return 0L;
+            }
+        }
+
+        @Override
+        public Long visit(LayoutFile lf) {
+            // Layout files are not file system files. They are
+            // "virtual files" created from blocks of data such as unallocated
+            // space. Count as single files.
+            return 1L;
+        }
+
+        private long visitChildren(Content content) {
+            long count = 0;
+            try {
+                List<Content> children = content.getChildren();
+                if (children.size() > 0) {
+                    for (Content child : children) {
+                        count += child.accept(this);
+                    }
+                } else {
+                    count = 1;
+                }
+            } catch (TskCoreException ex) {
+                logger.log(Level.SEVERE, "Failed to get children of content named " + content.getName(), ex);
+            }
+            return count;
+        }
+    }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java
deleted file mode 100644
index 942190844b..0000000000
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceTask.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2013-2014 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.sleuthkit.autopsy.ingest;
-
-import java.util.List;
-import java.util.Objects;
-import org.sleuthkit.datamodel.Content;
-
-// RJCTODO: Update comment
-/**
- * Represents a data source-level task to schedule and analyze.
- * Children of the data will also be scheduled.
- * - * @param T type of Ingest Module / Pipeline (file or data source content) associated with this task - */ -class DataSourceTask { - private final long id; - private final Content dataSource; - private final IngestPipelines ingestPipelines; - private final boolean processUnallocatedSpace; - private long fileTasksCount = 0; // RJCTODO: Need additional counters - - DataSourceTask(long id, Content dataSource, List ingestModuleTemplates, boolean processUnallocatedSpace) { - this.id = id; - this.dataSource = dataSource; - this.ingestPipelines = new IngestPipelines(id, ingestModuleTemplates); - this.processUnallocatedSpace = processUnallocatedSpace; - } - - long getTaskId() { - return id; - } - - Content getDataSource() { - return dataSource; - } - - IngestPipelines getIngestPipelines() { - return ingestPipelines; - } - - /** - * Returns value of if unallocated space should be analyzed (and scheduled) - * @return True if pipeline should process unallocated space. - */ - boolean getProcessUnallocatedSpace() { - return processUnallocatedSpace; - } - - synchronized void fileTaskScheduled() { - // RJCTODO: Implement the counters for fully, or do list scanning - ++fileTasksCount; - } - - synchronized void fileTaskCompleted() { - // RJCTODO: Implement the counters for fully, or do list scanning - --fileTasksCount; - if (0 == fileTasksCount) { - // RJCTODO - } - } - - @Override - public String toString() { - // RJCTODO: Improve? Is this useful? -// return "ScheduledTask{" + "input=" + dataSource + ", modules=" + modules + '}'; - return "ScheduledTask{ id=" + id + ", dataSource=" + dataSource + '}'; - } - - /** - * Two scheduled tasks are equal when the content and modules are the same. - * This enables us not to enqueue the equal schedules tasks twice into the - * queue/set - * - * @param obj - * @return - */ - @Override - public boolean equals(Object obj) { - // RJCTODO: Revisit this, probably don't need it - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - final DataSourceTask other = (DataSourceTask)obj; - if (this.dataSource != other.dataSource && (this.dataSource == null || !this.dataSource.equals(other.dataSource))) { - return false; - } - - return true; - } - - @Override - public int hashCode() { - // RJCTODO: Probably don't need this - int hash = 5; - hash = 61 * hash + (int) (this.id ^ (this.id >>> 32)); - hash = 61 * hash + Objects.hashCode(this.dataSource); - hash = 61 * hash + Objects.hashCode(this.ingestPipelines); - hash = 61 * hash + (this.processUnallocatedSpace ? 
1 : 0); - return hash; - } -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 23e04fe88d..3fe167deed 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -64,7 +64,7 @@ public class IngestManager { private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class); private final IngestMonitor ingestMonitor = new IngestMonitor(); // private IngestModuleLoader moduleLoader = null; - private DataSourceTask currentTask = null; // RJCTODO: Temp glue code, remove + private DataSourceIngestJob currentTask = null; // RJCTODO: Temp glue code, remove private long nextDataSourceTaskId = 0; public final static String MODULE_PROPERTIES = "ingest"; @@ -1106,7 +1106,7 @@ public class IngestManager { int processed = 0; for (Content dataSource : dataSources) { final String inputName = dataSource.getName(); - DataSourceTask dataSourceTask = new DataSourceTask(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace); + DataSourceIngestJob dataSourceTask = new DataSourceIngestJob(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace); logger.log(Level.INFO, "Queing data source ingest task: {0}", dataSourceTask); progress.progress("DataSource Ingest" + " " + inputName, processed); scheduler.getDataSourceScheduler().schedule(dataSourceTask); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java index ff3440c6ae..f303f928d9 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestScheduler.java @@ -108,7 +108,7 @@ class IngestScheduler { | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT16.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_FAT32.getValue() | TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS.getValue(); - private final ConcurrentHashMap dataSourceTasks = new ConcurrentHashMap<>(); // RJCTODO: Why weren't these concurrent before? Synchronized methods? + private final ConcurrentHashMap dataSourceTasks = new ConcurrentHashMap<>(); // RJCTODO: Why weren't these concurrent before? Synchronized methods? private final ConcurrentSkipListSet rootDirectoryTasks = new ConcurrentSkipListSet<>(new RootTaskComparator()); private final List directoryTasks = new ArrayList<>(); // private final ConcurrentLinkedQueue directoryTasks = new ConcurrentLinkedQueue<>(); @@ -138,7 +138,7 @@ class IngestScheduler { return sb.toString(); } - synchronized void scheduleIngestOfFiles(DataSourceTask dataSourceTask) { + synchronized void scheduleIngestOfFiles(DataSourceIngestJob dataSourceTask) { // Save the data source task to manage its pipelines. dataSourceTasks.put(dataSourceTask.getTaskId(), dataSourceTask); @@ -202,7 +202,7 @@ class IngestScheduler { * to schedule the parent origin content, with the modules, settings, etc. */ synchronized void scheduleIngestOfDerivedFile(long dataSourceTaskId, AbstractFile file) { - DataSourceTask dataSourceTask = dataSourceTasks.get(dataSourceTaskId); + DataSourceIngestJob dataSourceTask = dataSourceTasks.get(dataSourceTaskId); if (null == dataSourceTask) { // RJCTODO: Handle severe error } @@ -376,43 +376,6 @@ class IngestScheduler { return new ArrayList<>(contentSet); } - /** - * Determine if a module is in a pipeline in the queue. 
- * @param module - * @return true if it is in the queue. - */ - // RJCTODO: Remove -// synchronized boolean hasModuleEnqueued(IngestModuleAbstractFile module) { -// for (FileTask task : rootProcessTasks) { -// List modules = task.getDataSourceTask().getModules(); -// for (IngestModuleAbstractFile m : modules) { -// if (m.getName().equals(module.getName())) { -// return true; -// } -// } -// } -// -// for (FileTask task : curDirProcessTasks) { -// List modules = task.getDataSourceTask().getModules(); -// for (IngestModuleAbstractFile m : modules) { -// if (m.getName().equals(module.getName())) { -// return true; -// } -// } -// } -// -// for (FileTask task : curFileProcessTasks) { -// List modules = task.getDataSourceTask().getModules(); -// for (IngestModuleAbstractFile m : modules) { -// if (m.getName().equals(module.getName())) { -// return true; -// } -// } -// } -// -// return false; -// } - synchronized void empty() { this.rootDirectoryTasks.clear(); this.directoryTasks.clear(); @@ -490,9 +453,9 @@ class IngestScheduler { */ static class FileTask { private final AbstractFile file; - private final DataSourceTask dataSourceTask; + private final DataSourceIngestJob dataSourceTask; - public FileTask(AbstractFile file, DataSourceTask dataSourceTask) { + public FileTask(AbstractFile file, DataSourceIngestJob dataSourceTask) { this.file = file; this.dataSourceTask = dataSourceTask; } @@ -504,7 +467,7 @@ class IngestScheduler { dataSourceTask.fileTaskCompleted(); } - public DataSourceTask getDataSourceTask() { + public DataSourceIngestJob getDataSourceTask() { return dataSourceTask; } @@ -544,8 +507,8 @@ class IngestScheduler { if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) { return false; } - DataSourceTask thisTask = this.getDataSourceTask(); - DataSourceTask otherTask = other.getDataSourceTask(); + DataSourceIngestJob thisTask = this.getDataSourceTask(); + DataSourceIngestJob otherTask = other.getDataSourceTask(); if (thisTask != otherTask && (thisTask == null || !thisTask.equals(otherTask))) { @@ -801,15 +764,15 @@ class IngestScheduler { /** * DataSourceScheduler ingest scheduler */ - static class DataSourceScheduler implements Iterator { + static class DataSourceScheduler implements Iterator { - private LinkedList tasks; + private LinkedList tasks; DataSourceScheduler() { tasks = new LinkedList<>(); } - synchronized void schedule(DataSourceTask task) { + synchronized void schedule(DataSourceIngestJob task) { try { if (task.getDataSource().getParent() != null) { //only accepting parent-less content objects (Image, parentless VirtualDirectory) @@ -825,12 +788,12 @@ class IngestScheduler { } @Override - public synchronized DataSourceTask next() throws IllegalStateException { + public synchronized DataSourceIngestJob next() throws IllegalStateException { if (!hasNext()) { throw new IllegalStateException("There is no data source tasks in the queue, check hasNext()"); } - final DataSourceTask ret = tasks.pollFirst(); + final DataSourceIngestJob ret = tasks.pollFirst(); return ret; } @@ -841,7 +804,7 @@ class IngestScheduler { */ synchronized List getContents() { List contents = new ArrayList(); - for (DataSourceTask task : tasks) { + for (DataSourceIngestJob task : tasks) { contents.add(task.getDataSource()); } return contents; @@ -869,7 +832,7 @@ class IngestScheduler { public String toString() { StringBuilder sb = new StringBuilder(); sb.append("DataSourceQueue, size: ").append(getCount()); - for (DataSourceTask task : tasks) { + for 
(DataSourceIngestJob task : tasks) { sb.append(task.toString()).append(" "); } return sb.toString(); From 2b95138f70f4ecf8cd6bcc969cf5b10d1266bed2 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Mon, 3 Mar 2014 22:45:48 -0500 Subject: [PATCH 17/48] Work towards converting core ingest modules to new ingest API --- .../ingest/DataSourceIngestModule.java | 3 +- .../ingest/IngestDataSourceThread.java | 295 +++-- .../IngestDataSourceWorkerController.java | 114 +- .../autopsy/ingest/IngestManager.java | 15 +- .../autopsy/ingest/IngestModuleInit.java | 49 - .../autopsy/ingest/IngestModuleLoader.java | 1065 +---------------- ExifParser/nbproject/project.xml | 8 + .../ExifParserFileIngestModule.java | 406 +++---- .../exifparser/ExifParserModuleFactory.java | 62 + .../hashdatabase/HashDbIngestModule.java | 136 +-- .../KeywordSearchIngestModule.java | 349 +++--- .../KeywordSearchModuleFactory.java | 121 ++ ewfVerify/nbproject/project.xml | 8 + .../ewfverify/EwfVerifierModuleFactory.java | 63 + .../ewfverify/EwfVerifyIngestModule.java | 378 +++--- 15 files changed, 1066 insertions(+), 2006 deletions(-) delete mode 100644 Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java create mode 100755 ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java create mode 100755 KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java create mode 100755 ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java index 2d6f7d730b..38e5a959a1 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java @@ -32,6 +32,5 @@ public interface DataSourceIngestModule extends IngestModule { * @param statusHelper A status helper to be used to report progress and * detect task cancellation. 
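     *
     * A minimal implementation might look like the following sketch (a
     * hypothetical module coded against the interfaces in this patch, not
     * shipped code):
     *
     *   public class SampleDataSourceIngestModule implements DataSourceIngestModule {
     *       @Override
     *       public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
     *           statusHelper.switchToDeterminate(1);
     *           if (!statusHelper.isCancelled()) {
     *               // Examine the data source here.
     *               statusHelper.progress(1);
     *           }
     *       }
     *       // The IngestModule lifecycle methods (init(), complete(), stop())
     *       // are omitted from this sketch.
     *   }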
*/ -// void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class - void process(Content dataSource); + void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java index 48abeca7b5..d3ee0e1d84 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceThread.java @@ -18,157 +18,154 @@ */ package org.sleuthkit.autopsy.ingest; -//import java.awt.EventQueue; -//import java.util.concurrent.locks.Lock; -//import java.util.concurrent.locks.ReentrantReadWriteLock; -//import java.util.logging.Level; -//import org.sleuthkit.autopsy.coreutils.Logger; -//import javax.swing.SwingWorker; -//import org.netbeans.api.progress.ProgressHandle; -//import org.netbeans.api.progress.ProgressHandleFactory; -//import org.openide.util.Cancellable; -//import org.sleuthkit.autopsy.coreutils.PlatformUtil; -//import org.sleuthkit.autopsy.coreutils.StopWatch; -//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; -//import org.sleuthkit.datamodel.Content; +import java.awt.EventQueue; +import java.util.concurrent.locks.Lock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import javax.swing.SwingWorker; +import org.netbeans.api.progress.ProgressHandle; +import org.netbeans.api.progress.ProgressHandleFactory; +import org.openide.util.Cancellable; +import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.coreutils.StopWatch; +import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent; +import org.sleuthkit.datamodel.Content; /** * Worker thread that runs a data source-level ingest module (image, file set virt dir, etc). * Used to process only a single data-source and single module. 
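 *
 * Data source module runs are serialized on a single static fair lock
 * (dataSourceIngestModuleLock below), so the run discipline reduces to this
 * sketch:
 *
 *   dataSourceIngestModuleLock.lock();  // fair: threads acquire in enqueue order
 *   try {
 *       // init, process, and complete (or stop) exactly one module here
 *   } finally {
 *       dataSourceIngestModuleLock.unlock();
 *   }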
*/ -// class IngestDataSourceThread extends SwingWorker { -// -// private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName()); -// private ProgressHandle progress; -// private final Content dataSource; -// private final DataSourceIngestModule module; -// private IngestDataSourceWorkerController controller; -// private final IngestManager manager; -// private final IngestModuleInit init; -// private boolean inited; -// //current method of enqueuing data source ingest modules with locks and internal lock queue -// //ensures that we init, run and complete a single data source ingest module at a time -// //uses fairness policy to run them in order enqueued -// private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock(); -// -// IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module, IngestModuleInit init) { -// this.manager = manager; -// this.dataSource = dataSource; -// this.module = module; -// this.init = init; -// this.inited = false; -// } -// -// Content getContent() { -// return dataSource; -// } -// -// DataSourceIngestModule getModule() { -// return module; -// } -// -// public void init() { -// -// logger.log(Level.INFO, "Initializing module: " + module.getName()); -// try { -// module.init(dataSource.getId()); -// inited = true; -// } catch (Exception e) { -// logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run."); -// //will not run -// inited = false; -// throw e; -// } -// } -// -// @Override -// protected Void doInBackground() throws Exception { -// -// logger.log(Level.INFO, "Pending module: " + module.getName()); -// -// final String displayName = module.getName() + " dataSource id:" + dataSource.getId(); -// progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() { -// @Override -// public boolean cancel() { -// logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user."); -// if (progress != null) { -// progress.setDisplayName(displayName + " (Cancelling...)"); -// } -// return IngestDataSourceThread.this.cancel(true); -// } -// }); -// progress.start(); -// progress.switchToIndeterminate(); -// -// dataSourceIngestModuleLock.lock(); -// try { -// if (this.isCancelled()) { -// logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName()); -// return Void.TYPE.newInstance(); -// } -// logger.log(Level.INFO, "Starting module: " + module.getName()); -// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); -// progress.setDisplayName(displayName); -// -// if (inited == false) { -// logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName()); -// return Void.TYPE.newInstance(); -// } -// logger.log(Level.INFO, "Starting processing of module: " + module.getName()); -// -// controller = new IngestDataSourceWorkerController(this, progress); -// -// if (isCancelled()) { -// logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation."); -// return Void.TYPE.newInstance(); -// } -// final StopWatch timer = new StopWatch(); -// timer.start(); -// try { -// // RJCTODO -//// module.process(pipelineContext, dataSource, controller); -// } catch (Exception e) { -// logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e); -// } finally { -// timer.stop(); -// logger.log(Level.INFO, "Done processing of module: " + 
module.getName() -// + " took " + timer.getElapsedTimeSecs() + " secs. to process()"); -// -// -// //cleanup queues (worker and DataSource/module) -// manager.removeDataSourceIngestWorker(this); -// -// if (!this.isCancelled()) { -// logger.log(Level.INFO, "Module " + module.getName() + " completed"); -// try { -// module.complete(); -// } catch (Exception e) { -// logger.log(Level.INFO, "Error completing the module " + module.getName(), e); -// } -// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName()); -// } else { -// logger.log(Level.INFO, "Module " + module.getName() + " stopped"); -// try { -// module.stop(); -// } catch (Exception e) { -// logger.log(Level.INFO, "Error stopping the module" + module.getName(), e); -// } -// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName()); -// } -// -// } -// return Void.TYPE.newInstance(); -// } finally { -// //release the lock so next module can run -// dataSourceIngestModuleLock.unlock(); -// EventQueue.invokeLater(new Runnable() { -// @Override -// public void run() { -// progress.finish(); -// } -// }); -// logger.log(Level.INFO, "Done running module: " + module.getName()); -// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo()); -// } -// } -//} + class IngestDataSourceThread extends SwingWorker { + + private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName()); + private ProgressHandle progress; + private final Content dataSource; + private final DataSourceIngestModule module; + private IngestDataSourceWorkerController controller; + private final IngestManager manager; + private boolean inited; + //current method of enqueuing data source ingest modules with locks and internal lock queue + //ensures that we init, run and complete a single data source ingest module at a time + //uses fairness policy to run them in order enqueued + private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock(); + + IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module) { + this.manager = manager; + this.dataSource = dataSource; + this.module = module; + this.inited = false; + } + + Content getContent() { + return dataSource; + } + + DataSourceIngestModule getModule() { + return module; + } + + public void init() { + + logger.log(Level.INFO, "Initializing module: {0}", module.getDisplayName()); + try { + module.init(dataSource.getId()); + inited = true; + } catch (Exception e) { + logger.log(Level.INFO, "Failed initializing module: {0}, will not run.", module.getDisplayName()); + //will not run + inited = false; + throw e; + } + } + + @Override + protected Void doInBackground() throws Exception { + + logger.log(Level.INFO, "Pending module: {0}", module.getDisplayName()); + + final String displayName = module.getDisplayName() + " dataSource id:" + dataSource.getId(); + progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() { + @Override + public boolean cancel() { + logger.log(Level.INFO, "DataSource ingest module {0} cancelled by user.", module.getDisplayName()); + if (progress != null) { + progress.setDisplayName(displayName + " (Cancelling...)"); + } + return IngestDataSourceThread.this.cancel(true); + } + }); + progress.start(); + progress.switchToIndeterminate(); + + dataSourceIngestModuleLock.lock(); + try { + if (this.isCancelled()) { + logger.log(Level.INFO, "Cancelled while pending, module: {0}", module.getDisplayName()); + return 
Void.TYPE.newInstance();
+            }
+            logger.log(Level.INFO, "Starting module: {0}", module.getDisplayName());
+            logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+            progress.setDisplayName(displayName);
+
+            if (inited == false) {
+                logger.log(Level.INFO, "Module wasn''t initialized, will not run: {0}", module.getDisplayName());
+                return Void.TYPE.newInstance();
+            }
+            logger.log(Level.INFO, "Starting processing of module: {0}", module.getDisplayName());
+
+            controller = new IngestDataSourceWorkerController(this, progress);
+
+            if (isCancelled()) {
+                logger.log(Level.INFO, "Terminating DataSource ingest module {0} due to cancellation.", module.getDisplayName());
+                return Void.TYPE.newInstance();
+            }
+            final StopWatch timer = new StopWatch();
+            timer.start();
+            try {
+                // RJCTODO
+//                module.process(pipelineContext, dataSource, controller);
+            } catch (Exception e) {
+                logger.log(Level.WARNING, "Exception in module: " + module.getDisplayName() + " DataSource: " + dataSource.getName(), e);
+            } finally {
+                timer.stop();
+                logger.log(Level.INFO, "Done processing module {0}; process() took {1} secs.", new Object[]{module.getDisplayName(), timer.getElapsedTimeSecs()});
+
+                //cleanup queues (worker and DataSource/module)
+                manager.removeDataSourceIngestWorker(this);
+
+                if (!this.isCancelled()) {
+                    logger.log(Level.INFO, "Module {0} completed", module.getDisplayName());
+                    try {
+                        module.complete();
+                    } catch (Exception e) {
+                        logger.log(Level.INFO, "Error completing the module " + module.getDisplayName(), e);
+                    }
+                    IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
+                } else {
+                    logger.log(Level.INFO, "Module {0} stopped", module.getDisplayName());
+                    try {
+                        module.stop();
+                    } catch (Exception e) {
+                        logger.log(Level.INFO, "Error stopping the module " + module.getDisplayName(), e);
+                    }
+                    IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getDisplayName());
+                }
+
+            }
+            return Void.TYPE.newInstance();
+        } finally {
+            //release the lock so next module can run
+            dataSourceIngestModuleLock.unlock();
+            EventQueue.invokeLater(new Runnable() {
+                @Override
+                public void run() {
+                    progress.finish();
+                }
+            });
+            logger.log(Level.INFO, "Done running module: {0}", module.getDisplayName());
+            logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+        }
+    }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
index b7fb96c977..f12ec02370 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestDataSourceWorkerController.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
 * Contact: carrier sleuthkit org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,64 +18,64 @@
  */
 package org.sleuthkit.autopsy.ingest;
 
-//import org.netbeans.api.progress.ProgressHandle;
+import org.netbeans.api.progress.ProgressHandle;
 
-// RJCTODO: Rework or replace this code
+// RJCTODO: This could use a renaming, really don't want it long-term, but maybe need to keep it for 3.1 DISCUSS
 /**
  * Controller for DataSource level ingest modules
  * Used by modules to check task status and to post progress to
  */
-//public class IngestDataSourceWorkerController {
-//
-//    private IngestDataSourceThread worker;
-//    private ProgressHandle progress;
-//
-//    /**
-//     * Instantiate the controller for the worker
-//     * @param worker underlying DataSource ingest thread
-//     * @param progress the progress handle
-//     */
-//    IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
-//        this.worker = worker;
-//        this.progress = progress;
-//    }
-//
-//    /**
-//     * Check if the task has been cancelled. This should be polled by the module periodically
-//     * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup
-//     *
-//     * @return true if the task has been cancelled, false otherwise
-//     */
-//    public boolean isCancelled() {
-//        return worker.isCancelled();
-//    }
-//
-//    /**
-//     * Update the progress bar and switch to determinate mode once number of total work units is known
-//     * @param workUnits total number of work units for the DataSource ingest task
-//     */
-//    public void switchToDeterminate(int workUnits) {
-//        if (progress != null) {
-//            progress.switchToDeterminate(workUnits);
-//        }
-//    }
-//
-//    /**
-//     * Update the progress bar and switch to non determinate mode if number of work units is not known
-//     */
-//    public void switchToInDeterminate() {
-//        if (progress != null) {
-//            progress.switchToIndeterminate();
-//        }
-//    }
-//
-//    /**
-//     * Update the progress bar with the number of work units performed, if in the determinate mode
-//     * @param workUnits number of work units performed so far by the module
-//     */
-//    public void progress(int workUnits) {
-//        if (progress != null) {
-//            progress.progress(worker.getContent().getName(), workUnits);
-//        }
-//    }
-//}
\ No newline at end of file
+public class IngestDataSourceWorkerController {
+
+    private IngestDataSourceThread worker;
+    private ProgressHandle progress;
+
+    /**
+     * Instantiate the controller for the worker
+     * @param worker underlying DataSource ingest thread
+     * @param progress the progress handle
+     */
+    IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
+        this.worker = worker;
+        this.progress = progress;
+    }
+
+    /**
+     * Check if the task has been canceled. A module should poll this
+     * periodically; if it returns true, the module needs to act, i.e., break
+     * out of its processing loop and call its stop() method to clean up.
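+     *
+     * A typical polling pattern inside a module's processing loop might look
+     * like this sketch (workUnits and doOneUnitOfWork() are hypothetical):
+     *
+     *   for (int i = 0; i < workUnits; ++i) {
+     *       if (statusHelper.isCancelled()) {
+     *           break;
+     *       }
+     *       doOneUnitOfWork();
+     *       statusHelper.progress(i + 1);
+     *   }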
+     *
+     * @return true if the task has been canceled, false otherwise
+     */
+    public boolean isCancelled() {
+        return worker.isCancelled();
+    }
+
+    /**
+     * Update the progress bar and switch to determinate mode once the total
+     * number of work units is known
+     * @param workUnits total number of work units for the DataSource ingest task
+     */
+    public void switchToDeterminate(int workUnits) {
+        if (progress != null) {
+            progress.switchToDeterminate(workUnits);
+        }
+    }
+
+    /**
+     * Update the progress bar and switch to indeterminate mode if the number
+     * of work units is not known
+     */
+    public void switchToInDeterminate() {
+        if (progress != null) {
+            progress.switchToIndeterminate();
+        }
+    }
+
+    /**
+     * Update the progress bar with the number of work units performed, if in
+     * determinate mode
+     * @param workUnits number of work units performed so far by the module
+     */
+    public void progress(int workUnits) {
+        if (progress != null) {
+            progress.progress(worker.getContent().getName(), workUnits);
+        }
+    }
+}
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
index 3fe167deed..6c7c226568 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
@@ -58,7 +58,7 @@ public class IngestManager {
 //    private IngestManagerStats stats; // RJCTODO: Decide whether to reimplement
     private final IngestScheduler scheduler;
     private IngestAbstractFileProcessor abstractFileIngester;
-//    private List dataSourceIngesters; // RJCTODO: Adapt to new paradigm
+    private List dataSourceIngesters;
     private SwingWorker queueWorker;
 //    private final Map abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete
     private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class);
@@ -672,14 +672,13 @@ public class IngestManager {
         }
     }
 
-    // RJCTODO: Data source ingest is temporarily disabled
     //data source worker to remove itself when complete or interrupted
-//    void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
-//        //remove worker
-//        synchronized (this) {
-//            dataSourceIngesters.remove(worker);
-//        }
-//    }
+    void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
+        //remove worker
+        synchronized (this) {
+            dataSourceIngesters.remove(worker);
+        }
+    }
 
 // RJCTODO: Decide whether or not to reimplement this class
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java
deleted file mode 100644
index 013a123065..0000000000
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleInit.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2012 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */ -package org.sleuthkit.autopsy.ingest; - -/** - * - * Context passed to a module at initialization time. - * It may contain module configuration required to initialize some modules. - */ -public class IngestModuleInit { - -// private String moduleArgs; - - /** - * Get module arguments - * @return module args string, used by some modules - */ -// public String getModuleArgs() { -// return moduleArgs; -// } - - /** - * Sets module args. string (only used by module pipeline) - * @param moduleArgs arguments to set for the module - */ -// void setModuleArgs(String moduleArgs) { -// this.moduleArgs = moduleArgs; -// } -// - - - -} diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java index d8e33eed9b..82a4c1e7f9 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleLoader.java @@ -19,1075 +19,44 @@ package org.sleuthkit.autopsy.ingest; -import java.beans.PropertyChangeListener; -import java.beans.PropertyChangeSupport; -import java.io.File; -import java.io.FilenameFilter; -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.lang.reflect.Modifier; -import java.lang.reflect.Type; -import java.net.MalformedURLException; -import java.net.URL; -import java.net.URLDecoder; -import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.logging.Level; -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import org.openide.modules.ModuleInfo; -import org.openide.util.Exceptions; import org.openide.util.Lookup; -import org.openide.util.LookupEvent; -import org.openide.util.LookupListener; -import org.reflections.Reflections; -import org.reflections.scanners.ResourcesScanner; -import org.reflections.scanners.SubTypesScanner; -import org.reflections.util.ConfigurationBuilder; -import org.reflections.util.FilterBuilder; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; -import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.coreutils.XMLUtil; -import org.w3c.dom.Comment; -import org.w3c.dom.Document; -import org.w3c.dom.Element; -import org.w3c.dom.NodeList; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -// RJCTODO: Rewrite comment, complete reworking of class -/** - * Class responsible for discovery and loading ingest modules specified in - * pipeline XML file. Maintains a singleton instance. Requires restart of - * application for changes in XML to take effect. - * - * Supports module auto-discovery from system-wide and user-dir wide jar files. - * Discovered modules are validated, and if valid, they are added to end of - * configuration and saved in the XML. - * - * If module is removed/uninstalled, it will remain in the XML file, but it will - * not load because it will fail the validation. - * - * Get a handle to the object by calling static getDefault() method. 
The - * singleton instance will initialize itself the first time - it will load XML - * and autodiscover currently present ingest modules in the jar classpath.. - * - * - * Refer to - * http://sleuthkit.org/sleuthkit/docs/framework-docs/pipeline_config_page.html - * for the pipeline XML fiel schema details. - * - * NOTE: this will be part of future IngestPipelineManager with IngestManager - * code refactored - */ - final class IngestModuleLoader { - private static IngestModuleLoader instance; - private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); - private ArrayList moduleFactories = new ArrayList<>(); - private PropertyChangeSupport pcs; -// private static final String PIPELINE_CONFIG_XML = "pipeline_config.xml"; -// private static final String XSDFILE = "PipelineConfigSchema.xsd"; -// private String absFilePath; - //raw XML pipeline representation for validation -// private final List pipelinesXML; - //validated pipelines with instantiated modules -// private final List filePipeline; -// private final List dataSourcePipeline; -// private ClassLoader classLoader; -// private static final String ENCODING = "UTF-8"; -// private static final String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss"; -// private SimpleDateFormat dateFormatter; - //used to specify default unique module order of autodiscovered modules - //if not specified -// private int numModDiscovered = 0; -// private static String CUR_MODULES_DISCOVERED_SETTING = "curModulesDiscovered"; - - //events supported - enum Event { - ModulesReloaded - }; +// RJCTODO: Comment +final class IngestModuleLoader { + private static IngestModuleLoader instance; + private ArrayList moduleFactories = new ArrayList<>(); private IngestModuleLoader() { -// pipelinesXML = new ArrayList<>(); -// filePipeline = new ArrayList(); -// dataSourcePipeline = new ArrayList(); -// dateFormatter = new SimpleDateFormat(DATE_FORMAT); -// -// String numModDiscoveredStr = ModuleSettings.getConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING); -// if (numModDiscoveredStr != null) { -// try { -// numModDiscovered = Integer.valueOf(numModDiscoveredStr); -// } catch (NumberFormatException e) { -// numModDiscovered = 0; -// logger.log(Level.WARNING, "Could not parse numModDiscovered setting, defaulting to 0", e); -// } -// } - - pcs = new PropertyChangeSupport(this); -// registerModulesChange(); } - synchronized static IngestModuleLoader getDefault() /*throws IngestModuleLoaderException*/ { + synchronized static IngestModuleLoader getDefault() { if (instance == null) { - logger.log(Level.INFO, "Creating ingest module loader instance"); + Logger.getLogger(IngestModuleLoader.class.getName()).log(Level.INFO, "Creating ingest module loader instance"); instance = new IngestModuleLoader(); instance.init(); } return instance; } - /** - * Add a listener to listen for modules reloaded events such as when new - * modules have been added / removed / reconfigured - * - * @param l listener to add - */ - void addModulesReloadedListener(PropertyChangeListener l) { - pcs.addPropertyChangeListener(l); - } - - /** - * Remove a listener to listen for modules reloaded events such as when new - * modules have been added / removed / reconfigured - * - * @param l listener to remove - */ - void removeModulesReloadedListener(PropertyChangeListener l) { - pcs.removePropertyChangeListener(l); - } - - /** - * validate raw pipeline, set valid to true member on pipeline and modules - * if valid log if invalid - * - * valid pipeline: valid 
pipeline type, modules have unique ordering - * - * valid module: module class exists, module can be loaded, module - * implements correct interface, module has proper methods and modifiers to - * create an instance - * - * @throws IngestModuleLoaderException - */ -// private void validate() throws IngestModuleLoaderException { -// for (IngestModuleLoader.IngestPipelineXMLDescriptor pRaw : pipelinesXML) { -// boolean pipelineErrors = false; -// -// //check pipelineType -// String pipelineType = pRaw.type; -// -// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pType = null; -// -// try { -// pType = IngestModuleLoader.IngestPipelineXMLDescriptor.getPipelineType(pipelineType); -// } catch (IllegalArgumentException e) { -// pipelineErrors = true; -// logger.log(Level.SEVERE, "Unknown pipeline type: " + pipelineType); -// -// } -// //ordering store -// Map orderings = new HashMap(); -// -// for (IngestModuleLoader.IngestModuleXMLDescriptor pMod : pRaw.modules) { -// boolean moduleErrors = false; -// -// //record ordering for validation -// int order = pMod.order; -// if (orderings.containsKey(order)) { -// orderings.put(order, orderings.get(order) + 1); -// } else { -// orderings.put(order, 1); -// } -// -// //check pipelineType -// String modType = pMod.type; -// if (!modType.equals(IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString())) { -// moduleErrors = true; -// logger.log(Level.SEVERE, "Unknown module type: " + modType); -// } -// -// //classes exist and interfaces implemented -// String location = pMod.location; -// try { -// //netbeans uses custom class loader, otherwise can't load classes from other modules -// -// final Class moduleClass = Class.forName(location, false, classLoader); -// final Type intf = moduleClass.getGenericSuperclass(); -// -// if (pType != null) { -// Class moduleMeta = ((IngestModuleMapping) pType).getIngestModuleInterface(); -// String moduleIntNameCan = moduleMeta.getCanonicalName(); -// String[] moduleIntNameTok = moduleIntNameCan.split(" "); -// String moduleIntName = moduleIntNameTok[moduleIntNameTok.length - 1]; -// -// String intNameCan = intf.toString(); -// String[] intNameCanTok = intNameCan.split(" "); -// String intName = intNameCanTok[intNameCanTok.length - 1]; -// if (!intName.equals(moduleIntName)) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location -// + " does not implement correct interface: " + moduleMeta.getName() -// + " required for pipeline: " + pType.toString() -// + ", module will not be active."); -// } -// } else { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " does not implement any interface, module will not be active."); -// } -// -// //if file module: check if has public static getDefault() -// if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.FILE_ANALYSIS) { -// try { -// Method getDefaultMethod = moduleClass.getMethod("getDefault"); -// int modifiers = getDefaultMethod.getModifiers(); -// if (!(Modifier.isPublic(modifiers) && Modifier.isStatic(modifiers))) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " does not implement public static getDefault() singleton method."); -// } -// if (!getDefaultMethod.getReturnType().equals(moduleClass)) { -// logger.log(Level.WARNING, "Module class: " + location + " getDefault() singleton method should return the module class instance: " + moduleClass.getName()); -// } -// -// } catch (NoSuchMethodException ex) { -// 
Exceptions.printStackTrace(ex); -// } catch (SecurityException ex) { -// Exceptions.printStackTrace(ex); -// } -// } //if data source module: check if has public constructor with no args -// else if (pType == IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS) { -// try { -// Constructor constr = moduleClass.getConstructor(); -// int modifiers = constr.getModifiers(); -// if (!Modifier.isPublic(modifiers)) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } -// } catch (NoSuchMethodException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } catch (SecurityException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " lacks a public default constructor."); -// } -// } -// -// } catch (ClassNotFoundException ex) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " not found, module will not be active."); -// -// } catch (LinkageError le) { -// moduleErrors = true; -// logger.log(Level.WARNING, "Module class: " + location + " has unresolved symbols, module will not be active.", le); -// } -// -// -// //validate ordering -// for (int o : orderings.keySet()) { -// int count = orderings.get(o); -// if (count > 1) { -// pipelineErrors = true; -// logger.log(Level.SEVERE, "Pipeline " + pipelineType + " invalid non-unique ordering of modules, order: " + o); -// } -// } -// -// pMod.valid = !moduleErrors; -// logger.log(Level.INFO, "Module " + pMod.location + " valid: " + pMod.valid); -// } //end module -// -// pRaw.valid = !pipelineErrors; -// logger.log(Level.INFO, "Pipeline " + pType + " valid: " + pRaw.valid); -// } //end pipeline -// -// } - -// private Set getJarPaths(String modulesDir) { -// Set urls = new HashSet(); -// -// final File modulesDirF = new File(modulesDir); -// FilenameFilter jarFilter = new FilenameFilter() { -// @Override -// public boolean accept(File dir, String name) { -// return dir.equals(modulesDirF) && name.endsWith(".jar"); -// } -// }; -// File[] dirJars = modulesDirF.listFiles(jarFilter); -// if (dirJars != null) { -// //modules dir exists -// for (int i = 0; i < dirJars.length; ++i) { -// String urlPath = "file:/" + dirJars[i].getAbsolutePath(); -// try { -// urlPath = URLDecoder.decode(urlPath, ENCODING); -// } catch (UnsupportedEncodingException ex) { -// logger.log(Level.SEVERE, "Could not decode file path. 
", ex); -// } -// -// try { -// urls.add(new URL(urlPath)); -// //logger.log(Level.INFO, "JAR: " + urlPath); -// } catch (MalformedURLException ex) { -// logger.log(Level.WARNING, "Invalid URL: " + urlPath, ex); -// } -// } -// } -// -// /* -// * netbeans way, but not public API -// org.openide.filesystems.Repository defaultRepository = Repository.getDefault(); -// FileSystem masterFilesystem = defaultRepository.getDefaultFileSystem(); -// org.netbeans.core.startup.ModuleSystem moduleSystem = new org.netbeans.core.startup.ModuleSystem(masterFilesystem); -// List jars = moduleSystem.getModuleJars(); -// for (File jar : jars) { -// logger.log(Level.INFO, " JAR2: " + jar.getAbsolutePath()); -// } -// //org.netbeans.ModuleManager moduleManager = moduleSystem.getManager(); -// */ -// -// return urls; -// } - - /** - * Get jar paths of autodiscovered modules - * - * @param moduleInfos to look into to discover module jar paths - * @return - */ -// private Set getJarPaths(Collection moduleInfos) { -// Set urls = new HashSet(); -// -// //TODO lookup module jar file paths by "seed" class or resource, using the module loader -// //problem: we don't have a reliable "seed" class in every moduke -// //and loading by Bundle.properties resource does not seem to work with the module class loader -// //for now hardcoding jar file locations -// -// /* -// for (ModuleInfo moduleInfo : moduleInfos) { -// -// if (moduleInfo.isEnabled() == false) { -// continue; -// } -// -// String basePackageName = moduleInfo.getCodeNameBase(); -// if (basePackageName.startsWith("org.netbeans") -// || basePackageName.startsWith("org.openide")) { -// //skip -// continue; -// } -// -// -// ClassLoader moduleClassLoader = moduleInfo.getClassLoader(); -// -// URL modURL = moduleClassLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL1 : " + modURL); -// -// modURL = moduleClassLoader.getParent().getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL); -// -// modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL); -// } */ -// /* -// URL modURL = moduleClassLoader.getParent().getResource("Bundle.properties"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// modURL = moduleClassLoader.getResource(basePackageName + ".Bundle.properties"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// modURL = moduleClassLoader.getResource("Bundle"); -// //URL modURL = classLoader.getResource(basePackageName); -// logger.log(Level.INFO, "GOT MOD URL : " + modURL); -// -// Class modClass; -// try { -// modClass = classLoader.loadClass(basePackageName + ".Installer"); -// URL modURL2 = modClass.getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT MOD URL2 : " + modURL2); -// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// try { -// Class moduleBundleClass = -// Class.forName(basePackageName, false, classLoader); -// URL modURL3 = moduleBundleClass.getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT MOD URL3 : " + modURL3); -// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// -// -// URL urltry; -// try { -// urltry = moduleClassLoader.loadClass("Bundle").getProtectionDomain().getCodeSource().getLocation(); -// logger.log(Level.INFO, "GOT TRY URL : " + urltry); 
-// } catch (ClassNotFoundException ex) { -// // Exceptions.printStackTrace(ex); -// } -// -// } -// * */ -// -// //core modules -// urls.addAll(getJarPaths(PlatformUtil.getInstallModulesPath())); -// -// //user modules -// urls.addAll(getJarPaths(PlatformUtil.getUserModulesPath())); -// -// // add other project dirs, such as from external modules -// for (String projectDir : PlatformUtil.getProjectsDirs()) { -// File modules = new File(projectDir + File.separator + "modules"); -// if (modules.exists()) { -// urls.addAll(getJarPaths(modules.getAbsolutePath())); -// } -// } -// -// -// -// return urls; -// } - - List getIngestModuleFactories() { - return moduleFactories; - } - - /** - * Auto-discover ingest modules in all platform modules that are "enabled" - * If discovered ingest module is not already in XML config, add it do - * config and add to in-memory pipeline. - * - * @throws IngestModuleLoaderException - */ -// @SuppressWarnings("unchecked") - private void autodiscover() /*throws IngestModuleLoaderException*/ { - + private void init() { + // RJCTODO: Add code to listen to changes in the collections, possibly restore listener code... + // RJCTODO: Since we were going to overwrite pipeline config every time and we are going to move the code modules + // into this package, we can simply handle the module ordering here, possibly just directly instantiating the core + // modules. + Logger logger = Logger.getLogger(IngestModuleLoader.class.getName()); Collection factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class); for (IngestModuleFactory factory : factories) { logger.log(Level.INFO, "Loaded ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()}); moduleFactories.add(factory); - } - -// // Use Lookup to find the other NBM modules. We'll later search them for ingest modules -// Collection moduleInfos = Lookup.getDefault().lookupAll(ModuleInfo.class); -// logger.log(Level.INFO, "Autodiscovery, found #platform modules: " + moduleInfos.size()); -// -// Set urls = getJarPaths(moduleInfos); -// ArrayList reflectionsSet = new ArrayList<>(); -// -// for (final ModuleInfo moduleInfo : moduleInfos) { -// if (moduleInfo.isEnabled()) { -// /* NOTE: We have an assumption here that the modules in an NBM will -// * have the same package name as the NBM name. This means that -// * an NBM can have only one package with modules in it. */ -// String basePackageName = moduleInfo.getCodeNameBase(); -// -// // skip the standard ones -// if (basePackageName.startsWith("org.netbeans") -// || basePackageName.startsWith("org.openide")) { -// continue; -// } -// -// logger.log(Level.INFO, "Found module: " + moduleInfo.getDisplayName() + " " + basePackageName -// + " Build version: " + moduleInfo.getBuildVersion() -// + " Spec version: " + moduleInfo.getSpecificationVersion() -// + " Impl version: " + moduleInfo.getImplementationVersion()); -// -// ConfigurationBuilder cb = new ConfigurationBuilder(); -// cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix(basePackageName))); -// cb.setUrls(urls); -// cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); -// reflectionsSet.add(new Reflections(cb)); -// } -// else { -// // log if we have our own modules disabled -// if (moduleInfo.getCodeNameBase().startsWith("org.sleuthkit")) { -// logger.log(Level.WARNING, "Sleuth Kit Module not enabled: " + moduleInfo.getDisplayName()); -// } -// } -// } - - /* This area is used to load the example modules. 
They are not found via lookup since they - * are in this NBM module. - * Uncomment this section to rum the examples. - */ - /* - ConfigurationBuilder cb = new ConfigurationBuilder(); - cb.filterInputsBy(new FilterBuilder().include(FilterBuilder.prefix("org.sleuthkit.autopsy.examples"))); - cb.setUrls(urls); - cb.setScanners(new SubTypesScanner(), new ResourcesScanner()); - reflectionsSet.add(new Reflections(cb)); - */ - -// for (Reflections reflections : reflectionsSet) { -// -// Set fileModules = reflections.getSubTypesOf(IngestModuleAbstractFile.class); -// Iterator it = fileModules.iterator(); -// while (it.hasNext()) { -// logger.log(Level.INFO, "Found file ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); -// } -// -// Set dataSourceModules = reflections.getSubTypesOf(IngestModuleDataSource.class); -// it = dataSourceModules.iterator(); -// while (it.hasNext()) { -// logger.log(Level.INFO, "Found DataSource ingest module in: " + reflections.getClass().getSimpleName() + ": " + it.next().toString()); -// } -// -// if ((fileModules.isEmpty()) && (dataSourceModules.isEmpty())) { -// logger.log(Level.INFO, "Module has no ingest modules: " + reflections.getClass().getSimpleName()); -// continue; -// } -// -// //find out which modules to add -// //TODO check which modules to remove (which modules were uninstalled) -// boolean modulesChanged = false; -// -// it = fileModules.iterator(); -// while (it.hasNext()) { -// boolean exists = false; -// Class foundClass = (Class) it.next(); -// -// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { -// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS.toString())) { -// continue; //skip -// } -// -// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { -// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); -// if (foundClass.getName().equals(rawM.location)) { -// exists = true; -// break; -// } -// } -// if (exists == true) { -// break; -// } -// } -// -// if (exists == false) { -// logger.log(Level.INFO, "Discovered a new file module to load: " + foundClass.getName()); -// //ADD MODULE -// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.FILE_ANALYSIS); -// modulesChanged = true; -// } -// -// } -// -// it = dataSourceModules.iterator(); -// while (it.hasNext()) { -// boolean exists = false; -// Class foundClass = (Class) it.next(); -// -// for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) { -// if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS.toString())) { -// continue; //skip -// } -// -// -// for (IngestModuleLoader.XmlModuleRaw rawM : rawP.modules) { -// //logger.log(Level.INFO, "CLASS NAME : " + foundClass.getName()); -// if (foundClass.getName().equals(rawM.location)) { -// exists = true; -// break; -// } -// } -// if (exists == true) { -// break; -// } -// } -// -// if (exists == false) { -// logger.log(Level.INFO, "Discovered a new DataSource module to load: " + foundClass.getName()); -// //ADD MODULE -// addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS); -// modulesChanged = true; -// } -// -// } -// -// if (modulesChanged) { -// save(); -// -// try { -// pcs.firePropertyChange(IngestModuleLoader.Event.ModulesReloaded.toString(), 0, 1); -// } -// catch (Exception e) { -// logger.log(Level.SEVERE, "IngestModuleLoader listener threw exception", e); -// MessageNotifyUtil.Notify.show("Module 
Error", "A module caused an error listening to IngestModuleLoader updates. See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); -// } -// } -// } + } } - /** - * Set a new order of the module - * - * @param pipeLineType pipeline type where the module to reorder is present - * @param moduleLocation loaded module name (location), fully qualified - * class path - * @param newOrder new order to set - */ -// void setModuleOrder(IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipeLineType, String moduleLocation, int newOrder) throws IngestModuleLoaderException { -// throw new IngestModuleLoaderException("Not yet implemented"); -// } - - /** - * add autodiscovered module to raw pipeline to be validated and - * instantiated - * - * @param moduleClass - * @param pipelineType - */ -// private void addModuleToRawPipeline(Class moduleClass, IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE pipelineType) throws IngestModuleLoaderException { -// String moduleLocation = moduleClass.getName(); -// -// IngestModuleLoader.IngestModuleXMLDescriptor modRaw = new IngestModuleLoader.IngestModuleXMLDescriptor(); -// modRaw.arguments = ""; //default, no arguments -// modRaw.location = moduleLocation; -// modRaw.order = Integer.MAX_VALUE - (numModDiscovered++); //add to end -// modRaw.type = IngestModuleLoader.IngestModuleXMLDescriptor.MODULE_TYPE.PLUGIN.toString(); -// modRaw.valid = false; //to be validated -// -// //save the current numModDiscovered -// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); -// -// //find the pipeline of that type -// IngestModuleLoader.IngestPipelineXMLDescriptor pipeline = null; -// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { -// if (rawP.type.equals(pipelineType.toString())) { -// pipeline = rawP; -// break; -// } -// } -// if (pipeline == null) { -// throw new IngestModuleLoaderException("Could not find expected pipeline of type: " + pipelineType.toString() + ", cannot add autodiscovered module: " + moduleLocation); -// } else { -// pipeline.modules.add(modRaw); -// logger.log(Level.INFO, "Added a new module " + moduleClass.getName() + " to pipeline " + pipelineType.toString()); -// } -// } - - /** - * Register a listener for module install/uninstall //TODO ensure that - * module is actually loadable when Lookup event is fired - */ -// private void registerModulesChange() { -// final Lookup.Result result = -// Lookup.getDefault().lookupResult(ModuleInfo.class); -// result.addLookupListener(new LookupListener() { -// @Override -// public void resultChanged(LookupEvent event) { -// try { -// logger.log(Level.INFO, "Module change occured, reloading."); -// init(); -// } catch (IngestModuleLoaderException ex) { -// logger.log(Level.SEVERE, "Error reloading the module loader. 
", ex); -// } -// } -// }); -// } - - // RJCTODO: This is not used - /** - * Save the current in memory pipeline config, including autodiscovered - * modules - * - * @throws IngestModuleLoaderException - */ -// public void save() throws IngestModuleLoaderException { -// DocumentBuilderFactory dbfac = DocumentBuilderFactory.newInstance(); -// -// try { -// DocumentBuilder docBuilder = dbfac.newDocumentBuilder(); -// Document doc = docBuilder.newDocument(); -// -// -// Comment comment = doc.createComment("Saved by: " + getClass().getName() -// + " on: " + dateFormatter.format(System.currentTimeMillis())); -// doc.appendChild(comment); -// Element rootEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_ROOT); -// doc.appendChild(rootEl); -// -// for (IngestModuleLoader.IngestPipelineXMLDescriptor rawP : this.pipelinesXML) { -// Element pipelineEl = doc.createElement(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); -// pipelineEl.setAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR, rawP.type); -// rootEl.appendChild(pipelineEl); -// -// for (IngestModuleLoader.IngestModuleXMLDescriptor rawM : rawP.modules) { -// Element moduleEl = doc.createElement(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); -// -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR, rawM.location); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR, Integer.toString(rawM.order)); -// moduleEl.setAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR, rawM.type); -// -// pipelineEl.appendChild(moduleEl); -// } -// } -// -// XMLUtil.saveDoc(IngestModuleLoader.class, absFilePath, ENCODING, doc); -// logger.log(Level.INFO, "Pipeline configuration saved to: " + this.absFilePath); -// } catch (ParserConfigurationException e) { -// logger.log(Level.SEVERE, "Error saving pipeline config XML: can't initialize parser.", e); -// } -// -// } - - /** - * Instantiate valid pipeline and modules and store the module object - * references - * - * @throws IngestModuleLoaderException - */ -// @SuppressWarnings("unchecked") - private void instantiate() /*throws IngestModuleLoaderException*/ { - - //clear current -// filePipeline.clear(); -// dataSourcePipeline.clear(); - - //add autodiscovered modules to pipelinesXML - autodiscover(); - - //validate all modules: from XML + just autodiscovered - -// validate(); -// -// for (IngestModuleLoader.XmlPipelineRaw pRaw : pipelinesXML) { -// if (pRaw.valid == false) { -// //skip invalid pipelines -// continue; -// } -// -// //sort modules by order parameter, in case XML order is different -// Collections.sort(pRaw.modules, new Comparator() { -// @Override -// public int compare(IngestModuleLoader.XmlModuleRaw o1, IngestModuleLoader.XmlModuleRaw o2) { -// return Integer.valueOf(o1.order).compareTo(Integer.valueOf(o2.order)); -// } -// }); -// -// //check pipelineType, add to right pipeline collection -// IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE pType = IngestModuleLoader.XmlPipelineRaw.getPipelineType(pRaw.type); -// -// for (IngestModuleLoader.XmlModuleRaw pMod : pRaw.modules) { -// try { -// if (pMod.valid == false) { -// //skip invalid modules -// continue; -// } -// -// //add to right pipeline -// switch (pType) { -// case FILE_ANALYSIS: -// IngestModuleAbstractFile fileModuleInstance = null; -// 
final Class fileModuleClass = -// (Class) Class.forName(pMod.location, true, classLoader); -// try { -// Method getDefaultMethod = fileModuleClass.getMethod("getDefault"); -// if (getDefaultMethod != null) { -// fileModuleInstance = (IngestModuleAbstractFile) getDefaultMethod.invoke(null); -// } -// } catch (NoSuchMethodException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (SecurityException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (IllegalAccessException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } catch (InvocationTargetException ex) { -// logger.log(Level.WARNING, "Validated module, but not public getDefault() found: " + pMod.location); -// pMod.valid = false; //prevent from trying to load again -// } -// -// filePipeline.add(fileModuleInstance); -// break; -// case DATA_SOURCE_ANALYSIS: -// final Class dataSourceModuleClass = -// (Class) Class.forName(pMod.location, true, classLoader); -// -// try { -// Constructor constr = dataSourceModuleClass.getConstructor(); -// IngestModuleDataSource dataSourceModuleInstance = constr.newInstance(); -// -// if (dataSourceModuleInstance != null) { -// dataSourcePipeline.add(dataSourceModuleInstance); -// } -// -// } catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) { -// logger.log(Level.WARNING, "Validated module, could not initialize, check for bugs in the module: " + pMod.location, ex); -// pMod.valid = false; -// } -// -// -// break; -// default: -// logger.log(Level.SEVERE, "Unexpected pipeline type to add module to: " + pType); -// } -// -// -// } catch (ClassNotFoundException ex) { -// logger.log(Level.SEVERE, "Validated module, but could not load (shouldn't happen): " + pMod.location); -// } -// } -// -// } //end instantiating modules in XML - } - - /** - * Get a new instance of the module or null if could not be created - * - * @param module existing module to get an instance of - * @return new module instance or null if could not be created - */ -// IngestModuleAbstract getNewIngestModuleInstance(IngestModuleAbstract module) { -// try { -// IngestModuleAbstract newInstance = module.getClass().newInstance(); -// return newInstance; -// } catch (InstantiationException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); -// return null; -// } catch (IllegalAccessException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + module.getName(), e); -// return null; -// } -// -// } - -// private IngestModuleAbstract getNewIngestModuleInstance(Class moduleClass) { -// try { -// IngestModuleAbstract newInstance = moduleClass.newInstance(); -// return newInstance; -// } catch (InstantiationException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); -// return null; -// } catch (IllegalAccessException e) { -// logger.log(Level.SEVERE, "Cannot instantiate module: " + moduleClass.getName(), e); -// return null; -// } -// -// } - - /** - * Load XML into raw pipeline representation - * - * @throws IngestModuleLoaderException - */ -// private void 
loadRawPipeline() throws IngestModuleLoaderException { -// final Document doc = XMLUtil.loadDoc(IngestModuleLoader.class, absFilePath, XSDFILE); -// if (doc == null) { -// throw new IngestModuleLoaderException("Could not load pipeline config XML: " + this.absFilePath); -// } -// Element root = doc.getDocumentElement(); -// if (root == null) { -// String msg = "Error loading pipeline configuration: invalid file format."; -// logger.log(Level.SEVERE, msg); -// throw new IngestModuleLoaderException(msg); -// } -// NodeList pipelineNodes = root.getElementsByTagName(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_EL); -// int numPipelines = pipelineNodes.getLength(); -// if (numPipelines == 0) { -// throw new IngestModuleLoaderException("No pipelines found in the pipeline configuration: " + absFilePath); -// } -// for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { -// //process pipelines -// Element pipelineEl = (Element) pipelineNodes.item(pipelineNum); -// final String pipelineType = pipelineEl.getAttribute(IngestModuleLoader.IngestPipelineXMLDescriptor.XML_PIPELINE_TYPE_ATTR); -// logger.log(Level.INFO, "Found pipeline type: " + pipelineType); -// -// IngestModuleLoader.IngestPipelineXMLDescriptor pipelineRaw = new IngestModuleLoader.IngestPipelineXMLDescriptor(); -// pipelineRaw.type = pipelineType; -// this.pipelinesXML.add(pipelineRaw); -// -// //process modules -// NodeList modulesNodes = pipelineEl.getElementsByTagName(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_EL); -// int numModules = modulesNodes.getLength(); -// if (numModules == 0) { -// logger.log(Level.WARNING, "Pipeline: " + pipelineType + " has no modules defined."); -// } -// for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { -// //process modules -// Element moduleEl = (Element) modulesNodes.item(moduleNum); -// final String moduleType = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_TYPE_ATTR); -// final String moduleOrder = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ORDER_ATTR); -// final String moduleLoc = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_LOC_ATTR); -// final String moduleArgs = moduleEl.getAttribute(IngestModuleLoader.IngestModuleXMLDescriptor.XML_MODULE_ARGS_ATTR); -// IngestModuleLoader.IngestModuleXMLDescriptor module = new IngestModuleLoader.IngestModuleXMLDescriptor(); -// module.arguments = moduleArgs; -// module.location = moduleLoc; -// try { -// module.order = Integer.parseInt(moduleOrder); -// } catch (NumberFormatException e) { -// logger.log(Level.WARNING, "Invalid module order, need integer: " + moduleOrder + ", adding to end of the list"); -// module.order = Integer.MAX_VALUE - (numModDiscovered++); -// //save the current numModDiscovered -// ModuleSettings.setConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING, Integer.toString(numModDiscovered)); -// -// } -// module.type = moduleType; -// pipelineRaw.modules.add(module); -// } -// -// } -// -// } - - /** - * Load and validate XML pipeline, autodiscover and instantiate the pipeline - * modules Can be called multiple times to refresh the view of modules - * - * @throws IngestModuleLoaderException - */ - public synchronized void init() /*throws IngestModuleLoaderException*/ { -// absFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINE_CONFIG_XML; -// ClassLoader parentClassLoader = Lookup.getDefault().lookup(ClassLoader.class); -// classLoader 
= new CustomClassLoader(parentClassLoader); -// -// try { -// boolean extracted = PlatformUtil.extractResourceToUserConfigDir(IngestModuleLoader.class, PIPELINE_CONFIG_XML); -// } catch (IOException ex) { -// logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir ", ex); -// } -// -// //load the pipeline config -// loadRawPipeline(); - - instantiate(); - - - } - -// private static final class IngestPipelineXMLDescriptor { -// -// enum PIPELINE_TYPE implements IngestModuleMapping { -// -// FILE_ANALYSIS { -// @Override -// public String toString() { -// return "FileAnalysis"; -// } -// -// @Override -// public Class getIngestModuleInterface() { -// return IngestModuleAbstractFile.class; -// } -// }, -// DATA_SOURCE_ANALYSIS { -// @Override -// public String toString() { -// return "ImageAnalysis"; -// } -// -// @Override -// public Class getIngestModuleInterface() { -// return IngestModuleDataSource.class; -// } -// },; -// } -// -// /** -// * get pipeline type for string mapping to type toString() method -// * -// * @param s string equals to one of the types toString() representation -// * @return matching type -// */ -// static IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE getPipelineType(String s) throws IllegalArgumentException { -// IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE[] types = IngestModuleLoader.IngestPipelineXMLDescriptor.PIPELINE_TYPE.values(); -// for (int i = 0; i < types.length; ++i) { -// if (types[i].toString().equals(s)) { -// return types[i]; -// } -// } -// throw new IllegalArgumentException("No PIPELINE_TYPE for string: " + s); -// } -// private static final String XML_PIPELINE_ROOT = "PIPELINE_CONFIG"; -// private static final String XML_PIPELINE_EL = "PIPELINE"; -// private static final String XML_PIPELINE_TYPE_ATTR = "type"; -// String type; -// List modules = new ArrayList<>(); -// boolean valid = false; // if passed validation -// } -// -// private static class IngestModuleXMLDescriptor { -// -// enum MODULE_TYPE { -// PLUGIN { -// @Override -// public String toString() { -// return "plugin"; -// } -// }; -// } -// -// private static final String XML_MODULE_EL = "MODULE"; -// private static final String XML_MODULE_ORDER_ATTR = "order"; -// private static final String XML_MODULE_TYPE_ATTR = "type"; -// private static final String XML_MODULE_LOC_ATTR = "location"; -// private static final String XML_MODULE_ARGS_ATTR = "arguments"; -// int order; -// String type; -// String location; -// String arguments; -// boolean valid = false; // if passed validation -// } -//} - -/** - * Exception thrown when errors occur while loading modules - */ -//class IngestModuleLoaderException extends Throwable { -// -// public IngestModuleLoaderException(String message) { -// super(message); -// } -// -// public IngestModuleLoaderException(String message, Throwable cause) { -// super(message, cause); -// } -//} - -/** - * Implements mapping of a type to ingest module interface type - */ -//interface IngestModuleMapping { -// -// /** -// * Get ingest module interface mapped to that type -// * -// * @return ingest module interface meta type -// */ -// public Class getIngestModuleInterface(); -//} - -/** - * Custom class loader that attempts to force class resolution / linkage validation at loading - */ -//class CustomClassLoader extends ClassLoader { -// private static final Logger logger = Logger.getLogger(CustomClassLoader.class.getName()); -// -// CustomClassLoader(ClassLoader parent) { -// super(parent); -// } -// -// -// 
@Override -// public Class loadClass(String name) throws ClassNotFoundException { -// logger.log(Level.INFO, "Custom loading class: " + name); -// -// Class cl = super.loadClass(name, true); -// -// return cl; -// } -} + List getIngestModuleFactories() { + return moduleFactories; + } +} \ No newline at end of file diff --git a/ExifParser/nbproject/project.xml b/ExifParser/nbproject/project.xml index da91e0b898..15439ab1c2 100644 --- a/ExifParser/nbproject/project.xml +++ b/ExifParser/nbproject/project.xml @@ -6,6 +6,14 @@ org.sleuthkit.autopsy.exifparser + + org.openide.util.lookup + + + + 8.19.1 + + org.sleuthkit.autopsy.core diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java index 49bc81e4ea..036b6be06c 100644 --- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java +++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2013 Basis Technology Corp. + * Copyright 2011-2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,229 +16,193 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -//package org.sleuthkit.autopsy.exifparser; -// -//import com.drew.imaging.ImageMetadataReader; -//import com.drew.imaging.ImageProcessingException; -//import com.drew.lang.GeoLocation; -//import com.drew.lang.Rational; -//import com.drew.metadata.Metadata; -//import com.drew.metadata.exif.ExifIFD0Directory; -//import com.drew.metadata.exif.ExifSubIFDDirectory; -//import com.drew.metadata.exif.GpsDirectory; -//import java.io.BufferedInputStream; -//import java.io.IOException; -//import java.io.InputStream; -//import java.util.ArrayList; -//import java.util.Collection; -//import java.util.Date; -//import java.util.logging.Level; -//import org.sleuthkit.autopsy.coreutils.ImageUtils; -//import org.sleuthkit.autopsy.coreutils.Logger; -//import org.sleuthkit.autopsy.coreutils.Version; -//import org.sleuthkit.autopsy.ingest.IngestServices; -//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile; -//import org.sleuthkit.autopsy.ingest.IngestModuleInit; -//import org.sleuthkit.autopsy.ingest.ModuleDataEvent; -//import org.sleuthkit.datamodel.AbstractFile; -//import org.sleuthkit.datamodel.BlackboardArtifact; -//import org.sleuthkit.datamodel.BlackboardAttribute; -//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; -//import org.sleuthkit.datamodel.ReadContentInputStream; -//import org.sleuthkit.datamodel.TskCoreException; -//import org.sleuthkit.datamodel.TskData; -//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; + +package org.sleuthkit.autopsy.exifparser; + +import com.drew.imaging.ImageMetadataReader; +import com.drew.imaging.ImageProcessingException; +import com.drew.lang.GeoLocation; +import com.drew.lang.Rational; +import com.drew.metadata.Metadata; +import com.drew.metadata.exif.ExifIFD0Directory; +import com.drew.metadata.exif.ExifSubIFDDirectory; +import com.drew.metadata.exif.GpsDirectory; +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Date; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.ImageUtils; +import 
org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestServices; +import org.sleuthkit.autopsy.ingest.ModuleDataEvent; +import org.sleuthkit.datamodel.AbstractFile; +import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; +import org.sleuthkit.datamodel.ReadContentInputStream; +import org.sleuthkit.datamodel.TskCoreException; +import org.sleuthkit.datamodel.TskData; +import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; /** * Ingest module to parse image Exif metadata. Currently only supports JPEG * files. Ingests an image file and, if available, adds its date, latitude, * longitude, altitude, device model, and device make to a blackboard artifact. */ -//public final class ExifParserFileIngestModule extends IngestModuleAbstractFile { -// -// private IngestServices services; -// final public static String MODULE_NAME = "Exif Parser"; -// final public static String MODULE_VERSION = Version.getVersion(); -// private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); -// private static ExifParserFileIngestModule defaultInstance = null; -// private int filesProcessed = 0; -// private boolean filesToFire = false; -// -// //file ingest modules require a private constructor -// //to ensure singleton instances -// private ExifParserFileIngestModule() { -// } -// -// //default instance used for module registration -// public static synchronized ExifParserFileIngestModule getDefault() { -// if (defaultInstance == null) { -// defaultInstance = new ExifParserFileIngestModule(); -// } -// return defaultInstance; -// } -// -// @Override -// public IngestModuleAbstractFile.ProcessResult process(PipelineContext pipelineContext, AbstractFile content) { -// -// //skip unalloc -// if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { -// return IngestModuleAbstractFile.ProcessResult.OK; -// } -// -// // skip known -// if (content.getKnown().equals(TskData.FileKnown.KNOWN)) { -// return IngestModuleAbstractFile.ProcessResult.OK; -// } -// -// // update the tree every 1000 files if we have EXIF data that is not being being displayed -// filesProcessed++; -// if ((filesToFire) && (filesProcessed % 1000 == 0)) { -// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); -// filesToFire = false; -// } -// -// //skip unsupported -// if (!parsableFormat(content)) { -// return IngestModuleAbstractFile.ProcessResult.OK; -// } -// -// return processFile(content); -// } -// -// public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) { -// InputStream in = null; -// BufferedInputStream bin = null; -// -// try { -// in = new ReadContentInputStream(f); -// bin = new BufferedInputStream(in); -// -// Collection attributes = new ArrayList(); -// Metadata metadata = ImageMetadataReader.readMetadata(bin, true); -// -// // Date -// ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class); -// if (exifDir != null) { -// Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL); -// if (date != null) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000)); -// } -// } -// -// // GPS Stuff -// GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class); -// if (gpsDir != null) { 
-// GeoLocation loc = gpsDir.getGeoLocation(); -// if (loc != null) { -// double latitude = loc.getLatitude(); -// double longitude = loc.getLongitude(); -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude)); -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude)); -// } -// -// Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); -// if (altitude != null) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue())); -// } -// } -// -// // Device info -// ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); -// if (devDir != null) { -// String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); -// if (model != null && !model.isEmpty()) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model)); -// } -// -// String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); -// if (make != null && !make.isEmpty()) { -// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make)); -// } -// } -// -// // Add the attributes, if there are any, to a new artifact -// if (!attributes.isEmpty()) { -// BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); -// bba.addAttributes(attributes); -// filesToFire = true; -// } -// -// return IngestModuleAbstractFile.ProcessResult.OK; -// -// } catch (TskCoreException ex) { -// logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ")."); -// } catch (ImageProcessingException ex) { -// logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")"); -// } catch (IOException ex) { -// logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); -// } finally { -// try { -// if (in != null) { -// in.close(); -// } -// if (bin != null) { -// bin.close(); -// } -// } catch (IOException ex) { -// logger.log(Level.WARNING, "Failed to close InputStream.", ex); -// } -// } -// -// // If we got here, there was an error -// return IngestModuleAbstractFile.ProcessResult.ERROR; -// } -// -// /** -// * Checks if should try to attempt to extract exif. 
Currently checks if JPEG -// * image (by signature) -// * -// * @param f file to be checked -// * -// * @return true if to be processed -// */ -// private boolean parsableFormat(AbstractFile f) { -// return ImageUtils.isJpegFileHeader(f); -// } -// -// @Override -// public void complete() { -// logger.log(Level.INFO, "completed exif parsing " + this.toString()); -// if (filesToFire) { -// //send the final new data event -// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); -// } -// } -// -// @Override -// public String getVersion() { -// return MODULE_VERSION; -// } -// -// @Override -// public String getName() { -// return "Exif Image Parser"; -// } -// -// @Override -// public String getDescription() { -// return "Ingests JPEG files and retrieves their EXIF metadata."; -// } -// -// @Override -// public void init(IngestModuleInit initContext) { -// services = IngestServices.getDefault(); -// logger.log(Level.INFO, "init() " + this.toString()); -// -// filesProcessed = 0; -// filesToFire = false; -// } -// -// @Override -// public void stop() { -// } -// -// @Override -// public boolean hasBackgroundJobsRunning() { -// return false; -// } -//} \ No newline at end of file +public final class ExifParserFileIngestModule implements FileIngestModule { + + private IngestServices services; + private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); + private int filesProcessed = 0; + private boolean filesToFire = false; + + ExifParserFileIngestModule() { + } + + @Override + public String getDisplayName() { + return ExifParserModuleFactory.getModuleName(); + } + + @Override + public void init(long taskId) { + services = IngestServices.getDefault(); + logger.log(Level.INFO, "init() {0}", this.toString()); + filesProcessed = 0; + filesToFire = false; + } + + @Override + public void process(AbstractFile content) { + + //skip unalloc + if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { + return; + } + + // skip known + if (content.getKnown().equals(TskData.FileKnown.KNOWN)) { + return; + } + + // update the tree every 1000 files if we have EXIF data that is not being displayed + filesProcessed++; + if ((filesToFire) && (filesProcessed % 1000 == 0)) { + services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); + filesToFire = false; + } + + //skip unsupported + if (!parsableFormat(content)) { + return; + } + + processFile(content); + } + + public void processFile(AbstractFile f) { + InputStream in = null; + BufferedInputStream bin = null; + + try { + in = new ReadContentInputStream(f); + bin = new BufferedInputStream(in); + + Collection attributes = new ArrayList(); + Metadata metadata = ImageMetadataReader.readMetadata(bin, true); + + // Date + ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class); + if (exifDir != null) { + Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL); + if (date != null) { + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), getDisplayName(), date.getTime() / 1000)); + } + } + + // GPS Stuff + GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class); + if (gpsDir != null) { + GeoLocation loc = gpsDir.getGeoLocation(); + if (loc != null) { + double latitude = loc.getLatitude(); + double longitude = loc.getLongitude(); + attributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), getDisplayName(), latitude)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), getDisplayName(), longitude)); + } + + Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE); + if (altitude != null) { + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), getDisplayName(), altitude.doubleValue())); + } + } + + // Device info + ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class); + if (devDir != null) { + String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); + if (model != null && !model.isEmpty()) { + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), getDisplayName(), model)); + } + + String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); + if (make != null && !make.isEmpty()) { + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), getDisplayName(), make)); + } + } + + // Add the attributes, if there are any, to a new artifact + if (!attributes.isEmpty()) { + BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF); + bba.addAttributes(attributes); + filesToFire = true; + } + } catch (TskCoreException ex) { + logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage()); + } catch (ImageProcessingException ex) { + logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()}); + } catch (IOException ex) { + logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); + } finally { + try { + if (in != null) { + in.close(); + } + if (bin != null) { + bin.close(); + } + } catch (IOException ex) { + logger.log(Level.WARNING, "Failed to close InputStream.", ex); + } + } + } + + /** + * Checks whether EXIF extraction should be attempted for a file. Currently + * checks whether the file is a JPEG image (by signature). + * + * @param f file to be checked + * + * @return true if the file should be processed + */ + private boolean parsableFormat(AbstractFile f) { + return ImageUtils.isJpegFileHeader(f); + } + + @Override + public void complete() { + logger.log(Level.INFO, "completed exif parsing {0}", this.toString()); + if (filesToFire) { + //send the final new data event + services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); + } + } + + @Override + public void stop() { + } +} \ No newline at end of file diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java new file mode 100755 index 0000000000..9595171390 --- /dev/null +++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java @@ -0,0 +1,62 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.sleuthkit.autopsy.exifparser; + +import java.io.Serializable; +import org.openide.util.lookup.ServiceProvider; +import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory; +import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestModuleFactory; + +/** + * A factory that creates file ingest modules that extract EXIF metadata from + * image files. + */ +@ServiceProvider(service=IngestModuleFactory.class) +public class ExifParserModuleFactory extends AbstractIngestModuleFactory { + @Override + public String getModuleDisplayName() { + return getModuleName(); + } + + static String getModuleName() { + return "Exif Image Parser"; + } + + @Override + public String getModuleDescription() { + return "Ingests JPEG files and retrieves their EXIF metadata."; + } + + @Override + public String getModuleVersionNumber() { + return Version.getVersion(); + } + + @Override + public boolean isFileIngestModuleFactory() { + return true; + } + + @Override + public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException { + return new ExifParserFileIngestModule(); + } +} diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 56da384573..3ba5f954df 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -66,7 +66,7 @@ public class HashDbIngestModule implements FileIngestModule { } @Override - public void init(long dataSourceTaskId) { + public void init(long taskId) { services = IngestServices.getDefault(); skCase = Case.getCurrentCase().getSleuthkitCase(); @@ -76,22 +76,20 @@ public class HashDbIngestModule implements FileIngestModule { calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes(); if (knownHashSets.isEmpty()) { - // RJCTODO -// services.postMessage(IngestMessage.createWarningMessage(++messageId, -// this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.noKnownHashDbSetMsg"), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); + services.postMessage(IngestMessage.createWarningMessage(++messageId, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); } if (knownBadHashSets.isEmpty()) { - // RJCTODO -// services.postMessage(IngestMessage.createWarningMessage(++messageId, -// this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.noKnownBadHashDbSetMsg"), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); + services.postMessage(IngestMessage.createWarningMessage(++messageId, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownBadHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); } } @@ -122,7 +120,6 @@ public class HashDbIngestModule implements FileIngestModule { // bail out if we have no hashes set if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) { -// return ProcessResult.OK; return; } @@ -136,14 
+133,14 @@ public class HashDbIngestModule implements FileIngestModule { calctime += (System.currentTimeMillis() - calcstart); } catch (IOException ex) { logger.log(Level.WARNING, "Error calculating hash of file " + name, ex); -// services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.fileReadErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.calcHashValueErr", -// name))); + services.postMessage(IngestMessage.createErrorMessage(++messageId, + HashDbIngestModule.this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.fileReadErrorMsg", + name), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.calcHashValueErr", + name))); // return ProcessResult.ERROR; return; } @@ -163,14 +160,14 @@ public class HashDbIngestModule implements FileIngestModule { skCase.setKnown(file, TskData.FileKnown.BAD); } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't set known bad state for file " + name + " - see sleuthkit log for details", ex); -// services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.hashLookupErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.settingKnownBadStateErr", -// name))); + services.postMessage(IngestMessage.createErrorMessage(++messageId, + HashDbIngestModule.this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.hashLookupErrorMsg", + name), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.settingKnownBadStateErr", + name))); // ret = ProcessResult.ERROR; } String hashSetName = db.getHashSetName(); @@ -194,15 +191,16 @@ public class HashDbIngestModule implements FileIngestModule { lookuptime += (System.currentTimeMillis() - lookupstart); } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't lookup known bad hash for file " + name + " - see sleuthkit log for details", ex); -// services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.hashLookupErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.lookingUpKnownBadHashValueErr", -// name))); -// ret = ProcessResult.ERROR; + services.postMessage(IngestMessage.createErrorMessage(++messageId, + HashDbIngestModule.this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.hashLookupErrorMsg", + name), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.lookingUpKnownBadHashValueErr", + name))); +// RJCTODO + // ret = ProcessResult.ERROR; } } @@ -219,29 +217,23 @@ public class HashDbIngestModule implements FileIngestModule { break; } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex); -// services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.hashLookupErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.settingsKnownStateErr", -// name))); -// ret = ProcessResult.ERROR; + // RJCTODO + // ret = ProcessResult.ERROR; } } lookuptime += (System.currentTimeMillis() - lookupstart); } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't lookup known hash for file " + name + " - see sleuthkit log for details", ex); -// 
services.postMessage(IngestMessage.createErrorMessage(++messageId, -// HashDbIngestModule.this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.hashLookupErrorMsg", -// name), -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.lookingUpKnownHashValueErr", -// name))); -// ret = ProcessResult.ERROR; + services.postMessage(IngestMessage.createErrorMessage(++messageId, + HashDbIngestModule.this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.hashLookupErrorMsg", + name), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.lookingUpKnownHashValueErr", + name))); + // RJCTODO + // ret = ProcessResult.ERROR; } } } @@ -294,13 +286,13 @@ public class HashDbIngestModule implements FileIngestModule { detailsSb.append(""); -// services.postMessage(IngestMessage.createDataMessage(++messageId, this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.postToBB.knownBadMsg", -// abstractFile.getName()), -// detailsSb.toString(), -// abstractFile.getName() + md5Hash, -// badFile)); + services.postMessage(IngestMessage.createDataMessage(++messageId, this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.postToBB.knownBadMsg", + abstractFile.getName()), + detailsSb.toString(), + abstractFile.getName() + md5Hash, + badFile)); } services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile))); } catch (TskException ex) { @@ -337,12 +329,12 @@ public class HashDbIngestModule implements FileIngestModule { } detailsSb.append(""); -// services.postMessage(IngestMessage.createMessage(++messageId, -// IngestMessage.MessageType.INFO, -// this, -// NbBundle.getMessage(this.getClass(), -// "HashDbIngestModule.complete.hashLookupResults"), -// detailsSb.toString())); + services.postMessage(IngestMessage.createMessage(++messageId, + IngestMessage.MessageType.INFO, + this, + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.complete.hashLookupResults"), + detailsSb.toString())); } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index fc4679ae0e..0ffb87527c 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -50,10 +50,10 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT; import org.sleuthkit.autopsy.coreutils.Version; +import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -import org.sleuthkit.autopsy.ingest.IngestModuleInit; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -77,7 +77,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown; * * Registered as a module in layer.xml */ -public final class KeywordSearchIngestModule { +public final class KeywordSearchIngestModule implements FileIngestModule { enum UpdateFrequency { @@ -102,7 +102,6 @@ public final class KeywordSearchIngestModule { public static final String MODULE_DESCRIPTION = 
NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription"); final public static String MODULE_VERSION = Version.getVersion(); - private static KeywordSearchIngestModule instance = null; private IngestServices services; private Ingester ingester = null; private volatile boolean commitIndex = false; //whether to commit index next time @@ -146,28 +145,110 @@ public final class KeywordSearchIngestModule { }; private Map ingestStatus; - //private constructor to ensure singleton instance - private KeywordSearchIngestModule() { + KeywordSearchIngestModule() { } /** - * Returns singleton instance of the module, creates one if needed + * Initializes the module for a new ingest run. Sets up threads and timers, + * and retrieves settings and keyword lists to run on * - * @return instance of the module */ - public static synchronized KeywordSearchIngestModule getDefault() { - if (instance == null) { - instance = new KeywordSearchIngestModule(); - } - return instance; - } + @Override + public void init(long taskId) { + logger.log(Level.INFO, "init()"); + services = IngestServices.getDefault(); + initialized = false; + caseHandle = Case.getCurrentCase().getSleuthkitCase(); + + tikaFormatDetector = new Tika(); + + ingester = Server.getIngester(); + + final Server server = KeywordSearch.getServer(); + try { + if (!server.isRunning()) { + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + logger.log(Level.SEVERE, msg); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details)); + return; + + } + } catch (KeywordSearchModuleException ex) { + logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); + //this means Solr is not properly initialized + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details)); + return; + } + + + //initialize extractors + stringExtractor = new AbstractFileStringExtract(); + stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); + stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); + + + //log the scripts used for debugging + final StringBuilder sbScripts = new StringBuilder(); + for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { + sbScripts.append(s.name()).append(" "); + } + logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString()); + + textExtractors = new ArrayList<>(); + //order matters, more specific extractors first + textExtractors.add(new AbstractFileHtmlExtract()); + textExtractors.add(new AbstractFileTikaTextExtract()); + + + ingestStatus = new HashMap<>(); + + keywords = new ArrayList<>(); + keywordLists = new ArrayList<>(); + keywordToList = new HashMap<>(); + + initKeywords(); + + if (keywords.isEmpty() || keywordLists.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); + } + + processedFiles = false; + finalSearcherDone = false; + searcherDone = true; //make sure to 
start the initial currentSearcher + //keeps track of all results per run not to repeat reporting the same hits + currentResults = new HashMap<>(); + + curDataSourceIds = new HashSet<>(); + + indexer = new Indexer(); + + final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; + logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs); + logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs); + + commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); + searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); + + initialized = true; + + commitTimer.start(); + searchTimer.start(); + } + + @Override public void process(AbstractFile abstractFile) { if (initialized == false) //error initializing indexing/Solr { - logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName()); + logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + return; + // RJCTODO // return ProcessResult.OK; } try { @@ -181,9 +262,12 @@ public final class KeywordSearchIngestModule { if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { //skip indexing of virtual dirs (no content, no real name) - will index children files + return; + // RJCTODO // return ProcessResult.OK; } + // RJCTODO //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it // if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { // indexer.indexFile(abstractFile, false); @@ -195,7 +279,8 @@ public final class KeywordSearchIngestModule { if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { //index meta-data only indexer.indexFile(abstractFile, false); -// return ProcessResult.OK; + // RJCTODO + // return ProcessResult.OK; } processedFiles = true; @@ -206,6 +291,7 @@ public final class KeywordSearchIngestModule { //index the file and content (if the content is supported) indexer.indexFile(abstractFile, true); + // RJCTODO // return ProcessResult.OK; } @@ -213,6 +299,7 @@ public final class KeywordSearchIngestModule { * After all files are ingested, execute final index commit and final search * Cleanup resources, threads, timers */ + @Override public void complete() { if (initialized == false) { return; @@ -249,12 +336,10 @@ public final class KeywordSearchIngestModule { try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks(); - logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles); - logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks); - } catch (NoOpenCoreException ex) { + logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles); + logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks); + } catch (NoOpenCoreException | KeywordSearchModuleException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se); } //cleanup done in final searcher @@ -265,6 +350,7 @@ public final class KeywordSearchIngestModule { /** * Handle stop event (ingest interrupted) Cleanup 
resources, threads, timers */ + @Override public void stop() { logger.log(Level.INFO, "stop()"); @@ -319,152 +405,20 @@ public final class KeywordSearchIngestModule { initialized = false; } - public String getName() { - return MODULE_NAME; - } - - public String getDescription() { - return MODULE_DESCRIPTION; - } - - public String getVersion() { - return MODULE_VERSION; - } - - /** - * Initializes the module for new ingest run Sets up threads, timers, - * retrieves settings, keyword lists to run on - * - */ - public void init(IngestModuleInit initContext) { - logger.log(Level.INFO, "init()"); - services = IngestServices.getDefault(); - initialized = false; - - caseHandle = Case.getCurrentCase().getSleuthkitCase(); - - tikaFormatDetector = new Tika(); - - ingester = Server.getIngester(); - - final Server server = KeywordSearch.getServer(); - try { - if (!server.isRunning()) { - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - logger.log(Level.SEVERE, msg); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); -// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - - } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); - //this means Solr is not properly initialized - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); -// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - } - - - //initialize extractors - stringExtractor = new AbstractFileStringExtract(); - stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); - stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); - - - //log the scripts used for debugging - final StringBuilder sbScripts = new StringBuilder(); - for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { - sbScripts.append(s.name()).append(" "); - } - logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString()); - - textExtractors = new ArrayList(); - //order matters, more specific extractors first - textExtractors.add(new AbstractFileHtmlExtract()); - textExtractors.add(new AbstractFileTikaTextExtract()); - - - ingestStatus = new HashMap(); - - keywords = new ArrayList(); - keywordLists = new ArrayList(); - keywordToList = new HashMap(); - - initKeywords(); - - if (keywords.isEmpty() || keywordLists.isEmpty()) { -// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), -// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); - } - - processedFiles = false; - finalSearcherDone = false; - searcherDone = true; //make sure to start the initial currentSearcher - //keeps track of all results per run not to repeat reporting the same hits - currentResults = new HashMap>(); - - curDataSourceIds = new HashSet(); - - indexer = new Indexer(); - - final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; - logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs); - logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs); - - commitTimer = 
new Timer(updateIntervalMs, new CommitTimerAction()); - searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); - - initialized = true; - - commitTimer.start(); - searchTimer.start(); - } - - public boolean hasSimpleConfiguration() { - return true; - } - - public boolean hasAdvancedConfiguration() { - return true; - } - - public javax.swing.JPanel getSimpleConfiguration(String context) { - KeywordSearchListsXML.getCurrent().reload(); - - if (null == simpleConfigPanel) { - simpleConfigPanel = new KeywordSearchIngestSimplePanel(); - } - else { - simpleConfigPanel.load(); - } - - return simpleConfigPanel; - } - - public javax.swing.JPanel getAdvancedConfiguration(String context) { - if (advancedConfigPanel == null) { - advancedConfigPanel = new KeywordSearchConfigurationPanel(); - } - - advancedConfigPanel.load(); - return advancedConfigPanel; - } - - public void saveAdvancedConfiguration() { - if (advancedConfigPanel != null) { - advancedConfigPanel.store(); - } - - if (simpleConfigPanel != null) { - simpleConfigPanel.load(); - } - } - - public void saveSimpleConfiguration() { - KeywordSearchListsXML.getCurrent().save(); - } + // RJCTODO +// public void saveAdvancedConfiguration() { +// if (advancedConfigPanel != null) { +// advancedConfigPanel.store(); +// } +// +// if (simpleConfigPanel != null) { +// simpleConfigPanel.load(); +// } +// } +// +// public void saveSimpleConfiguration() { +// KeywordSearchListsXML.getCurrent().save(); +// } /** * The modules maintains background threads, return true if background @@ -473,15 +427,15 @@ public final class KeywordSearchIngestModule { * * @return */ - public boolean hasBackgroundJobsRunning() { - if ((currentSearcher != null && searcherDone == false) - || (finalSearcherDone == false)) { - return true; - } else { - return false; - } - - } + // RJCTODO: +// public boolean hasBackgroundJobsRunning() { +// if ((currentSearcher != null && searcherDone == false) +// || (finalSearcherDone == false)) { +// return true; +// } else { +// return false; +// } +// } /** * Commits index and notifies listeners of index update @@ -540,7 +494,7 @@ public final class KeywordSearchIngestModule { msg.append("").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("").append(error_io).append(""); msg.append(""); String indexStats = msg.toString(); - logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats); + logger.log(Level.INFO, "Keyword Indexing Completed: {0}", indexStats); // RJCTODO // services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats)); if (error_index > 0) { @@ -561,10 +515,8 @@ public final class KeywordSearchIngestModule { try { final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles(); KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles)); - } catch (NoOpenCoreException ex) { + } catch (NoOpenCoreException | KeywordSearchModuleException ex) { logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); - } catch (KeywordSearchModuleException se) { - logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se); } } @@ -611,7 +563,7 @@ public final class KeywordSearchIngestModule { } - logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString()); + logger.log(Level.INFO, "Set new effective keyword lists: {0}", 
sb.toString()); } @@ -703,8 +655,7 @@ public final class KeywordSearchIngestModule { } if (fileExtract == null) { - logger.log(Level.INFO, "No text extractor found for file id:" - + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat); + logger.log(Level.INFO, "No text extractor found for file id:{0}, name: {1}, detected format: {2}", new Object[]{aFile.getId(), aFile.getName(), detectedFormat}); return false; } @@ -727,7 +678,7 @@ public final class KeywordSearchIngestModule { ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); return true; } else { - logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); return false; } @@ -833,7 +784,7 @@ public final class KeywordSearchIngestModule { try { //logger.log(Level.INFO, "indexing: " + aFile.getName()); if (!extractTextAndIndex(aFile, detectedFormat)) { - logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ")."); + logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); } else { ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); @@ -877,9 +828,9 @@ public final class KeywordSearchIngestModule { private boolean finalRun = false; Searcher(List keywordLists) { - this.keywordLists = new ArrayList(keywordLists); - this.keywords = new ArrayList(); - this.keywordToList = new HashMap(); + this.keywordLists = new ArrayList<>(keywordLists); + this.keywords = new ArrayList<>(); + this.keywordToList = new HashMap<>(); //keywords are populated as searcher runs } @@ -944,7 +895,7 @@ public final class KeywordSearchIngestModule { for (Keyword keywordQuery : keywords) { if (this.isCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery()); + logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keywordQuery.getQuery()); return null; } @@ -975,7 +926,7 @@ public final class KeywordSearchIngestModule { final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds); del.addFilter(dataSourceFilter); - Map> queryResult = null; + Map> queryResult; try { queryResult = del.performQuery(); @@ -986,7 +937,7 @@ public final class KeywordSearchIngestModule { //likely case has closed and threads are being interrupted return null; } catch (CancellationException e) { - logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery()); + logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keywordQuery.getQuery()); return null; } catch (Exception e) { logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e); @@ -1002,7 +953,7 @@ public final class KeywordSearchIngestModule { //write results to BB //new artifacts created, to report to listeners - Collection newArtifacts = new ArrayList(); + Collection newArtifacts = new ArrayList<>(); //scale progress bar more more granular, per result sub-progress, within per keyword int totalUnits = newResults.size(); @@ -1019,7 +970,7 @@ public 
final class KeywordSearchIngestModule { for (final Keyword hitTerm : newResults.keySet()) { //checking for cancellation between results if (this.isCancelled()) { - logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery()); + logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: {0}", keywordQuery.getQuery()); return null; } @@ -1036,7 +987,7 @@ public final class KeywordSearchIngestModule { for (final AbstractFile hitFile : contentHitsFlattened.keySet()) { // get the snippet for the first hit in the file - String snippet = null; + String snippet; final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery()); int chunkId = contentHitsFlattened.get(hitFile); try { @@ -1053,7 +1004,7 @@ public final class KeywordSearchIngestModule { // write the blackboard artifact for this keyword in this file KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName); if (written == null) { - logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString()); + logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hitFile, hitTerm.toString()}); continue; } @@ -1128,7 +1079,7 @@ public final class KeywordSearchIngestModule { } detailsSb.append(""); -// services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); + services.postMessage(IngestMessage.createDataMessage(++messageID, this, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact())); } } //for each file hit @@ -1156,7 +1107,7 @@ public final class KeywordSearchIngestModule { try { finalizeSearcher(); stopWatch.stop(); - logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs."); + logger.log(Level.INFO, "Searcher took to run: {0} secs.", stopWatch.getElapsedTimeSecs()); } finally { searcherLock.unlock(); } @@ -1226,13 +1177,13 @@ public final class KeywordSearchIngestModule { //calculate new results but subtracting results already obtained in this ingest //update currentResults map with the new results private Map> filterResults(Map> queryResult, boolean isRegex) { - Map> newResults = new HashMap>(); + Map> newResults = new HashMap<>(); for (String termResult : queryResult.keySet()) { List queryTermResults = queryResult.get(termResult); //translate to list of IDs that we keep track of - List queryTermResultsIDs = new ArrayList(); + List queryTermResultsIDs = new ArrayList<>(); for (ContentHit ch : queryTermResults) { queryTermResultsIDs.add(ch.getId()); } @@ -1249,7 +1200,7 @@ public final class KeywordSearchIngestModule { //add to new results List newResultsFs = newResults.get(termResultK); if (newResultsFs == null) { - newResultsFs = new ArrayList(); + newResultsFs = new ArrayList<>(); newResults.put(termResultK, newResultsFs); } newResultsFs.add(res); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java new file mode 100755 index 0000000000..1e3c559082 --- /dev/null +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java @@ -0,0 +1,121 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.keywordsearch;
+
+import java.io.Serializable;
+import javax.swing.JPanel;
+import org.openide.util.NbBundle;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates file ingest modules that do keyword searches.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class KeywordSearchModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleName");
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription");
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public Serializable getDefaultIngestOptions() {
+        return new IngestOptions();
+    }
+
+    @Override
+    public boolean providesIngestOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
+        KeywordSearchListsXML.getCurrent().reload();
+        return new KeywordSearchIngestSimplePanel(); // RJCTODO: Load required?
+    }
+
+    @Override
+    public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(ingestOptionsPanel instanceof KeywordSearchIngestSimplePanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchIngestSimplePanel panel = (KeywordSearchIngestSimplePanel)ingestOptionsPanel;
+        panel.store();
+
+        return new IngestOptions(); // RJCTODO
+    }
+
+    @Override
+    public boolean providesGlobalOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getGlobalOptionsPanel() {
+        KeywordSearchConfigurationPanel globalOptionsPanel = new KeywordSearchConfigurationPanel();
+        globalOptionsPanel.load();
+        return globalOptionsPanel;
+    }
+
+    @Override
+    public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(globalOptionsPanel instanceof KeywordSearchConfigurationPanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchConfigurationPanel panel = (KeywordSearchConfigurationPanel)globalOptionsPanel;
+        panel.store();
+        // RJCTODO: Need simple panel store? May need to change implementation...see also hash db factory
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new KeywordSearchIngestModule();
+    }
+
+    private static class IngestOptions implements Serializable {
+        // RJCTODO: Any options here?
+        // boolean alwaysCalcHashes = true;
+//        ArrayList<String> hashSetNames = new ArrayList<>();
+    }
+}
diff --git a/ewfVerify/nbproject/project.xml b/ewfVerify/nbproject/project.xml
index a3955c75fa..1c9b1dd905 100755
--- a/ewfVerify/nbproject/project.xml
+++ b/ewfVerify/nbproject/project.xml
@@ -6,6 +6,14 @@
             <code-name-base>org.sleuthkit.autopsy.ewfverify</code-name-base>
             <suite-component/>
             <module-dependencies>
+                <dependency>
+                    <code-name-base>org.openide.util.lookup</code-name-base>
+                    <build-prerequisite/>
+                    <compile-dependency/>
+                    <run-dependency>
+                        <specification-version>8.19.1</specification-version>
+                    </run-dependency>
+                </dependency>
                 <dependency>
                     <code-name-base>org.sleuthkit.autopsy.core</code-name-base>
diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java
new file mode 100755
index 0000000000..8087d249b0
--- /dev/null
+++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java
@@ -0,0 +1,63 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.io.Serializable;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates data source ingest modules that verify the integrity
+ * of EWF (E01) image files.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class EwfVerifierModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return "EWF Verify"; // RJCTODO: Is this what we want here?
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return "Validates the integrity of E01 files.";
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public boolean isDataSourceIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new EwfVerifyIngestModule();
+    }
+}
diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
index b006eb386e..3140135375 100755
--- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
+++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,204 +16,180 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -//package org.sleuthkit.autopsy.ewfverify; -// -//import java.security.MessageDigest; -//import java.security.NoSuchAlgorithmException; -//import java.util.logging.Level; -//import java.util.logging.Logger; -//import javax.xml.bind.DatatypeConverter; -//import org.sleuthkit.autopsy.casemodule.Case; -//import org.sleuthkit.autopsy.coreutils.Version; -//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; -//import org.sleuthkit.autopsy.ingest.IngestMessage; -//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; -//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource; -//import org.sleuthkit.autopsy.ingest.IngestModuleInit; -//import org.sleuthkit.autopsy.ingest.IngestServices; -//import org.sleuthkit.datamodel.Content; -//import org.sleuthkit.datamodel.Image; -//import org.sleuthkit.datamodel.SleuthkitCase; -//import org.sleuthkit.datamodel.TskCoreException; -//import org.sleuthkit.datamodel.TskData; -///** -// * Data Source Ingest Module that generates a hash of an E01 image file and -// * verifies it with the value stored in the image. -// * -// * @author jwallace -// */ -//public class EwfVerifyIngestModule extends IngestModuleDataSource { -// private static final String MODULE_NAME = "EWF Verify"; -// private static final String MODULE_VERSION = Version.getVersion(); -// private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files."; -// private static final long DEFAULT_CHUNK_SIZE = 32 * 1024; -// private IngestServices services; -// private volatile boolean running = false; -// private Image img; -// private String imgName; -// private MessageDigest messageDigest; -// private static Logger logger = null; -// private static int messageId = 0; -// private boolean verified = false; -// private boolean skipped = false; -// private String calculatedHash = ""; -// private String storedHash = ""; -// private SleuthkitCase skCase; -// -// public EwfVerifyIngestModule() { -// } -// -// @Override -// public void process(PipelineContext pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { -// imgName = dataSource.getName(); -// try { -// img = dataSource.getImage(); -// } catch (TskCoreException ex) { -// img = null; -// logger.log(Level.SEVERE, "Failed to get image from Content.", ex); -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, -// "Error processing " + imgName)); -// return; -// } -// -// // Skip images that are not E01 -// if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { -// img = null; -// logger.log(Level.INFO, "Skipping non-ewf image " + imgName); -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, -// "Skipping non-ewf image " + imgName)); -// skipped = true; -// return; -// } -// -// -// if ((img.getMd5()!= null) && !img.getMd5().isEmpty()) -// { -// storedHash = img.getMd5().toLowerCase(); -// logger.info("Hash value stored in " + imgName + ": " + storedHash); -// -// } -// else { -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, -// "Image " + imgName + " does not have stored hash.")); -// return; -// } -// -// logger.log(Level.INFO, "Starting ewf verification of " + 
img.getName()); -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, -// "Starting " + imgName)); -// -// long size = img.getSize(); -// if (size == 0) { -// logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried."); -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, -// "Error getting size of " + imgName + ". Image will not be processed.")); -// } -// -// // Libewf uses a sector size of 64 times the sector size, which is the -// // motivation for using it here. -// long chunkSize = 64 * img.getSsize(); -// chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize; -// -// int totalChunks = (int) Math.ceil(size / chunkSize); -// logger.log(Level.INFO, "Total chunks = " + totalChunks); -// int read; -// -// byte[] data; -// controller.switchToDeterminate(totalChunks); -// -// running = true; -// // Read in byte size chunks and update the hash value with the data. -// for (int i = 0; i < totalChunks; i++) { -// if (controller.isCancelled()) { -// running = false; -// return; -// } -// data = new byte[ (int) chunkSize ]; -// try { -// read = img.read(data, i * chunkSize, chunkSize); -// } catch (TskCoreException ex) { -// String msg = "Error reading " + imgName + " at chunk " + i; -// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg)); -// logger.log(Level.SEVERE, msg, ex); -// return; -// } -// messageDigest.update(data); -// controller.progress(i); -// } -// -// // Finish generating the hash and get it as a string value -// calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase(); -// verified = calculatedHash.equals(storedHash); -// logger.info("Hash calculated from " + imgName + ": " + calculatedHash); -// running = false; -// } -// -// @Override -// public void init(IngestModuleInit initContext) { -// services = IngestServices.getDefault(); -// skCase = Case.getCurrentCase().getSleuthkitCase(); -// running = false; -// verified = false; -// skipped = false; -// img = null; -// imgName = ""; -// storedHash = ""; -// calculatedHash = ""; -// -// if (logger == null) { -// logger = services.getLogger(this); -// } -// -// if (messageDigest == null) { -// try { -// messageDigest = MessageDigest.getInstance("MD5"); -// } catch (NoSuchAlgorithmException ex) { -// logger.log(Level.WARNING, "Error getting md5 algorithm", ex); -// throw new RuntimeException("Failed to get MD5 algorithm"); -// } -// } else { -// messageDigest.reset(); -// } -// } -// -// @Override -// public void complete() { -// logger.info("complete() " + this.getName()); -// if (skipped == false) { -// String msg = verified ? " verified" : " not verified"; -// String extra = "

<p>EWF Verification Results for " + imgName + "</p>";
-//            extra += "<li>Result:" + msg + "</li>";
-//            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
-//            extra += "<li>Stored hash: " + storedHash + "</li>";
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
-//            logger.info(imgName + msg);
-//        }
-//    }
-//
-//    @Override
-//    public void stop() {
-//        running = false;
-//    }
-//
-//    @Override
-//    public String getName() {
-//        return MODULE_NAME;
-//    }
-//
-//    @Override
-//    public String getVersion() {
-//        return MODULE_VERSION;
-//    }
-//
-//    @Override
-//    public String getDescription() {
-//        return MODULE_DESCRIPTION;
-//    }
-//
-//    @Override
-//    public boolean hasBackgroundJobsRunning() {
-//        return running;
-//    }
-//}
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.xml.bind.DatatypeConverter;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
+import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+
+/**
+ * Data source ingest module that verifies the integrity of an Expert Witness
+ * Format (EWF) E01 image file by generating a hash of the file and comparing it
+ * to the value stored in the image.
+ */
+public class EwfVerifyIngestModule implements DataSourceIngestModule {
+    private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
+    private IngestServices services;
+    private volatile boolean running = false;
+    private Image img;
+    private String imgName;
+    private MessageDigest messageDigest;
+    private static Logger logger = null;
+    private static int messageId = 0;
+    private boolean verified = false;
+    private boolean skipped = false;
+    private String calculatedHash = "";
+    private String storedHash = "";
+
+    EwfVerifyIngestModule() {
+    }
+
+    @Override
+    public String getDisplayName() {
+        return EwfVerifierModuleFactory.getModuleName();
+    }
+
+    @Override
+    public void init(long taskId) {
+        services = IngestServices.getDefault();
+        running = false;
+        verified = false;
+        skipped = false;
+        img = null;
+        imgName = "";
+        storedHash = "";
+        calculatedHash = "";
+
+        if (logger == null) {
+            logger = services.getLogger(this);
+        }
+
+        if (messageDigest == null) {
+            try {
+                messageDigest = MessageDigest.getInstance("MD5");
+            } catch (NoSuchAlgorithmException ex) {
+                logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
+                throw new RuntimeException("Failed to get MD5 algorithm");
+            }
+        } else {
+            messageDigest.reset();
+        }
+    }
+
+    @Override
+    public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
+        imgName = dataSource.getName();
+        try {
+            img = dataSource.getImage();
+        } catch (TskCoreException ex) {
+            img = null;
+            logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error processing " + imgName));
+            return;
+        }
+
+        // Skip images that are not E01
+        if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
+            img = null;
+            logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                    "Skipping non-ewf image " + imgName));
+            skipped = true;
+            return;
+        }
+
+        if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
+        {
+            storedHash = img.getMd5().toLowerCase();
+            logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
+
+        }
+        else {
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Image " + imgName + " does not have stored hash."));
+            return;
+        }
+
+        logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
+        services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                "Starting " + imgName));
+
+        long size = img.getSize();
+        if (size == 0) {
+            logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error getting size of " + imgName + ". Image will not be processed."));
+        }
+
+        // Libewf uses a chunk size of 64 times the sector size, which is the
+        // motivation for using it here.
+        long chunkSize = 64 * img.getSsize();
+        chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
+
+        // Use floating-point division so a trailing partial chunk is counted.
+        int totalChunks = (int) Math.ceil((double) size / (double) chunkSize);
+        logger.log(Level.INFO, "Total chunks = {0}", totalChunks);
+        int read;
+
+        byte[] data;
+        statusHelper.switchToDeterminate(totalChunks);
+
+        running = true;
+        // Read in byte size chunks and update the hash value with the data.
+        for (int i = 0; i < totalChunks; i++) {
+            if (statusHelper.isCancelled()) {
+                running = false;
+                return;
+            }
+            data = new byte[ (int) chunkSize ];
+            try {
+                read = img.read(data, i * chunkSize, chunkSize);
+            } catch (TskCoreException ex) {
+                String msg = "Error reading " + imgName + " at chunk " + i;
+                services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
+                logger.log(Level.SEVERE, msg, ex);
+                return;
+            }
+            // Hash only the bytes actually read; the final chunk may be short.
+            messageDigest.update(data, 0, read);
+            statusHelper.progress(i);
+        }
+
+        // Finish generating the hash and get it as a string value
+        calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
+        verified = calculatedHash.equals(storedHash);
+        logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash});
+        running = false;
+    }
+
+    @Override
+    public void complete() {
+        logger.log(Level.INFO, "complete() {0}", getDisplayName());
+        if (skipped == false) {
+            String msg = verified ? " verified" : " not verified";
+            String extra = "<p>EWF Verification Results for " + imgName + "</p>";
+            extra += "<li>Result:" + msg + "</li>";
+            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
+            extra += "<li>Stored hash: " + storedHash + "</li>";
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
+            logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
+        }
+    }
+
+    @Override
+    public void stop() {
+    }
+}

From c08f1c3bfb2d49229386f5c94f03594cff5d20e3 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Tue, 4 Mar 2014 12:50:56 -0500
Subject: [PATCH 18/48] More work on adapting ingest modules to new
 infrastructure

---
 .../ingest/DataSourceIngestModule.java        |   3 +-
 .../autopsy/ingest/FileIngestModule.java      |   7 +-
 .../autopsy/ingest/IngestManager.java         |  22 +-
 .../autopsy/ingest/IngestModule.java          |   6 +
 .../autopsy/ingest/IngestServices.java        |  23 +-
 .../ExifParserFileIngestModule.java           |  30 ++-
 .../hashdatabase/HashDbIngestModule.java      |  25 +-
 .../AbstractFileHtmlExtract.java              |   4 +-
 .../KeywordSearchConfigurationPanel.java      |   2 +-
 .../KeywordSearchIngestModule.java            | 218 +++++++++---------
 .../KeywordSearchOptionsPanelController.java  |  38 ++-
 .../ewfverify/EwfVerifyIngestModule.java      |  13 +-
 12 files changed, 206 insertions(+), 185 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
index 38e5a959a1..bff7392cf9 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java
@@ -31,6 +31,7 @@ public interface DataSourceIngestModule extends IngestModule {
      * @param dataSource The data source to process.
      * @param statusHelper A status helper to be used to report progress and
      * detect task cancellation.
+     * @return RJCTODO
      */
-    void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
+    ProcessResult process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class, or better, get rid of it so all threads in ingest can be the same
 }
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
index 1caf184d67..6765cc1e00 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestModule.java
@@ -27,8 +27,9 @@ import org.sleuthkit.datamodel.AbstractFile;
 public interface FileIngestModule extends IngestModule {
 
     /**
-     * Process a file.
-     * @param file The file to process.
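This hunk changes the file ingest contract so that, like data source ingest modules above, file ingest modules report a result code. For reference, a minimal module written against the revised interfaces might look like the following sketch (a hypothetical example, not part of the patch; it assumes the ProcessResult enum added to IngestModule later in this patch):

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.datamodel.AbstractFile;

    class SampleFileIngestModule implements FileIngestModule {
        @Override
        public String getDisplayName() {
            return "Sample"; // hypothetical module name
        }

        @Override
        public void init(long taskId) {
            // Acquire any per-ingest resources here.
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            // Per-file work goes here; the return value is recorded so that
            // downstream modules in the pipeline can query it.
            return ProcessResult.OK;
        }

        @Override
        public void complete() {
            // Post summary messages when the ingest run finishes.
        }

        @Override
        public void stop() {
            // Release resources if the run is cancelled.
        }
    }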
+ * RJCTODO + * @param file + * @return */ - void process(AbstractFile file); + ProcessResult process(AbstractFile file); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 6c7c226568..2ff35564a1 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -60,7 +60,7 @@ public class IngestManager { private IngestAbstractFileProcessor abstractFileIngester; private List dataSourceIngesters; private SwingWorker queueWorker; -// private final Map abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete + private final Map abstractFileModulesRetValues = new HashMap<>(); private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class); private final IngestMonitor ingestMonitor = new IngestMonitor(); // private IngestModuleLoader moduleLoader = null; @@ -227,8 +227,6 @@ public class IngestManager { } } - // RJCTODO: This method and the concept it supports (modules are able to query the success or failure of - // other modules in the pipeline by name) may be obsolete. /** * Returns the return value from a previously run module on the file being * currently analyzed. @@ -236,15 +234,15 @@ public class IngestManager { * @param moduleName Name of module. * @returns Return value from that module if it was previously run. */ -// IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { -// synchronized (abstractFileModulesRetValues) { -// if (abstractFileModulesRetValues.containsKey(moduleName)) { -// return abstractFileModulesRetValues.get(moduleName); -// } else { -// return IngestModuleAbstractFile.ProcessResult.UNKNOWN; -// } -// } -// } + IngestModule.ProcessResult getAbstractFileModuleResult(String moduleName) { + synchronized (abstractFileModulesRetValues) { + if (abstractFileModulesRetValues.containsKey(moduleName)) { + return abstractFileModulesRetValues.get(moduleName); + } else { + return IngestModule.ProcessResult.UNKNOWN; // RJCTODO: Not yet determined? 
+            }
+        }
+    }
 
     // RJCTODO: Update comment
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
index 0f851c6369..c15d16c9c5 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModule.java
@@ -24,6 +24,12 @@ package org.sleuthkit.autopsy.ingest;
  */
 public interface IngestModule {
 
+    public enum ProcessResult { // RJCTODO: Refactor to something like ProcessingResult or probably just Result or ResultCode
+        OK,
+        ERROR, // RJCTODO: Consider replacing comments that existed when this was specific to file ingest modules
+        UNKNOWN // RJCTODO: This appears to be specific
+    };
+
     /**
      * Invoked to obtain a display name for the module, i.e., a name that is
      * suitable for presentation to a user in a user interface component or a
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
index 217d59fdab..5bdc26c292 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
@@ -117,9 +117,8 @@ public class IngestServices {
     public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
         IngestManager.fireModuleDataEvent(moduleDataEvent);
     }
-    
-    
-    /**
+
+    /**
      * Fire module content event to notify registered module content event listeners
      * that there is new content (from ZIP file contents, carving, etc.)
      * @param moduleContentEvent module content event, encapsulating content changed
@@ -141,18 +140,16 @@ public class IngestServices {
         manager.scheduleFile(dataSourceTaskId, file);
     }
 
-    
     /**
-     * Get free disk space of a drive where ingest data are written to
-     * That drive is being monitored by IngestMonitor thread when ingest is running.
-     * 
-     * @return amount of disk space, -1 if unknown
-     */
+     * Get free disk space of a drive where ingest data are written to
+     * That drive is being monitored by IngestMonitor thread when ingest is running.
+     *
+     * @return amount of disk space, -1 if unknown
+     */
     public long getFreeDiskSpace() {
         return manager.getFreeDiskSpace();
     }
 
-    // RJCTODO: Thsi may be obsolete
     /**
      * Facility for a file ingest module to check a return value from a previously run file ingest module
      * that executed for the same file.
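The facility documented above lets one file ingest module react to the recorded result of an earlier module in the pipeline for the same file. A rough usage fragment follows (illustrative only; the module name string is a placeholder, and in this patch series the keyword search module uses this pattern to fall back to metadata-only indexing):

    // Inside a file ingest module's process() method (sketch, not patch code).
    IngestModule.ProcessResult priorResult =
            IngestServices.getDefault().getAbstractFileModuleResult("Hash Lookup"); // placeholder name
    if (priorResult == IngestModule.ProcessResult.ERROR) {
        // An earlier module failed on this file; degrade gracefully, for
        // example by indexing metadata only instead of extracted content.
    }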
@@ -161,9 +158,9 @@ public class IngestServices { * @param moduleName registered module name of the module to check the return value of * @return the return value of the previously executed module for the currently processed file in the file ingest pipeline */ -// public IngestModuleAbstractFile.ProcessResult getAbstractFileModuleResult(String moduleName) { -// return manager.getAbstractFileModuleResult(moduleName); -// } + public IngestModule.ProcessResult getAbstractFileModuleResult(String moduleName) { + return manager.getAbstractFileModuleResult(moduleName); + } /** * Gets a specific name/value configuration setting for a module diff --git a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java index 036b6be06c..e3c3c549c3 100644 --- a/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java +++ b/ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserFileIngestModule.java @@ -77,16 +77,16 @@ public final class ExifParserFileIngestModule implements FileIngestModule { } @Override - public void process(AbstractFile content) { + public ProcessResult process(AbstractFile content) { //skip unalloc if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { - return; + return ProcessResult.OK; } // skip known if (content.getKnown().equals(TskData.FileKnown.KNOWN)) { - return; + return ProcessResult.OK; } // update the tree every 1000 files if we have EXIF data that is not being being displayed @@ -98,13 +98,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule { //skip unsupported if (!parsableFormat(content)) { - return; + return ProcessResult.OK; } - processFile(content); + return processFile(content); } - public void processFile(AbstractFile f) { + ProcessResult processFile(AbstractFile f) { InputStream in = null; BufferedInputStream bin = null; @@ -161,13 +161,22 @@ public final class ExifParserFileIngestModule implements FileIngestModule { bba.addAttributes(attributes); filesToFire = true; } - } catch (TskCoreException ex) { + + return ProcessResult.OK; + } + catch (TskCoreException ex) { logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage()); - } catch (ImageProcessingException ex) { + return ProcessResult.ERROR; + } + catch (ImageProcessingException ex) { logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()}); - } catch (IOException ex) { + return ProcessResult.ERROR; + } + catch (IOException ex) { logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex); - } finally { + return ProcessResult.ERROR; + } + finally { try { if (in != null) { in.close(); @@ -177,6 +186,7 @@ public final class ExifParserFileIngestModule implements FileIngestModule { } } catch (IOException ex) { logger.log(Level.WARNING, "Failed to close InputStream.", ex); + return ProcessResult.ERROR; } } } diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 3ba5f954df..2b64b4cffc 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -112,15 +112,15 @@ public class HashDbIngestModule implements FileIngestModule { 
} @Override - public void process(AbstractFile file) { + public ProcessResult process(AbstractFile file) { // Skip unallocated space files. if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) { - return; + return ProcessResult.OK; } // bail out if we have no hashes set if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) { - return; + return ProcessResult.OK; } // calc hash value @@ -141,14 +141,13 @@ public class HashDbIngestModule implements FileIngestModule { NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", name))); -// return ProcessResult.ERROR; - return; + return ProcessResult.ERROR; } } // look up in known bad first boolean foundBad = false; -// ProcessResult ret = ProcessResult.OK; + ProcessResult ret = ProcessResult.OK; for (HashDb db : knownBadHashSets) { try { long lookupstart = System.currentTimeMillis(); @@ -168,7 +167,7 @@ public class HashDbIngestModule implements FileIngestModule { NbBundle.getMessage(this.getClass(), "HashDbIngestModule.settingKnownBadStateErr", name))); -// ret = ProcessResult.ERROR; + ret = ProcessResult.ERROR; } String hashSetName = db.getHashSetName(); @@ -199,8 +198,7 @@ public class HashDbIngestModule implements FileIngestModule { NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownBadHashValueErr", name))); -// RJCTODO - // ret = ProcessResult.ERROR; + ret = ProcessResult.ERROR; } } @@ -217,8 +215,7 @@ public class HashDbIngestModule implements FileIngestModule { break; } catch (TskException ex) { logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex); - // RJCTODO - // ret = ProcessResult.ERROR; + ret = ProcessResult.ERROR; } } lookuptime += (System.currentTimeMillis() - lookupstart); @@ -232,18 +229,16 @@ public class HashDbIngestModule implements FileIngestModule { NbBundle.getMessage(this.getClass(), "HashDbIngestModule.lookingUpKnownHashValueErr", name))); - // RJCTODO - // ret = ProcessResult.ERROR; + ret = ProcessResult.ERROR; } } } -// return ret; + return ret; } private void postHashSetHitToBlackboard(AbstractFile abstractFile, String md5Hash, String hashSetName, String comment, boolean showInboxMessage) { try { - // RJCTODO String MODULE_NAME = NbBundle.getMessage(HashDbIngestModule.class, "HashDbIngestModule.moduleName"); BlackboardArtifact badFile = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_HASHSET_HIT); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileHtmlExtract.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileHtmlExtract.java index 49ca84aa8b..728cc57bb2 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileHtmlExtract.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileHtmlExtract.java @@ -60,8 +60,8 @@ import org.sleuthkit.datamodel.ReadContentInputStream; //"application/xml-dtd", ); - AbstractFileHtmlExtract() { - this.module = KeywordSearchIngestModule.getDefault(); + AbstractFileHtmlExtract(KeywordSearchIngestModule module) { + this.module = module; ingester = Server.getIngester(); } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel.java index 0b33a5aea8..54518ce9d5 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel.java +++ 
b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchConfigurationPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011 - 2013 Basis Technology Corp. + * Copyright 2011 - 2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 0ffb87527c..6a3a81438b 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -66,7 +66,6 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData.FileKnown; -// RJCTODO: Update for new infrastructure /** * An ingest module on a file level Performs indexing of allocated and Solr * supported files, string extraction and indexing of unallocated and not Solr @@ -134,7 +133,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule { private KeywordSearchConfigurationPanel advancedConfigPanel; private Tika tikaFormatDetector; - private enum IngestStatus { TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested STRINGS_INGESTED, ///< Strings were extracted from file @@ -148,108 +146,14 @@ public final class KeywordSearchIngestModule implements FileIngestModule { KeywordSearchIngestModule() { } - /** - * Initializes the module for new ingest run Sets up threads, timers, - * retrieves settings, keyword lists to run on - * - */ @Override - public void init(long taskId) { - logger.log(Level.INFO, "init()"); - services = IngestServices.getDefault(); - initialized = false; - - caseHandle = Case.getCurrentCase().getSleuthkitCase(); - - tikaFormatDetector = new Tika(); - - ingester = Server.getIngester(); - - final Server server = KeywordSearch.getServer(); - try { - if (!server.isRunning()) { - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - logger.log(Level.SEVERE, msg); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - - } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); - //this means Solr is not properly initialized - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); - return; - } - - - //initialize extractors - stringExtractor = new AbstractFileStringExtract(); - stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); - stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); - - - //log the scripts used for debugging - final StringBuilder sbScripts = new StringBuilder(); - for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { - sbScripts.append(s.name()).append(" "); - } - logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString()); - - textExtractors = new ArrayList<>(); - //order matters, more 
specific extractors first - textExtractors.add(new AbstractFileHtmlExtract()); - textExtractors.add(new AbstractFileTikaTextExtract()); - - - ingestStatus = new HashMap<>(); - - keywords = new ArrayList<>(); - keywordLists = new ArrayList<>(); - keywordToList = new HashMap<>(); - - initKeywords(); - - if (keywords.isEmpty() || keywordLists.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); - } - - processedFiles = false; - finalSearcherDone = false; - searcherDone = true; //make sure to start the initial currentSearcher - //keeps track of all results per run not to repeat reporting the same hits - currentResults = new HashMap<>(); - - curDataSourceIds = new HashSet<>(); - - indexer = new Indexer(); - - final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; - logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs); - logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs); - - commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); - searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); - - initialized = true; - - commitTimer.start(); - searchTimer.start(); - } - - @Override - public void process(AbstractFile abstractFile) { + public ProcessResult process(AbstractFile abstractFile) { if (initialized == false) //error initializing indexing/Solr { logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); - return; - // RJCTODO -// return ProcessResult.OK; + return ProcessResult.OK; } try { //add data source id of the file to the set, keeping track of images being ingested @@ -262,25 +166,20 @@ public final class KeywordSearchIngestModule implements FileIngestModule { if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) { //skip indexing of virtual dirs (no content, no real name) - will index children files - return; - // RJCTODO -// return ProcessResult.OK; + return ProcessResult.OK; } - // RJCTODO //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it -// if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { -// indexer.indexFile(abstractFile, false); -// //notify depending module that keyword search (would) encountered error for this file -// ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_IO); -// return ProcessResult.ERROR; -// } -// else if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { - if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { + if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { + indexer.indexFile(abstractFile, false); + //notify depending module that keyword search (would) encountered error for this file + ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_IO); + return ProcessResult.ERROR; + } + else if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) { //index meta-data only indexer.indexFile(abstractFile, false); - // RJCTODO - // return ProcessResult.OK; + return ProcessResult.OK; } 
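A pattern worth noting in the surrounding hunks and throughout these patches: string-concatenation log calls are converted to java.util.logging's parameterized form. The parameterized form defers MessageFormat rendering until the record is actually published, so suppressed levels cost no string building, and it explains the doubled single quotes seen in messages like ''{0}'', since MessageFormat treats a single quote as an escape character. A standalone illustration (not part of the patch):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    class LoggingStyleExample {

        private static final Logger logger = Logger.getLogger(LoggingStyleExample.class.getName());

        void log(String fileName, long fileId) {
            // Before: the message string is built even when INFO is not logged.
            logger.log(Level.INFO, "Indexed file '" + fileName + "' (id: " + fileId + ").");

            // After: formatting is deferred; '' renders as a literal single quote.
            logger.log(Level.INFO, "Indexed file ''{0}'' (id: {1}).", new Object[]{fileName, fileId});
        }
    }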
processedFiles = true; @@ -291,8 +190,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { //index the file and content (if the content is supported) indexer.indexFile(abstractFile, true); - // RJCTODO -// return ProcessResult.OK; + return ProcessResult.OK; } /** @@ -405,6 +303,98 @@ public final class KeywordSearchIngestModule implements FileIngestModule { initialized = false; } + /** + * Initializes the module for new ingest run Sets up threads, timers, + * retrieves settings, keyword lists to run on + * + */ + @Override + public void init(long taskId) { + logger.log(Level.INFO, "init()"); + services = IngestServices.getDefault(); + initialized = false; + + caseHandle = Case.getCurrentCase().getSleuthkitCase(); + + tikaFormatDetector = new Tika(); + + ingester = Server.getIngester(); + + final Server server = KeywordSearch.getServer(); + try { + if (!server.isRunning()) { + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + logger.log(Level.SEVERE, msg); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); + return; + + } + } catch (KeywordSearchModuleException ex) { + logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); + //this means Solr is not properly initialized + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); + return; + } + + + //initialize extractors + stringExtractor = new AbstractFileStringExtract(); + stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); + stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions()); + + + //log the scripts used for debugging + final StringBuilder sbScripts = new StringBuilder(); + for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) { + sbScripts.append(s.name()).append(" "); + } + logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString()); + + textExtractors = new ArrayList<>(); + //order matters, more specific extractors first + textExtractors.add(new AbstractFileHtmlExtract(this)); + textExtractors.add(new AbstractFileTikaTextExtract()); + + + ingestStatus = new HashMap<>(); + + keywords = new ArrayList<>(); + keywordLists = new ArrayList<>(); + keywordToList = new HashMap<>(); + + initKeywords(); + + if (keywords.isEmpty() || keywordLists.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); + } + + processedFiles = false; + finalSearcherDone = false; + searcherDone = true; //make sure to start the initial currentSearcher + //keeps track of all results per run not to repeat reporting the same hits + currentResults = new HashMap<>(); + + curDataSourceIds = new HashSet<>(); + + indexer = new Indexer(); + + final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000; + logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs); + logger.log(Level.INFO, "Using searcher interval (ms): {0}", 
updateIntervalMs); + + commitTimer = new Timer(updateIntervalMs, new CommitTimerAction()); + searchTimer = new Timer(updateIntervalMs, new SearchTimerAction()); + + initialized = true; + + commitTimer.start(); + searchTimer.start(); + } + // RJCTODO // public void saveAdvancedConfiguration() { // if (advancedConfigPanel != null) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchOptionsPanelController.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchOptionsPanelController.java index cf28279578..675c4ffaf8 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchOptionsPanelController.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchOptionsPanelController.java @@ -1,7 +1,22 @@ /* - * To change this template, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2011 - 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ + package org.sleuthkit.autopsy.keywordsearch; import java.beans.PropertyChangeListener; @@ -13,14 +28,13 @@ import org.openide.util.Lookup; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; + @OptionsPanelController.TopLevelRegistration( categoryName = "#OptionsCategory_Name_KeywordSearchOptions", iconBase = "org/sleuthkit/autopsy/keywordsearch/options-icon.png", position = 2, keywords = "#OptionsCategory_Keywords_KeywordSearchOptions", keywordsCategory = "KeywordSearchOptions") -// moved messages to Bundle.properties -//@org.openide.util.NbBundle.Messages({"OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search", "OptionsCategory_Keywords_KeywordSearchOptions=Keyword Search"}) public final class KeywordSearchOptionsPanelController extends OptionsPanelController { private KeywordSearchConfigurationPanel panel; @@ -28,47 +42,56 @@ public final class KeywordSearchOptionsPanelController extends OptionsPanelContr private boolean changed; private static final Logger logger = Logger.getLogger(KeywordSearchConfigurationPanel.class.getName()); + @Override public void update() { getPanel().load(); changed = false; } + @Override public void applyChanges() { getPanel().store(); changed = false; } + @Override public void cancel() { getPanel().cancel(); } + @Override public boolean isValid() { return getPanel().valid(); } + @Override public boolean isChanged() { return changed; } + @Override public HelpCtx getHelpCtx() { - return null; // new HelpCtx("...ID") if you have a help set + return null; } + @Override public JComponent getComponent(Lookup masterLookup) { return getPanel(); } + @Override public void addPropertyChangeListener(PropertyChangeListener l) { pcs.addPropertyChangeListener(l); } + @Override public void removePropertyChangeListener(PropertyChangeListener l) { pcs.removePropertyChangeListener(l); } private KeywordSearchConfigurationPanel getPanel() 
{ if (panel == null) { - panel = new KeywordSearchConfigurationPanel();//this); + panel = new KeywordSearchConfigurationPanel(); } return panel; } @@ -91,7 +114,6 @@ public final class KeywordSearchOptionsPanelController extends OptionsPanelContr catch (Exception e) { logger.log(Level.SEVERE, "KeywordSearchOptionsPanelController listener threw exception", e); MessageNotifyUtil.Notify.show("Module Error", "A module caused an error listening to KeywordSearchOptionsPanelController updates. See log to determine which module. Some data could be incomplete.", MessageNotifyUtil.MessageType.ERROR); - } - + } } } diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index 3140135375..24d179cd6b 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -91,7 +91,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { } @Override - public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) { + public ProcessResult process(Content dataSource, IngestDataSourceWorkerController statusHelper) { imgName = dataSource.getName(); try { img = dataSource.getImage(); @@ -100,7 +100,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { logger.log(Level.SEVERE, "Failed to get image from Content.", ex); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + imgName)); - return; + return ProcessResult.ERROR; } // Skip images that are not E01 @@ -110,7 +110,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Skipping non-ewf image " + imgName)); skipped = true; - return; + return ProcessResult.OK; } if ((img.getMd5()!= null) && !img.getMd5().isEmpty()) @@ -122,7 +122,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { else { services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Image " + imgName + " does not have stored hash.")); - return; + return ProcessResult.ERROR; } logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName()); @@ -153,7 +153,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { for (int i = 0; i < totalChunks; i++) { if (statusHelper.isCancelled()) { running = false; - return; + return ProcessResult.OK; // RJCTODO: Use unknown? 
} data = new byte[ (int) chunkSize ]; try { @@ -162,7 +162,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { String msg = "Error reading " + imgName + " at chunk " + i; services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg)); logger.log(Level.SEVERE, msg, ex); - return; + return ProcessResult.ERROR; } messageDigest.update(data); statusHelper.progress(i); @@ -173,6 +173,7 @@ public class EwfVerifyIngestModule implements DataSourceIngestModule { verified = calculatedHash.equals(storedHash); logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash}); running = false; + return ProcessResult.OK; } @Override From 961aa33f4f9254a63b476ba6966eff4c8be613ad Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Tue, 4 Mar 2014 17:44:51 -0500 Subject: [PATCH 19/48] Modify keyword search module for new ingest API --- .../AbstractFileStringExtract.java | 6 +-- .../AbstractFileTikaTextExtract.java | 4 +- .../keywordsearch/KeywordListsManager.java | 45 +++++++++++++++++++ .../KeywordSearchEditListPanel.java | 4 +- .../KeywordSearchIngestModule.java | 26 +++++------ .../KeywordSearchListsAbstract.java | 14 +++--- .../KeywordSearchListsViewerPanel.java | 4 +- .../keywordsearch/KeywordSearchListsXML.java | 4 +- 8 files changed, 72 insertions(+), 35 deletions(-) create mode 100755 KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileStringExtract.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileStringExtract.java index 15c6aac52c..4d82827c6a 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileStringExtract.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileStringExtract.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011 Basis Technology Corp. + * Copyright 2011-2014 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -60,8 +60,8 @@ class AbstractFileStringExtract implements AbstractFileExtract { //STRING_CHUNK_BUF[1] = (byte) 0xBB; //STRING_CHUNK_BUF[2] = (byte) 0xBF; //} - public AbstractFileStringExtract() { - this.module = KeywordSearchIngestModule.getDefault(); + public AbstractFileStringExtract(KeywordSearchIngestModule module) { + this.module = module; this.ingester = Server.getIngester(); this.extractScripts.add(DEFAULT_SCRIPT); } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java index 2aabfb480e..b08c80c44f 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AbstractFileTikaTextExtract.java @@ -71,8 +71,8 @@ class AbstractFileTikaTextExtract implements AbstractFileExtract { private final ExecutorService tikaParseExecutor = Executors.newSingleThreadExecutor(); private final List TIKA_SUPPORTED_TYPES = new ArrayList<>(); - AbstractFileTikaTextExtract() { - this.module = KeywordSearchIngestModule.getDefault(); + AbstractFileTikaTextExtract(KeywordSearchIngestModule module) { + this.module = module; ingester = Server.getIngester(); Set mediaTypes = new Tika().getParser().getSupportedTypes(new ParseContext()); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java new file mode 100755 index 0000000000..e01d0186fe --- /dev/null +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java @@ -0,0 +1,45 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2014 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.sleuthkit.autopsy.keywordsearch; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * RJCTODO: + */ +public class KeywordListsManager { + private static KeywordListsManager instance = null; + private List keywords = new ArrayList<>(); //keywords to search + private List keywordLists = new ArrayList<>(); // lists currently being searched + private Map keywordToList = new HashMap<>(); + + /** + * Gets the singleton instance of this class. 
+ */ + public static synchronized KeywordListsManager getInstance() { + if (null == instance) { + instance = new KeywordListsManager(); + } + return instance; + } +} diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java index 21b678109c..eae85fb4aa 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011 Basis Technology Corp. + * Copyright 2011-2014 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -17,7 +17,6 @@ * limitations under the License. */ - package org.sleuthkit.autopsy.keywordsearch; import java.awt.Component; @@ -30,7 +29,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.logging.Level; - import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import java.util.regex.Pattern; diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 6a3a81438b..220a857d91 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -146,6 +146,11 @@ public final class KeywordSearchIngestModule implements FileIngestModule { KeywordSearchIngestModule() { } + @Override + public String getDisplayName() { + return KeywordSearchModuleFactory.getModuleName(); + } + @Override public ProcessResult process(AbstractFile abstractFile) { @@ -170,7 +175,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { } //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it - if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) { + if (services.getAbstractFileModuleResult(hashDBModuleName) == ProcessResult.ERROR) { indexer.indexFile(abstractFile, false); //notify depending module that keyword search (would) encountered error for this file ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_IO); @@ -203,7 +208,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule { return; } - //logger.log(Level.INFO, "complete()"); commitTimer.stop(); //NOTE, we let the 1 before last searcher complete fully, and enqueue the last one @@ -241,8 +245,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule { } //cleanup done in final searcher - - //postSummary(); } /** @@ -266,12 +268,9 @@ public final class KeywordSearchIngestModule implements FileIngestModule { runSearcher = false; finalSearcherDone = true; - //commit uncommited files, don't search again commit(); - //postSummary(); - cleanup(); } @@ -326,7 +325,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); logger.log(Level.SEVERE, msg); String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details)); + 
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsAbstract.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsAbstract.java
index aeaac5bf89..e145051d87 100755
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsAbstract.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsAbstract.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -48,8 +48,8 @@ public abstract class KeywordSearchListsAbstract {
 
     public KeywordSearchListsAbstract(String filePath) {
         this.filePath = filePath;
-        theLists = new LinkedHashMap<String, KeywordSearchList>();
-        lockedLists = new ArrayList<String>();
+        theLists = new LinkedHashMap<>();
+        lockedLists = new ArrayList<>();
         changeSupport = new PropertyChangeSupport(this);
     }
 
@@ -83,20 +83,20 @@ public abstract class KeywordSearchListsAbstract {
             return;
         }
         //phone number
-        List<Keyword> phones = new ArrayList<Keyword>();
+        List<Keyword> phones = new ArrayList<>();
         phones.add(new Keyword("[(]{0,1}\\d\\d\\d[)]{0,1}[\\.-]\\d\\d\\d[\\.-]\\d\\d\\d\\d", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER));
         //phones.add(new Keyword("\\d{8,10}", false));
         //IP address
-        List<Keyword> ips = new ArrayList<Keyword>();
+        List<Keyword> ips = new ArrayList<>();
         ips.add(new Keyword("(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IP_ADDRESS));
         //email
-        List<Keyword> emails = new ArrayList<Keyword>();
+        List<Keyword> emails = new ArrayList<>();
         emails.add(new Keyword("(?=.{8})[a-z0-9%+_-]+(?:\\.[a-z0-9%+_-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z]{2,4}(?<!\\.txt|\\.exe|\\.dll|\\.jpg|\\.xml)", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL));
         //URL
-        List<Keyword> urls = new ArrayList<Keyword>();
+        List<Keyword> urls = new ArrayList<>();
         //urls.add(new Keyword("http://|https://|^www\\.", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL));
         urls.add(new Keyword("((((ht|f)tp(s?))\\://)|www\\.)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,5})(\\:[0-9]+)*(/($|[a-zA-Z0-9\\.\\,\\;\\?\\'\\\\+&%\\$#\\=~_\\-]+))*", false, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL));
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java
index 6aa3e48967..60066bde4e 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsViewerPanel.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
  * Contact: carrier sleuthkit org
  *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,6 +16,7 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+
 package org.sleuthkit.autopsy.keywordsearch;
 
 import java.awt.Component;
@@ -36,7 +37,6 @@ import javax.swing.event.ListSelectionListener;
 import javax.swing.table.AbstractTableModel;
 import javax.swing.table.TableCellRenderer;
 import javax.swing.table.TableColumn;
-
 import org.openide.util.NbBundle;
 import org.openide.util.actions.SystemAction;
 import org.sleuthkit.autopsy.coreutils.Logger;
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
index 5891f8a406..36f96fcd9b 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsXML.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
 * Contact: carrier sleuthkit org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -57,8 +57,6 @@ public class KeywordSearchListsXML extends KeywordSearchListsAbstract{
     private static final Logger logger = Logger.getLogger(KeywordSearchListsXML.class.getName());
     private DateFormat dateFormatter;
 
-
-
     /**
      * Constructor to obtain handle on other than the current keyword list
      * (such as for import or export)
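As a quick sanity check of the stock expressions carried along above, the default phone-number keyword can be exercised standalone. This is illustrative only; the expected results follow from reading the regex, they are not output captured from Autopsy:

    import java.util.regex.Pattern;

    public class PhoneKeywordCheck {
        public static void main(String[] args) {
            // Same expression as the TSK_PHONE_NUMBER keyword above: optional
            // parentheses around the area code, '.' or '-' as separators.
            Pattern phone = Pattern.compile(
                    "[(]{0,1}\\d\\d\\d[)]{0,1}[\\.-]\\d\\d\\d[\\.-]\\d\\d\\d\\d");
            System.out.println(phone.matcher("555-123-4567").find());   // true
            System.out.println(phone.matcher("(555).123.4567").find()); // true
            System.out.println(phone.matcher("555 123 4567").find());   // false: no space separator
        }
    }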
From 1ef860294a0837095d324a9082435bcd60e17491 Mon Sep 17 00:00:00 2001
From: Richard Cordovano
Date: Wed, 5 Mar 2014 17:27:20 -0500
Subject: [PATCH 20/48] Refactored keyword search (transitional) for new
 ingest API

---
 .../keywordsearch/KeywordListsManager.java    | 126 ++++++++++++++++--
 .../KeywordSearchEditListPanel.java           |  10 +-
 .../KeywordSearchIngestModule.java            |  95 ++-----------
 .../KeywordSearchIngestSimplePanel.java       |  18 +--
 .../KeywordSearchListsViewerPanel.java        |  70 +++++-----
 .../KeywordSearchModuleFactory.java           |  11 +-
 .../KeywordSearchQueryManager.java            |   4 +-
 .../KeywordSearchResultFactory.java           |   5 +-
 .../autopsy/keywordsearch/LuceneQuery.java    |  26 ++--
 .../keywordsearch/TermComponentQuery.java     |  46 +++----
 10 files changed, 219 insertions(+), 192 deletions(-)

diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java
index e01d0186fe..bfa011659e 100755
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordListsManager.java
@@ -27,19 +27,129 @@ import java.util.Map;
 /**
  * RJCTODO:
  */
-public class KeywordListsManager {
-    private static KeywordListsManager instance = null;
-    private List<Keyword> keywords = new ArrayList<>(); //keywords to search
-    private List<String> keywordLists = new ArrayList<>(); // lists currently being searched
-    private Map<String, KeywordSearchListsAbstract.KeywordSearchList> keywordToList = new HashMap<>();
+class KeywordListsManager {
 
+    private static KeywordListsManager instance = null;
+    private final KeywordLists defaultKeywordLists = new KeywordLists();
+    private final HashMap<Long, KeywordLists> keywordListsForIngestJobs = new HashMap<>();
+
     /**
-     * Gets the singleton instance of this class.
+     * Gets the keyword lists manager singleton.
      */
-    public static synchronized KeywordListsManager getInstance() {
+    static synchronized KeywordListsManager getInstance() {
         if (null == instance) {
             instance = new KeywordListsManager();
         }
         return instance;
-    }
+    }
+
+    private KeywordListsManager() {
+        defaultKeywordLists.addKeywordLists(null); // RJCTODO: Not too fond of this trick...
+    }
+
+    // RJCTODO: May need to change this one
+    synchronized void addKeywordListsToDefaultLists(List<String> listNames) {
+        defaultKeywordLists.addKeywordLists(listNames);
+    }
+
+    // RJCTODO: May not need this one
+    synchronized void addKeywordListsToAllIngestJobs(List<String> listNames) {
+        for (KeywordLists listsForJob : keywordListsForIngestJobs.values()) {
+            listsForJob.addKeywordLists(listNames);
+        }
+    }
+
+    synchronized void addKeywordListsToIngestJob(List<String> listNames, long ingestJobId) {
+        KeywordLists listsForJob = keywordListsForIngestJobs.get(ingestJobId);
+        if (null == listsForJob) {
+            listsForJob = new KeywordLists();
+            keywordListsForIngestJobs.put(ingestJobId, listsForJob);
+        }
+        listsForJob.addKeywordLists(listNames);
+    }
+
+    synchronized List<String> getDefaultKeywordLists() {
+        return defaultKeywordLists.getKeywordLists();
+    }
+
+    synchronized List<String> getKeywordListsForIngestJob(long ingestJobId) {
+        KeywordLists listsForJob = keywordListsForIngestJobs.get(ingestJobId);
+        if (null == listsForJob) {
+            listsForJob = new KeywordLists();
+            keywordListsForIngestJobs.put(ingestJobId, listsForJob);
+        }
+        return listsForJob.getKeywordLists();
+    }
+
+    synchronized List<String> getKeywordListsForAllIngestJobs() {
+        List<String> keywordLists = new ArrayList<>();
+        for (KeywordLists listsForJob : keywordListsForIngestJobs.values()) {
+            List<String> listNames = listsForJob.getKeywordLists();
+            for (String listName : listNames) {
+                if (!keywordLists.contains(listName)) {
+                    keywordLists.add(listName);
+                }
+            }
+        }
+        return keywordLists;
+    }
+
+    synchronized void removeKeywordListsForIngestTask(long ingestTaskId) {
+        // RJCTODO: May want to have an event trigger this
+        keywordListsForIngestJobs.clear();
+    }
+
+    private static final class KeywordLists {
+
+        // RJCTODO: Understand better how these are used
+        private List<Keyword> keywords = new ArrayList<>(); //keywords to search
+        private List<String> keywordLists = new ArrayList<>(); // lists currently being searched
+        private Map<String, KeywordSearchListsAbstract.KeywordSearchList> keywordToList = new HashMap<>();
+
+        KeywordLists() {
+            addKeywordLists(null);
+        }
+
+        List<String> getKeywordLists() {
+            return new ArrayList<>(keywordLists);
+        }
+
+        void addKeywordLists(List<String> listNames) {
+            // Refresh everything to pick up changes to the keywords lists
+            // saved to disk.
+            // RJCTODO: Is this a good idea? Or should the XML file be read
+            // only once, in the constructor, now that there are lists per
+            // ingest job?
+            keywords.clear();
+            keywordLists.clear();
+            keywordToList.clear();
+
+            // StringBuilder sb = new StringBuilder();
+            KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent();
+            for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) {
+                // Add the list by list name.
+                // RJCTODO: Understand this better.
+                String listName = list.getName();
+                if ((list.getUseForIngest() == true)
+                        || (null != listNames && listNames.contains(listName))) {
+                    keywordLists.add(listName);
+                    // sb.append(listName).append(" ");
+                }
+
+                // Add the keywords from the list.
+                // RJCTODO: Understand this better - isn't this adding the
+                // keywords from every list, whether enabled for ingest or not?
+                for (Keyword keyword : list.getKeywords()) {
+                    if (!keywords.contains(keyword)) {
+                        keywords.add(keyword);
+                        keywordToList.put(keyword.getQuery(), list);
+                    }
+                }
+            }
+
+            // RJCTODO: Was logging code that was here useful? If so, specify
+            // ingest job id in message, set up logger for this class
+            // logger.log(Level.INFO, "Set new effective keyword lists: {0}", sb.toString());
+        }
+    }
+}
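The refactored manager keys keyword lists by ingest job, so concurrent jobs no longer share one mutable set. The intended call pattern, assuming the API above (the list names are made up for illustration, and java.util.Arrays/java.util.List are assumed imported):

    // At job setup, attach extra lists to one job only:
    KeywordListsManager manager = KeywordListsManager.getInstance();
    manager.addKeywordListsToIngestJob(Arrays.asList("Phone Numbers", "URLs"), ingestJobId);

    // Later, inside the module, fetch the lists for the same job id:
    List<String> listsToSearch = manager.getKeywordListsForIngestJob(ingestJobId);

The KeywordSearchIngestModule changes later in this patch do exactly the second step.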
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java
index eae85fb4aa..4dd33a1200 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchEditListPanel.java
@@ -172,13 +172,13 @@ class KeywordSearchEditListPanel extends javax.swing.JPanel implements ListSelec
                 String changed = evt.getPropertyName();
                 Object oldValue = evt.getOldValue();
                 if (changed.equals(IngestModuleEvent.COMPLETED.toString() )
-                        && ((String) oldValue).equals(KeywordSearchIngestModule.MODULE_NAME)) {
+                        && ((String) oldValue).equals(KeywordSearchModuleFactory.getModuleName())) {
                     initIngest(1);
                 } else if (changed.equals(IngestModuleEvent.STARTED.toString() )
-                        && ((String) oldValue).equals(KeywordSearchIngestModule.MODULE_NAME)) {
+                        && ((String) oldValue).equals(KeywordSearchModuleFactory.getModuleName())) {
                     initIngest(0);
                 } else if (changed.equals(IngestModuleEvent.STOPPED.toString() )
-                        && ((String) oldValue).equals(KeywordSearchIngestModule.MODULE_NAME)) {
+                        && ((String) oldValue).equals(KeywordSearchModuleFactory.getModuleName())) {
                     initIngest(1);
                 }
             }
@@ -219,9 +219,9 @@ class KeywordSearchEditListPanel extends javax.swing.JPanel implements ListSelec
         boolean noKeywords = !listSet ? true : currentKeywordList.getKeywords().isEmpty();
 
         // Certain buttons will be disabled if ingest is ongoing on this list
-        List<String> ingestLists = new ArrayList<String>();
+        List<String> ingestLists = new ArrayList<>();
         if (ingestOngoing) {
-            ingestLists = KeywordSearchIngestModule.getDefault().getKeywordLists();
+            ingestLists = KeywordListsManager.getInstance().getDefaultKeywordLists();
         }
         boolean inIngest = !listSet ? false : ingestLists.contains(currentKeywordList.getName());
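The transitional panel above consults only the default lists when deciding whether a list is in use. Since the manager now also exposes getKeywordListsForAllIngestJobs(), a later pass could instead query the union of the lists attached to every running job; a sketch of that variant (not what this patch does):

    List<String> ingestLists = new ArrayList<>();
    if (ingestOngoing) {
        // Union across all active jobs, rather than the default lists.
        ingestLists = KeywordListsManager.getInstance().getKeywordListsForAllIngestJobs();
    }
    boolean inIngest = listSet && ingestLists.contains(currentKeywordList.getName());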
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
index 220a857d91..b6cd7e503e 100755
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
@@ -49,7 +49,6 @@ import org.sleuthkit.autopsy.coreutils.EscapeUtil;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.coreutils.StopWatch;
 import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
-import org.sleuthkit.autopsy.coreutils.Version;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
@@ -79,7 +78,6 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
 public final class KeywordSearchIngestModule implements FileIngestModule {
 
     enum UpdateFrequency {
-
         FAST(20),
         AVG(10),
         SLOW(5),
@@ -95,19 +93,13 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
             return time;
         }
     };
+
     private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName());
-    public static final String MODULE_NAME = NbBundle.getMessage(KeywordSearchIngestModule.class,
-            "KeywordSearchIngestModule.moduleName");
-    public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class,
-            "KeywordSearchIngestModule.moduleDescription");
-    final public static String MODULE_VERSION = Version.getVersion();
+    private long ingestJobId;
     private IngestServices services;
     private Ingester ingester = null;
     private volatile boolean commitIndex = false; //whether to commit index next time
     private volatile boolean runSearcher = false; //whether to run searcher next time
-    private List<Keyword> keywords; //keywords to search
-    private List<String> keywordLists; // lists currently being searched
-    private Map<String, KeywordSearchListsAbstract.KeywordSearchList> keywordToList; //keyword to list name mapping
     private Timer commitTimer;
     private Timer searchTimer;
     private Indexer indexer;
@@ -129,8 +121,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
     private static List<AbstractFileExtract> textExtractors;
     private static AbstractFileStringExtract stringExtractor;
     private boolean initialized = false;
-    private KeywordSearchIngestSimplePanel simpleConfigPanel;
-    private KeywordSearchConfigurationPanel advancedConfigPanel;
     private Tika tikaFormatDetector;
 
     private enum IngestStatus {
@@ -226,7 +216,8 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
         postIndexSummary();
 
         //run one last search as there are probably some new files committed
-        if (keywordLists != null && !keywordLists.isEmpty() && processedFiles == true) {
+        List<String> keywordLists = KeywordListsManager.getInstance().getKeywordListsForIngestJob(ingestJobId);
+        if (!keywordLists.isEmpty() && processedFiles == true) {
             finalSearcher = new Searcher(keywordLists, true); //final searcher run
             finalSearcher.execute();
         } else {
@@ -293,10 +284,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
         textExtractors = null;
         stringExtractor = null;
 
-        keywords.clear();
-        keywordLists.clear();
-        keywordToList.clear();
-
         tikaFormatDetector = null;
 
         initialized = false;
     }
 
     /**
      *
      */
     @Override
-    public void init(long taskId) {
+    public void init(long ingestJobId) {
+        this.ingestJobId = ingestJobId;
         logger.log(Level.INFO, "init()");
         services = IngestServices.getDefault();
         initialized = false;
@@ -357,16 +345,11 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
 
         ingestStatus = new HashMap<>();
 
-        keywords = new ArrayList<>();
-        keywordLists = new ArrayList<>();
-        keywordToList = new HashMap<>();
-
-        initKeywords();
-
-        if (keywords.isEmpty() || keywordLists.isEmpty()) {
-            services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
-                    NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
-        }
+        // RJCTODO: Fetch lists for job and check?
+//        if (keywords.isEmpty() || keywordLists.isEmpty()) {
+//            services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
+//                    NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
+//        }
 
         processedFiles = false;
         finalSearcherDone = false;
@@ -506,57 +489,6 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
         }
     }
 
-    /**
-     * Initialize the keyword search lists and associated keywords from the XML
-     * loader Use the lists to ingest that are set in the permanent XML
-     * configuration
-     */
-    private void initKeywords() {
-        addKeywordLists(null);
-    }
-
-    /**
-     * If ingest is ongoing, this will add additional keyword search lists to
-     * the ongoing ingest The lists to add may be temporary and not necessary
-     * set to be added to ingest permanently in the XML configuration. The lists
-     * will be reset back to original (permanent configuration state) on the
-     * next ingest.
-     *
-     * @param listsToAdd lists to add temporarily to the ongoing ingest
-     */
-    void addKeywordLists(List<String> listsToAdd) {
-        KeywordSearchListsXML loader = KeywordSearchListsXML.getCurrent();
-
-        keywords.clear();
-        keywordLists.clear();
-        keywordToList.clear();
-
-        StringBuilder sb = new StringBuilder();
-
-        for (KeywordSearchListsAbstract.KeywordSearchList list : loader.getListsL()) {
-            final String listName = list.getName();
-            if (list.getUseForIngest() == true
-                    || (listsToAdd != null && listsToAdd.contains(listName))) {
-                keywordLists.add(listName);
-                sb.append(listName).append(" ");
-            }
-            for (Keyword keyword : list.getKeywords()) {
-                if (!keywords.contains(keyword)) {
-                    keywords.add(keyword);
-                    keywordToList.put(keyword.getQuery(), list);
-                }
-            }
-
-        }
-
-        logger.log(Level.INFO, "Set new effective keyword lists: {0}", sb.toString());
-
-    }
-
-    List<String> getKeywordLists() {
-        return keywordLists == null ? new ArrayList<String>() : keywordLists;
-    }
-
     /**
      * Check if time to commit, if so, run commit. Then run search if search
      * timer is also set.
@@ -572,7 +504,8 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
         //in worst case, we will run search next time after commit timer goes off, or at the end of ingest
         if (searcherDone && runSearcher) {
             //start search if previous not running
-            if (keywordLists != null && !keywordLists.isEmpty()) {
+            List<String> keywordLists = KeywordListsManager.getInstance().getKeywordListsForIngestJob(ingestJobId);
+            if (!keywordLists.isEmpty()) {
                 currentSearcher = new Searcher(keywordLists);
                 currentSearcher.execute();//searcher will stop timer and restart timer when done
             }
@@ -1075,7 +1008,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
 
                     //update artifact browser
                     if (!newArtifacts.isEmpty()) {
-                        services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
+                        services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), ARTIFACT_TYPE.TSK_KEYWORD_HIT, newArtifacts));
                     }
                 } //if has results
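For orientation, the transitional file ingest module interface exercised above amounts to five callbacks: init(long ingestJobId), process(AbstractFile), complete(), stop(), and getDisplayName(). A bare-bones implementation might look as follows; this is a sketch against the interfaces as they stand mid-series, and the exact home of ProcessResult is still shifting in these patches:

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.datamodel.AbstractFile;

    public final class SampleFileIngestModule implements FileIngestModule {

        private long ingestJobId;

        @Override
        public String getDisplayName() {
            return "Sample Module"; // real modules delegate to their factory
        }

        @Override
        public void init(long ingestJobId) {
            // Per-job state (such as keyword lists) is keyed on this id.
            this.ingestJobId = ingestJobId;
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            // Examine or index the file here. ProcessResult is assumed
            // visible via the ingest package, as in the module above.
            return ProcessResult.OK;
        }

        @Override
        public void complete() {
            // All files for the job have been processed; post summaries.
        }

        @Override
        public void stop() {
            // Ingest was cancelled; release resources.
        }
    }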
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestSimplePanel.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestSimplePanel.java
index 7c6b8090d6..59dd21e258 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestSimplePanel.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestSimplePanel.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 - 2013 Basis Technology Corp.
+ * Copyright 2011 - 2014 Basis Technology Corp.
  * Contact: carrier sleuthkit org
  *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -17,7 +17,6 @@
 * limitations under the License.
 */
 
-
 package org.sleuthkit.autopsy.keywordsearch;
 
 import java.util.ArrayList;
@@ -30,7 +29,7 @@ import javax.swing.table.TableColumn;
 import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
 
 /**
- * Simple ingest config panel
+ * Ingest options panel for the keyword search file ingest module.
 */
 public class KeywordSearchIngestSimplePanel extends javax.swing.JPanel {
 
@@ -39,10 +38,9 @@ public class KeywordSearchIngestSimplePanel extends javax.swing.JPanel {
     private KeywordTableModel tableModel;
     private List<KeywordSearchListsAbstract.KeywordSearchList> lists;
 
-    /** Creates new form KeywordSearchIngestSimplePanel */
-    public KeywordSearchIngestSimplePanel() {
+    KeywordSearchIngestSimplePanel() {
         tableModel = new KeywordTableModel();
-        lists = new ArrayList<KeywordSearchListsAbstract.KeywordSearchList>();
+        lists = new ArrayList<>();
         reloadLists();
         initComponents();
         customizeComponents();
@@ -56,7 +54,7 @@ public class KeywordSearchIngestSimplePanel extends javax.swing.JPanel {
         //customize column widths
         final int width = listsScrollPane.getPreferredSize().width;
         listsTable.setAutoResizeMode(JTable.AUTO_RESIZE_NEXT_COLUMN);
-        TableColumn column = null;
+        TableColumn column;
         for (int i = 0; i < listsTable.getColumnCount(); i++) {
             column = listsTable.getColumnModel().getColumn(i);
             if (i == 0) {
@@ -183,7 +181,6 @@ public class KeywordSearchIngestSimplePanel extends javax.swing.JPanel {
     // End of variables declaration//GEN-END:variables
 
     private void reloadLangs() {
-        //TODO multiple List