v) {
return v.visit(this);
@@ -358,10 +381,12 @@ public final class FileTypesByMimeType extends Observable implements AutopsyVisi
* results
*/
private void updateDisplayName(String mimeType) {
- final long count = new MediaSubTypeNodeChildren(mimeType).calculateItems(skCase, mimeType);
+ final String count = shouldShowCounts(skCase)
+ ? " (" + Long.toString(new MediaSubTypeNodeChildren(mimeType).calculateItems(skCase, mimeType)) + ")"
+ : "";
String[] mimeTypeParts = mimeType.split("/");
//joins up all remaining parts of the mimeType into one sub-type string
- super.setDisplayName(StringUtils.join(ArrayUtils.subarray(mimeTypeParts, 1, mimeTypeParts.length), "/") + " (" + count + ")");
+ super.setDisplayName(StringUtils.join(ArrayUtils.subarray(mimeTypeParts, 1, mimeTypeParts.length), "/") + count);
}
/**
diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java b/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java
index f488e741e3..f237243dd0 100644
--- a/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java
+++ b/Core/src/org/sleuthkit/autopsy/directorytree/ViewContextAction.java
@@ -55,7 +55,7 @@ import org.sleuthkit.datamodel.VolumeSystem;
* selecting the parent in the tree view, then selecting the content in the
* results view.
*/
-public final class ViewContextAction extends AbstractAction {
+public class ViewContextAction extends AbstractAction {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(ViewContextAction.class.getName());
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java
index 59b5b64d9c..5b522d552a 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleFactory.java
@@ -49,7 +49,7 @@ package org.sleuthkit.autopsy.ingest;
* implementations must be marked with the following NetBeans Service provider
* annotation:
*
- * @ServiceProvider(service=IngestModuleFactory.class)
+ * \@ServiceProvider(service=IngestModuleFactory.class)
*
* IMPORTANT TIP: If an implementation of IngestModuleFactory does not need to
* provide implementations of all of the IngestModuleFactory methods, it can
diff --git a/Experimental/build.xml b/Experimental/build.xml
index 07de3bd2f4..b3923a5c30 100644
--- a/Experimental/build.xml
+++ b/Experimental/build.xml
@@ -13,17 +13,13 @@
-
+
-
-
-
-
diff --git a/Experimental/ivy.xml b/Experimental/ivy.xml
index 4b336cfd9e..e50fab6472 100644
--- a/Experimental/ivy.xml
+++ b/Experimental/ivy.xml
@@ -3,8 +3,6 @@
-
-
@@ -13,13 +11,5 @@
-
-
-
-
-
-
-
-
diff --git a/Experimental/manifest.mf b/Experimental/manifest.mf
index eb45e376b8..cafb630948 100644
--- a/Experimental/manifest.mf
+++ b/Experimental/manifest.mf
@@ -4,4 +4,3 @@ OpenIDE-Module: org.sleuthkit.autopsy.experimental
OpenIDE-Module-Layer: org/sleuthkit/autopsy/experimental/autoingest/layer.xml
OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties
OpenIDE-Module-Specification-Version: 1.0
-OpenIDE-Module-Install: org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/eventlisteners/Installer.class
\ No newline at end of file
diff --git a/Experimental/nbproject/project.xml b/Experimental/nbproject/project.xml
index c990077688..280fbc431b 100644
--- a/Experimental/nbproject/project.xml
+++ b/Experimental/nbproject/project.xml
@@ -110,7 +110,7 @@
10
- 10.7
+ 10.8
@@ -156,26 +156,6 @@
ext/c3p0-0.9.5.jar
release/modules/ext/c3p0-0.9.5.jar
-
- ext/commons-dbcp2-2.1.1.jar
- release/modules/ext/commons-dbcp2-2.1.1.jar
-
-
- ext/commons-logging-1.2.jar
- release/modules/ext/commons-logging-1.2.jar
-
-
- ext/commons-pool2-2.4.2.jar
- release/modules/ext/commons-pool2-2.4.2.jar
-
-
- ext/postgresql-42.0.0.jar
- release/modules/ext/postgresql-42.0.0.jar
-
-
- ext/sqlite-jdbc-3.16.1.jar
- release/modules/ext/sqlite-jdbc-3.16.1.jar
-
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java
index 4c1cea69b8..afae397553 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java
@@ -774,8 +774,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang
}
}
-
-
/**
* Removes a set of auto ingest jobs from a collection of jobs.
*
@@ -915,8 +913,11 @@ public final class AutoIngestManager extends Observable implements PropertyChang
AutoIngestManager.this.completedJobs = newCompletedJobsList;
} catch (Exception ex) {
- /* NOTE: Need to catch all exceptions here. Otherwise uncaught exceptions will
- propagate up to the calling thread and may stop it from running.*/
+ /*
+ * NOTE: Need to catch all exceptions here. Otherwise
+ * uncaught exceptions will propagate up to the calling
+ * thread and may stop it from running.
+ */
SYS_LOGGER.log(Level.SEVERE, String.format("Error scanning the input directory %s", rootInputDirectory), ex);
}
}
@@ -2274,6 +2275,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
* is shutting down.
*/
ingestLock.wait();
+ SYS_LOGGER.log(Level.INFO, "Finished ingest modules analysis for {0} ", manifestPath);
IngestJob.ProgressSnapshot jobSnapshot = ingestJob.getSnapshot();
for (IngestJob.ProgressSnapshot.DataSourceProcessingSnapshot snapshot : jobSnapshot.getDataSourceSnapshots()) {
if (!snapshot.isCancelled()) {
@@ -2505,9 +2507,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang
String eventType = event.getPropertyName();
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
synchronized (ingestLock) {
- if (!IngestManager.getInstance().isIngestRunning()) {
- ingestLock.notify();
- }
+ ingestLock.notify();
}
}
}
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java
index 0811a52c2b..e7b633a133 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/SharedConfiguration.java
@@ -79,7 +79,7 @@ public class SharedConfiguration {
private static final String HASHDB_CONFIG_FILE_NAME = "hashLookup.settings"; //NON-NLS
private static final String HASHDB_CONFIG_FILE_NAME_LEGACY = "hashsets.xml"; //NON-NLS
public static final String FILE_EXPORTER_SETTINGS_FILE = "fileexporter.settings"; //NON-NLS
- private static final String ENTERPRISE_ARTIFACTS_MANAGER_PROPERTIES_FILE = "EnterpriseArtifactsManager.properties"; //NON-NLS
+ private static final String CENTRAL_REPOSITORY_PROPERTIES_FILE = "CentralRepository.properties"; //NON-NLS
private static final String SHARED_CONFIG_VERSIONS = "SharedConfigVersions.txt"; //NON-NLS
// Folders
@@ -206,7 +206,7 @@ public class SharedConfiguration {
uploadMultiUserAndGeneralSettings(remoteFolder);
uploadHashDbSettings(remoteFolder);
uploadFileExporterSettings(remoteFolder);
- uploadEnterpriseArtifactsManagerSettings(remoteFolder);
+ uploadCentralRepositorySettings(remoteFolder);
try {
Files.deleteIfExists(uploadInProgress.toPath());
@@ -271,7 +271,7 @@ public class SharedConfiguration {
downloadFileExtMismatchSettings(remoteFolder);
downloadAndroidTriageSettings(remoteFolder);
downloadFileExporterSettings(remoteFolder);
- downloadEnterpriseArtifactsManagerSettings(remoteFolder);
+ downloadCentralRepositorySettings(remoteFolder);
// Download general settings, then restore the current
// values for the unshared fields
@@ -778,27 +778,27 @@ public class SharedConfiguration {
}
/**
- * Upload Enterprise Artifacts Manager settings.
+ * Upload Central Repository settings.
*
* @param remoteFolder Shared settings folder
*
* @throws SharedConfigurationException
*/
- private void uploadEnterpriseArtifactsManagerSettings(File remoteFolder) throws SharedConfigurationException {
- publishTask("Uploading Enterprise Artifacts Manager configuration");
- copyToRemoteFolder(ENTERPRISE_ARTIFACTS_MANAGER_PROPERTIES_FILE, moduleDirPath, remoteFolder, true);
+ private void uploadCentralRepositorySettings(File remoteFolder) throws SharedConfigurationException {
+ publishTask("Uploading Central Repository configuration");
+ copyToRemoteFolder(CENTRAL_REPOSITORY_PROPERTIES_FILE, moduleDirPath, remoteFolder, true);
}
/**
- * Download Enterprise Artifacts Manager settings.
+ * Download Central Repository settings.
*
* @param remoteFolder Shared settings folder
*
* @throws SharedConfigurationException
*/
- private void downloadEnterpriseArtifactsManagerSettings(File remoteFolder) throws SharedConfigurationException {
- publishTask("Downloading Enterprise Artifacts Manager configuration");
- copyToLocalFolder(ENTERPRISE_ARTIFACTS_MANAGER_PROPERTIES_FILE, moduleDirPath, remoteFolder, true);
+ private void downloadCentralRepositorySettings(File remoteFolder) throws SharedConfigurationException {
+ publishTask("Downloading Central Repository configuration");
+ copyToLocalFolder(CENTRAL_REPOSITORY_PROPERTIES_FILE, moduleDirPath, remoteFolder, true);
}
/**
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/Bundle.properties b/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/Bundle.properties
deleted file mode 100644
index ad927009be..0000000000
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/Bundle.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-OpenIDE-Module-Display-Category=Ingest Module
-OpenIDE-Module-Name=Enterprise Artifacts Manager
-OpenIDE-Module-Short-Description=Enterprise Artifacts Manager Ingest Module
-OpenIDE-Module-Long-Description=\
- Enterprise Artifacts Manager ingest module. \n\n\
- This ingest module stores artifacts of selected types into a central database.\n\
- Stored artifacts are used by future cases to compare and analyzes file and artifacts during ingest.
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/datamodel/EamArtifact.java b/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/datamodel/EamArtifact.java
deleted file mode 100644
index dd33b5e0e9..0000000000
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/datamodel/EamArtifact.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Enterprise Artifacts Manager
- *
- * Copyright 2015-2017 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.experimental.enterpriseartifactsmanager.datamodel;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-
-/**
- *
- * Used to store info about a specific artifact.
- */
-public class EamArtifact implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private String ID;
- private String artifactValue;
- private Type artifactType;
- private final List artifactInstances;
-
- /**
- * Load the default correlation artifact types
- */
- public static List getDefaultArtifactTypes() {
- List DEFAULT_ARTIFACT_TYPES = new ArrayList<>();
- DEFAULT_ARTIFACT_TYPES.add(new EamArtifact.Type("FILES", true, true)); // NON-NLS
- DEFAULT_ARTIFACT_TYPES.add(new EamArtifact.Type("DOMAIN", true, false)); // NON-NLS
- DEFAULT_ARTIFACT_TYPES.add(new EamArtifact.Type("EMAIL", true, false)); // NON-NLS
- DEFAULT_ARTIFACT_TYPES.add(new EamArtifact.Type("PHONE", true, false)); // NON-NLS
- DEFAULT_ARTIFACT_TYPES.add(new EamArtifact.Type("USBID", true, false)); // NON-NLS
- return DEFAULT_ARTIFACT_TYPES;
- }
-
- public EamArtifact(Type artifactType, String artifactValue) {
- this.ID = "";
- this.artifactType = artifactType;
- this.artifactValue = artifactValue;
- this.artifactInstances = new ArrayList<>();
- }
-
- public Boolean equals(EamArtifact otherArtifact) {
- return ((this.getID().equals(otherArtifact.getID()))
- && (this.getArtifactType().equals(otherArtifact.getArtifactType()))
- && (this.getArtifactValue().equals(otherArtifact.getArtifactValue()))
- && (this.getInstances().equals(otherArtifact.getInstances())));
- }
-
- @Override
- public String toString() {
- String result = this.getID()
- + this.getArtifactType().toString()
- + this.getArtifactValue();
- result = this.getInstances().stream().map((inst) -> inst.toString()).reduce(result, String::concat);
- return result;
- }
-
- /**
- * @return the ID
- */
- public String getID() {
- return ID;
- }
-
- /**
- * @param ID the ID to set
- */
- public void setID(String ID) {
- this.ID = ID;
- }
-
- /**
- * @return the artifactValue
- */
- public String getArtifactValue() {
- return artifactValue;
- }
-
- /**
- * @param artifactValue the artifactValue to set
- */
- public void setArtifactValue(String artifactValue) {
- this.artifactValue = artifactValue;
- }
-
- /**
- * @return the artifact Type
- */
- public Type getArtifactType() {
- return artifactType;
- }
-
- /**
- * @param artifactType the artifact Type to set
- */
- public void setArtifactType(Type artifactType) {
- this.artifactType = artifactType;
- }
-
- /**
- * @return the List of artifactInstances; empty list of none have been
- * added.
- */
- public List getInstances() {
- return new ArrayList<>(artifactInstances);
- }
-
- /**
- * @param artifactInstances the List of artifactInstances to set.
- */
- public void setInstances(List artifactInstances) {
- this.artifactInstances.clear();
- if (null != artifactInstances) {
- this.artifactInstances.addAll(artifactInstances);
- }
- }
-
- /**
- * @param instance the instance to add
- */
- public void addInstance(EamArtifactInstance artifactInstance) {
- this.artifactInstances.add(artifactInstance);
- }
-
- public static class Type implements Serializable {
-
- private int id;
- private String name;
- private Boolean supported;
- private Boolean enabled;
-
- public Type(int id, String name, Boolean supported, Boolean enabled) {
- this.id = id;
- this.name = name;
- this.supported = supported;
- this.enabled = enabled;
- }
-
- public Type(String name, Boolean supported, Boolean enabled) {
- this(-1, name, supported, enabled);
- }
-
- /**
- * Determine if 2 Type objects are equal based on having the same
- * Type.name.
- *
- * @param otherType Type object for comparison.
- *
- * @return true or false
- */
- @Override
- public boolean equals(Object that) {
- if (this == that) {
- return true;
- } else if (!(that instanceof EamArtifact.Type)) {
- return false;
- } else {
- return ((EamArtifact.Type) that).sameType(this);
- }
- }
-
- /**
- * Determines if the content of this artifact type object is equivalent
- * to the content of another artifact type object.
- *
- * @param that the other type
- *
- * @return true if it is the same type
- */
- private boolean sameType(EamArtifact.Type that) {
- return this.id == that.getId()
- && this.name.equals(that.getName())
- && Objects.equals(this.supported, that.isSupported())
- && Objects.equals(this.enabled, that.isEnabled());
- }
-
- @Override
- public int hashCode() {
- int hash = 7;
- hash = 67 * hash + Objects.hashCode(this.id);
- hash = 67 * hash + Objects.hashCode(this.name);
- hash = 67 * hash + Objects.hashCode(this.supported);
- hash = 67 * hash + Objects.hashCode(this.enabled);
- return hash;
- }
-
- @Override
- public String toString() {
- StringBuilder str = new StringBuilder();
- str.append("(id=").append(id);
- str.append(", name=").append(name);
- str.append(", supported=").append(supported.toString());
- str.append(", enabled=").append(enabled.toString());
- str.append(")");
- return str.toString();
- }
-
- /**
- * @return the id
- */
- public int getId() {
- return id;
- }
-
- /**
- * @param id the id to set
- */
- public void setId(int id) {
- this.id = id;
- }
-
- /**
- * Get the name of this Artifact Type.
- *
- * @return the name
- */
- public String getName() {
- return name;
- }
-
- /**
- * Set the name of this Artifact Type
- *
- * @param name the name to set
- */
- public void setName(String name) {
- this.name = name;
- }
-
- /**
- * Check if this Artifact Type is supported.
- *
- * @return true or false
- */
- public Boolean isSupported() {
- return supported;
- }
-
- /**
- * Set this Artifact Type as supported or not supported.
- *
- * @param supported the supported to set
- */
- public void setSupported(Boolean supported) {
- this.supported = supported;
- }
-
- /**
- * Check if this Artifact Type is enabled.
- *
- * @return true or false
- */
- public Boolean isEnabled() {
- return enabled;
- }
-
- /**
- * Set this Artifact Type as enabled or not enabled.
- *
- * @param enabled the enabled to set
- */
- public void setEnabled(Boolean enabled) {
- this.enabled = enabled;
- }
- }
-}
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/CONFIG.md b/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/CONFIG.md
deleted file mode 100644
index d3bd10561e..0000000000
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/CONFIG.md
+++ /dev/null
@@ -1,140 +0,0 @@
-# Configuration
-
-## Database Setup
-
-There are 2 choices for database platforms: SQLite and PostgreSQL.
-1. SQLite is a database in a file stored locally on the same host that is running Autopsy.
-There is nothing to do to setup this database. It will be created by Autopsy on your
-behalf, if it doesn't already exist.
-2. PostgreSQL is a database server that can be run either on the same host that is
-running Autopsy or on a remote server. To use PostgreSQL with the EnterpriseArtifactManager module,
-you will need the server to be running, have an existing database named "enterpriseartifactmanagerdb"
-and have an existing user/pass with ownership of the enterpriseartifactmanagerdb database.
-The tables and indices will be automatically created by Autopsy.
-See the [Autopsy multi-user settings documentation[(http://sleuthkit.org/autopsy/docs/user-docs/4.3/install_postgresql.html) for help setting up your PostgreSQL server.
-
-## Enable Module and Configure Database Settings
-
-In the menu go to: Tools -> Options -> EnterpriseArtifactManager
-
-1. Check the box to Enable Enterprise Artifact Manager. This will enable the Database Platform dropdown and Configure button.
-2. In the dropdown, select the database platform that you want to use.
-3. Click the Configure button to configure the settings for the chosen database platform.
-4. Click the Apply button to save your database configuration settings.
-
-### Configure SQLite Database Settings
-
-There is only one step here, to specify the path and filename for the database.
-You can accept the default value or use the Open button to choose another path.
-The database file name can be called anything you want, but it is convenient to
-give it a ".db" suffix.
-
-Once you have selected the path, click the Test Connection button.
-If you see a green check next to the button, everything is ready to go.
-If you see a red check next to the button, there is a problem with the path
-you selected and you'll have to resolve that problem.
-
-Once the test passes, click the Save button to save your selection and close the window.
-
-### Configure PostgreSQL Database Settings
-
-For PostgreSQL all values are required, but some defaults are provided for convenience.
-
-1. Host Name/IP is the hostname or IP of your PostgreSQL server.
-2. Port is the port that the PostgreSQL server is listening on; default is 5432.
-3. Database name is the name of the database you are using for this module; default is enterpriseartifactmanagerdb.
-4. User Name is the PostgreSQL user that owns and has full permissions to the database specified in step 3.
-5. User Password is the password for the user.
-
-Once all values have been entered, click the Test Connection button.
-If you see a green check next to the button, everything is ready to go.
-If you see a red check next to the button, there is a problem with the values
-you entered and you'll have to resolve that problem.
-
-Once the test passes, click the Save button to save your selection and close the window.
-
-## Import Globally Known Artifacts
-
-The purpose of this feature is to store any Known or Known Bad Artifacts in
-the database. Think of this feature like a dynamic Hash List.
-These artifacts are used during Ingest to flag files as Interesting.
-They are also displayed in the Content Viewer when a file or artifact is selected that is
-associated with one of the globally known artifacts.
-
-When importing a hash database, all fields are required.
-
-1. Select the Database Path using the Open button. This is the file containing
-the hash values that you want to import. You can import multiple files, but only
-one at a time. The format of these files must be the same format as used by
-the hash database module.
-2. Select the database type. The type of content in the database being imported.
-3. Define the attribution for this database.
- a. Select the Source Organization in the dropdown list.
-This is the organization that provided the hash database to you.
- b. If you do not see the Organization in the list, use the [Add New Organization](FEATURES.md#adding-a-new-organization) button to add it.
-Once you add it, you can then select it in the dropdown list.
- c. Enter a name for the dataset. This can be anything you want, but is often something like "child porn", "drugs", "malware", "corp hashlist", etc.
- d. Enter a version number for that dataset. This can be anything you want, but is often something like "1.0", "1.1a", 20170505", etc.
-4. Click the OK button to start the import.
-
-## Manage Correlatable Tags
-
-In Autopsy, you are allowed to define your own Tag names, tag files and artifacts,
- and add comments when you tag a file or artifact.
-
-The purpose of this feature is to associate one or more of those tags with this module
-to be used for Correlation.
-By default there is a tag called "Evidence" as the only tag associated with this module.
-
-To associate one or more tag(s) with this module, check the Correlate box next to the tag
-name(s) and click OK.
-
-### What does it mean for a tag to be associated with this module?
-
-Any file or artifact that a user tags with one of the associated tags will be
-added to the database as a file or artifact of interest.
-Any future data source ingest, where this module is enabled, will use those
-files or artifacts as if they were part of the Known Bad list, causing matching files
-from that ingest to be added to the Interesting Artifacts list in that currently open case.
-
-The term Correlate means that files processed during a future ingest will be correlated
-with files existing in the database.
-
-As an example, I have a case open and I tag an image called "evilphoto.png" with the
-default "Evidence" tag. That image will be stored in the database as a file of interest.
-In the next data source that I ingest for the same case or a future case,
-if an image with the same MD5 hash as "evilphoto.png"
-is found, it will be automatically added to the Interesting Files tree and assumed
-to be evidence.
-This makes it easy to find and flag things in future cases that you know are
-Interesting.
-
-## Manage Correlation Types
-
-This feature allows the user to control how much data is being stored in the database
-to use for correlation and analysis.
-By default, only FILES is enabled.
-Select the types that you want to enable and click OK.
-
-The meaning of each type is as follows:
-
-* FILES - file path and MD5 hash
-* DOMAIN - domain name
-* EMAIL - email address
-* PHONE - phone number
-* USBID - device ID of connected USB devices.
-
-### What does Correlation mean?
-
-Artifacts stored in the database are available for this module to use for analysis.
-That analysis comes in many forms.
-When a file or artifact is extracted during ingest, this module will use the database
-to find other files or artifacts that match it, to determine if that new file should be
-flagged as an Interesting File.
-
-If that file or artifact does not exist in the database, and that Correlation Type
-is enabled, then it will be added to the database.
-
-Having more data in the database will obviously allow this module to be more thorough,
-but for some, database size is a concern, so we allow them to select a subset of data
-to collect and use for analysis.
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/DEVELOP.md b/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/DEVELOP.md
deleted file mode 100644
index 5b61ce70c4..0000000000
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/DEVELOP.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# Instructions for doing development of Autopsy Modules
-
-## On Windows, Setup your development environment with Autopsy sources and javadocs
-
-* Install x64 PostgreSQL and setup:
- * http://sleuthkit.org/autopsy/docs/user-docs/4.3/install_postgresql.html
-
-* Install Oracle Java SE JDK 8 - Windows x64 from Oracle:
- * http://www.oracle.com/technetwork/java/javase/downloads/jdk8-downloads-2133151.html
-
-* Install NetBeans (choose the 'All' version):
- * https://netbeans.org/downloads/
-
-* Install Git for Windows x64:
- * https://git-scm.com/downloads
-
-* Install doxygen and make sure it is added to your PATH
- * http://www.stack.nl/~dimitri/doxygen/download.html
-
-* Sleuthkit and the DataModel java bindings
- * Clone sleuthkit repo and set TSK_HOME environment variable.
- * For the java bindings, there are two ways to get these
- 1. [build Sleuthkit and then the java bindings](https://github.com/sleuthkit/sleuthkit/blob/develop/win32/BUILDING.txt), requiring Visual Studio and several
- dependant libraries.
- 2. download the [Autopsy dev platform zip](https://github.com/sleuthkit/autopsy/releases/download/autopsy-4.4.0/autopsy-4.4.0-devplatform.zip) and copy autopsy-4.4.0-devplatform/autopsy/modules/ext/Tsk_DataModel_PostgreSQL.jar to TSK_HOME/bindings/java/dist/
- * Set up environment variables, sample values:
- - JAVA_HOME=C:\Program Files\Java\jdk1.8.0_121
- - JDK_HOME=C:\Program Files\Java\jdk1.8.0_121
- - JRE_HOME_64=C:\Program Files\Java\jre1.8.0_121
- - LIBEWF_HOME=C:\libewf_64bit (only needed if you chose option #1 above)
- - LIBVHDI_HOME=C:\libvhdi_64bit (only needed if you chose option #1 above)
- - POSTGRESQL_HOME_64=c:\Program Files\PostgreSQL\9.6 (only needed if you chose option #1 above)
- - TSK_HOME=c:\sleuthkit
- - PATH=...;C:\Program Files\Java\jdk1.8.0_121\bin;C:\Program Files\NetBeans 8.2\extide\ant\bin;C:\Program Files\doxygen\bin
-
-* Build Autopsy platform:
- * Reference: https://github.com/sleuthkit/autopsy/blob/develop/BUILDING.txt
- * Clone Autopsy project
- * git clone git@github.com:sleuthkit/autopsy.git
- * git checkout develop
- * Add Autopsy project to NetBeans
- * File -> Open Project
- * Build the top level Autopsy project
- * Generate javadoc and add doc folder in the documentation tab
-
-If the project builds correctly, everything is installed correctly.
-
-## How to build disk images for development/testing
-
-Refer to MS technet instructions for creating/using a VHD: https://technet.microsoft.com/en-us/library/gg318052(v=ws.10).aspx
-
-But here is the general idea:
-* On Windows, use Disk Management tool to create a Virtual Hard Disk (.vhd) using the "dynamically expanding" disk format. Choose a small-ish disk size if you want the testing to be quick.
-* Initialize the disk (Initialize Disk).
-* Format the disk (New Simple Volume).
-* Mount that disk (Attach VHD)
-* Copy some files onto the disk.
-* Umount that disk (Detach VHD). Do NOT delete the disk when detaching!
-
-Repeat the above steps to create additional disk images.
-
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/FEATURES.md b/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/FEATURES.md
deleted file mode 100644
index de96fee0a0..0000000000
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/enterpriseartifactsmanager/docs/FEATURES.md
+++ /dev/null
@@ -1,116 +0,0 @@
-# Features
-
-Once you have configured everything, created a case, and have run the ingest of at least one data source,
-you can make use of some other exciting features that are described below.
-
-## Content Viewer
-
-This module adds a new tab to the [Content Viewer](http://sleuthkit.org/autopsy/docs/user-docs/4.3/content_viewer_page.html).
-The tab for this module is called "Other Cases".
-It can display data that is found in other cases, other data sources for the same case, or imported global artifacts.
-
-If at least one other case or data source has been ingested with this module enabled,
-there is a potential that data will be displayed in the Other Cases content viewer.
-If the selected file or artifact is associated by one of the supported Correlation Types,
-to one or more file(s) or artifact(s) in the database, the associated files/artifacts will be displayed.
-Note: the Content Viewer will display ALL associated files and artifacts available in the database.
-It ignores the user's [enabled/disabled Correlation Types](CONFIG.md#manage-correlation-types).
-
-If the user right-clicks on a row, a menu will be displayed.
-This menu has several options.
-1. [Show Commonality Details](FEATURES.md#show-commonality-details)
-2. [Save to CSV](FEATURES.md#save-to-csv)
-3. [Show Case Details](FEATURES.md#show-case-details)
-4. [Select All](FEATURES.md#select-all)
-
-Click option for more details.
-
-### Rows in the table
-
-By default, the rows in the content viewer will have background colors to indicate if they
-are known to be of interest.
-Files/artifacts that are Known Bad will have a Red background, Unknown will have Yellow background,
-and Known will have a White background.
-
-The user can click on any column heading to sort by the values in that column.
-
-### Show Commonality Details
-
-The concept of Commonality simply means, how common is the selected file.
-The value is the percentage of case/data source tuples that have the selected file or artifact.
-
-### Save to CSV
-
-This option will save ALL SELECTED rows in the Content Viewer table to a CSV file.
-By default, the CSV file is saved into the Export directory inside the currently open Autopsy case,
-but the user is free to select a different location.
-
-Note: if you want to copy/paste rows, it is usually possible to use CTRL+C to copy the
-selected rows and then CTRL+V to paste them into a file, but it will not be CSV formatted.
-
-### Show Case Details
-
-This option will open a dialog that displays all of the relevant details for the selected case.
-The details will include:
-1. Case UUID
-2. Case Name
-3. Case Creation Date
-4. Case Examiner contact information
-5. Case Examiner's notes
-
-These details would have been entered by the examiner of the selected case, by visiting
-the Case -> Enterprise Artifact Manager Case Details menu, when that case was open.
-
-### Select All
-
-This option will select all rows in the Content Viewer table.
-
-## Interesting Items tree
-
-In the Results tree of an open case is an entry called Interesting Items.
-When this module is enabled, all of the enabled Correlatable Types will cause
-matching files to be added to this Interesting Items tree during ingest.
-
-As an example, if the FILES Correlatable Type is enabled, and the ingest is
-currently processing a file, for example "badfile.exe", and the MD5 hash for that
-file already exists in the database as a KNOWN BAD file, then an entry in the Interesting Items tree
-will be added for the current instance of "badfile.exe" in the data source currently being ingested.
-
-The same type of thing will happen for each [enabled Correlatable Type](CONFIG.md#manage-correlation-types).
-
-In the case of the PHONE correlatable type, the Interesting Items tree will start
-a sub-tree for each phone number. The sub-tree will then contain each instance of that
-Known Bad phone number.
-
-## Edit Enterprise Artifact Manager Case Details
-
-By default, Autopsy lets you edit Case Details in the Case menu.
-When this module is enabled, there is an additional option in the Case menu,
-called "Enterprise Artifact Manager Case Details".
-
-This is where the examiner can store a number of details about the case.
-1. The organization of the case examiner.
-2. The contact information of the case examiner.
-3. The case examiner's case notes.
-
-To define the organization of the case examiner, simply select the organization name
-from the dropdown box.
-If the organization is not listed, you can click [Add New Organization](FEATURES.md#adding-a-new-organization) button.
-Once the new organization is added, it should be available in the dropdown box.
-
-## Adding a New Organization
-
-An Organization can have two purposes in this module.
-
-1. It defines the Organization that the forensic examiner belongs to.
-This organization is selected or added when Editing Correlation Case Details.
-2. It defines the Organization that is the source of a Globally Known Artifact List.
-This organization is selected or added during Import of a Globally Known Artifact hash list.
-
-When adding a new organization, only the Organization Name is required.
-It is recommended to also include a Point of Contact for that organization.
-This will be someone that is a manager or team lead at that Organization that
-could be contacted for any questions about a case or a shared Globally Known Artifact
-hash list.
-
-Click OK to save the new Organization.
diff --git a/ImageGallery/nbproject/project.xml b/ImageGallery/nbproject/project.xml
index b9e8b60120..14e60b0a5a 100644
--- a/ImageGallery/nbproject/project.xml
+++ b/ImageGallery/nbproject/project.xml
@@ -127,7 +127,7 @@
10
- 10.7
+ 10.8
diff --git a/KeywordSearch/nbproject/project.xml b/KeywordSearch/nbproject/project.xml
index 45abae006a..d375e76868 100644
--- a/KeywordSearch/nbproject/project.xml
+++ b/KeywordSearch/nbproject/project.xml
@@ -119,7 +119,7 @@
10
- 10.7
+ 10.8
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties
index 1a14ddf308..99d88938ce 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties
@@ -236,10 +236,10 @@ KeywordSearchGlobalLanguageSettingsPanel.ingestSettingsLabel.text=Ingest setting
KeywordSearchGlobalLanguageSettingsPanel.enableUTF16Checkbox.text=Enable UTF16LE and UTF16BE string extraction
KeywordSearchGlobalLanguageSettingsPanel.languagesLabel.text=Enabled scripts (languages):
KeywordSearchGlobalSearchSettingsPanel.timeRadioButton1.toolTipText=20 mins. (fastest ingest time)
-KeywordSearchGlobalSearchSettingsPanel.timeRadioButton1.text=20 minutes
+KeywordSearchGlobalSearchSettingsPanel.timeRadioButton1.text=20 minutes (slowest feedback, fastest ingest)
KeywordSearchGlobalSearchSettingsPanel.timeRadioButton2.toolTipText=10 minutes (faster overall ingest time than default)
-KeywordSearchGlobalSearchSettingsPanel.timeRadioButton2.text=10 minutes
-KeywordSearchGlobalSearchSettingsPanel.frequencyLabel.text=Results update frequency during ingest (we have not seen significant performance differences between 5, 10, or 20 minute intervals):
+KeywordSearchGlobalSearchSettingsPanel.timeRadioButton2.text=10 minutes (slower feedback, faster ingest)
+KeywordSearchGlobalSearchSettingsPanel.frequencyLabel.text=Results update frequency during ingest:
KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.toolTipText=Requires Hash DB service to had run previously, or be selected for next ingest.
KeywordSearchGlobalSearchSettingsPanel.skipNSRLCheckBox.text=Do not add files in NSRL (known files) to keyword index during ingest
KeywordSearchGlobalSearchSettingsPanel.informationLabel.text=Information
@@ -249,7 +249,7 @@ KeywordSearchGlobalSearchSettingsPanel.filesIndexedLabel.text=Files in keyword i
KeywordSearchGlobalSearchSettingsPanel.showSnippetsCB.text=Show Keyword Preview in Keyword Search Results (will result in longer search times)
KeywordSearchGlobalSearchSettingsPanel.chunksValLabel.text=0
KeywordSearchGlobalSearchSettingsPanel.timeRadioButton4.toolTipText=1 minute (overall ingest time will be longest)
-KeywordSearchGlobalSearchSettingsPanel.timeRadioButton4.text_1=1 minute
+KeywordSearchGlobalSearchSettingsPanel.timeRadioButton4.text_1=1 minute (faster feedback, longest ingest)
KeywordSearchGlobalSearchSettingsPanel.chunksLabel.text=Chunks in keyword index:
KeywordSearchGlobalSearchSettingsPanel.timeRadioButton3.toolTipText=5 minutes (overall ingest time will be longer)
KeywordSearchGlobalSearchSettingsPanel.timeRadioButton3.text=5 minutes (default)
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
index 08a6b38165..830c93325a 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java
@@ -161,10 +161,10 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
try {
Index indexInfo = server.getIndexInfo();
if (!IndexFinder.getCurrentSolrVersion().equals(indexInfo.getSolrVersion())) {
- throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSolrVersionNotSupported(indexInfo.getSolrVersion()));
+ throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSolrVersionNotSupported(indexInfo.getSolrVersion()));
}
if (!IndexFinder.getCurrentSchemaVersion().equals(indexInfo.getSchemaVersion())) {
- throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSchemaNotSupported(indexInfo.getSchemaVersion()));
+ throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupException_indexSchemaNotSupported(indexInfo.getSchemaVersion()));
}
} catch (NoOpenCoreException ex) {
throw new IngestModuleException(Bundle.KeywordSearchIngestModule_startupMessage_failedToGetIndexSchema(), ex);
@@ -249,7 +249,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
public ProcessResult process(AbstractFile abstractFile) {
if (initialized == false) //error initializing indexing/Solr
{
- logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); //NON-NLS
+ logger.log(Level.SEVERE, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); //NON-NLS
putIngestStatus(jobId, abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
return ProcessResult.OK;
}
@@ -293,14 +293,16 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
*/
@Override
public void shutDown() {
- logger.log(Level.INFO, "Instance {0}", instanceNum); //NON-NLS
+ logger.log(Level.INFO, "Keyword search ingest module instance {0} shutting down", instanceNum); //NON-NLS
if ((initialized == false) || (context == null)) {
return;
}
if (context.fileIngestIsCancelled()) {
- stop();
+ logger.log(Level.INFO, "Keyword search ingest module instance {0} stopping search job due to ingest cancellation", instanceNum); //NON-NLS
+ SearchRunner.getInstance().stopJob(jobId);
+ cleanup();
return;
}
@@ -309,34 +311,20 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
// We only need to post the summary msg from the last module per job
if (refCounter.decrementAndGet(jobId) == 0) {
+ try {
+ final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
+ logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles); //NON-NLS
+ final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks();
+ logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks); //NON-NLS
+ } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
+ logger.log(Level.SEVERE, "Error executing Solr queries to check number of indexed files and file chunks", ex); //NON-NLS
+ }
postIndexSummary();
synchronized (ingestStatus) {
ingestStatus.remove(jobId);
}
}
- //log number of files / chunks in index
- //signal a potential change in number of text_ingested files
- try {
- final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
- final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks();
- logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles); //NON-NLS
- logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks); //NON-NLS
- } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
- logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex); //NON-NLS
- }
-
- cleanup();
- }
-
- /**
- * Handle stop event (ingest interrupted) Cleanup resources, threads, timers
- */
- private void stop() {
- logger.log(Level.INFO, "stop()"); //NON-NLS
-
- SearchRunner.getInstance().stopJob(jobId);
-
cleanup();
}
diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SearchRunner.java
index 6f5407e4a4..1a2ffd1200 100644
--- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SearchRunner.java
+++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/SearchRunner.java
@@ -131,6 +131,7 @@ public final class SearchRunner {
}
if (readyForFinalSearch) {
+ logger.log(Level.INFO, "Committing search index before final search for search job {0}", job.getJobId()); //NON-NLS
commit();
doFinalSearch(job); //this will block until it's done
}
@@ -189,7 +190,7 @@ public final class SearchRunner {
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
KeywordSearch.fireNumIndexedFilesChange(null, numIndexedFiles);
} catch (NoOpenCoreException | KeywordSearchModuleException ex) {
- logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex); //NON-NLS
+ logger.log(Level.SEVERE, "Error executing Solr query to check number of indexed files", ex); //NON-NLS
}
}
@@ -201,21 +202,25 @@ public final class SearchRunner {
*/
private void doFinalSearch(SearchJobInfo job) {
// Run one last search as there are probably some new files committed
- logger.log(Level.INFO, "Running final search for jobid {0}", job.getJobId()); //NON-NLS
+ logger.log(Level.INFO, "Starting final search for search job {0}", job.getJobId()); //NON-NLS
if (!job.getKeywordListNames().isEmpty()) {
try {
// In case this job still has a worker running, wait for it to finish
+ logger.log(Level.INFO, "Checking for previous search for search job {0} before executing final search", job.getJobId()); //NON-NLS
job.waitForCurrentWorker();
SearchRunner.Searcher finalSearcher = new SearchRunner.Searcher(job, true);
job.setCurrentSearcher(finalSearcher); //save the ref
+ logger.log(Level.INFO, "Kicking off final search for search job {0}", job.getJobId()); //NON-NLS
finalSearcher.execute(); //start thread
// block until the search is complete
+ logger.log(Level.INFO, "Waiting for final search for search job {0}", job.getJobId()); //NON-NLS
finalSearcher.get();
+ logger.log(Level.INFO, "Final search for search job {0} completed", job.getJobId()); //NON-NLS
} catch (InterruptedException | CancellationException ex) {
- logger.log(Level.INFO, "Final search for search job {1} interrupted or cancelled", job.getJobId()); //NON-NLS
+ logger.log(Level.INFO, "Final search for search job {0} interrupted or cancelled", job.getJobId()); //NON-NLS
} catch (ExecutionException ex) {
logger.log(Level.SEVERE, String.format("Final search for search job %d failed", job.getJobId()), ex); //NON-NLS
}
@@ -246,6 +251,7 @@ public final class SearchRunner {
SearchJobInfo job = j.getValue();
// If no lists or the worker is already running then skip it
if (!job.getKeywordListNames().isEmpty() && !job.isWorkerRunning()) {
+ logger.log(Level.INFO, "Executing periodic search for search job {0}", job.getJobId()); //NON-NLS
Searcher searcher = new Searcher(job);
job.setCurrentSearcher(searcher); //save the ref
searcher.execute(); //start thread
@@ -341,7 +347,9 @@ public final class SearchRunner {
private void waitForCurrentWorker() throws InterruptedException {
synchronized (finalSearchLock) {
while (workerRunning) {
+ logger.log(Level.INFO, "Waiting for previous worker to finish"); //NON-NLS
finalSearchLock.wait(); //wait() releases the lock
+ logger.log(Level.INFO, "Notified previous worker finished"); //NON-NLS
}
}
}
@@ -351,6 +359,7 @@ public final class SearchRunner {
*/
private void searchNotify() {
synchronized (finalSearchLock) {
+ logger.log(Level.INFO, "Notifying after finishing search"); //NON-NLS
workerRunning = false;
finalSearchLock.notify();
}
@@ -505,8 +514,7 @@ public final class SearchRunner {
try {
finalizeSearcher();
stopWatch.stop();
-
- logger.log(Level.INFO, "Searcher took to run: {0} secs.", stopWatch.getElapsedTimeSecs()); //NON-NLS
+ logger.log(Level.INFO, "Searcher took {0} secs to run (final = {1})", new Object[]{stopWatch.getElapsedTimeSecs(), this.finalRun}); //NON-NLS
} finally {
// In case a thread is waiting on this worker to be done
job.searchNotify();
@@ -520,7 +528,9 @@ public final class SearchRunner {
protected void done() {
// call get to see if there were any errors
try {
+ logger.log(Level.INFO, "Searcher calling get() on itself in done()"); //NON-NLS
get();
+ logger.log(Level.INFO, "Searcher finished calling get() on itself in done()"); //NON-NLS
} catch (InterruptedException | ExecutionException e) {
logger.log(Level.SEVERE, "Error performing keyword search: " + e.getMessage()); //NON-NLS
services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(),
@@ -566,17 +576,18 @@ public final class SearchRunner {
}
/**
- * This method filters out all of the hits found in earlier
- * periodic searches and returns only the results found by the most
- * recent search.
+ * This method filters out all of the hits found in earlier periodic
+ * searches and returns only the results found by the most recent
+ * search.
*
* This method will only return hits for objects for which we haven't
* previously seen a hit for the keyword.
- *
+ *
* @param queryResult The results returned by a keyword search.
- * @return A unique set of hits found by the most recent search for objects
- * that have not previously had a hit. The hits will be for the lowest
- * numbered chunk associated with the object.
+ *
+ * @return A unique set of hits found by the most recent search for
+ * objects that have not previously had a hit. The hits will be
+ * for the lowest numbered chunk associated with the object.
*
*/
private QueryResults filterResults(QueryResults queryResult) {
diff --git a/NEWS.txt b/NEWS.txt
index a07761dfdd..351a8f2d9f 100644
--- a/NEWS.txt
+++ b/NEWS.txt
@@ -7,13 +7,13 @@ and can be applied to either the table viewer or the thumbnail viewer.
correctly.
- The View Source File in Directory context menu item now works correctly.
- Tagged image files in the HTML report are now displayed full-size.
-- A new enterprise artifact manager feature has been added to the optional
-Experimental plug-in (NetBeans module; this optional feature includes a database
-(SQLite or PostgreSQL) and logic for correlating artifacts across cases; results
-are displayed using an Interesting Artifacts branch of the Interesting Items
-tree and a content viewer.
+- A new central repository feature has been added to the optional
+CentralRepository plug-in (NetBeans module; this optional feature includes a
+database (SQLite or PostgreSQL) and logic for correlating artifacts across
+cases; results are displayed using an Interesting Artifacts branch of the
+Interesting Items tree and an Other Data Sources content viewer.
- Some potential deadlocks during ingest have been eliminated.
-- Assorted small enhancements and bug fixes.
+- Assorted small enhancements and bug fixes are included.
---------------- VERSION 4.4.0 --------------
Improvements:
diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml
index e3559b4155..198dc619db 100644
--- a/RecentActivity/nbproject/project.xml
+++ b/RecentActivity/nbproject/project.xml
@@ -60,7 +60,7 @@
10
- 10.7
+ 10.8
diff --git a/Testing/nbproject/project.xml b/Testing/nbproject/project.xml
index cfaf244b7b..7d8e0b6fc5 100644
--- a/Testing/nbproject/project.xml
+++ b/Testing/nbproject/project.xml
@@ -47,7 +47,7 @@
10
- 10.7
+ 10.8
diff --git a/build.xml b/build.xml
index 43534a7495..b0d244b119 100755
--- a/build.xml
+++ b/build.xml
@@ -265,6 +265,7 @@
+
diff --git a/docs/doxygen-user/installSolr.dox b/docs/doxygen-user/installSolr.dox
index 0547e6e65f..d47904df08 100755
--- a/docs/doxygen-user/installSolr.dox
+++ b/docs/doxygen-user/installSolr.dox
@@ -1,7 +1,5 @@
-/*! \page install_solr Install and Configure Solr and Zookeeper
-A central Solr server is needed to store keyword indexes. Zookeeper is used to manage Solr configuration and as a coordination service for Autopsy. It is installed as part of Bitnami Solr package, but has to be configured separately. To install Solr, perform the following steps:
-
-
+/*! \page install_solr Install and Configure Solr
+A central Solr server is needed to store keyword indexes, and its embedded Zookeeper is used as a coordination service for Autopsy. To install Solr, perform the following steps:
\section install_solr_prereq Prerequisites
@@ -81,22 +79,13 @@ The following steps will configure Solr to run using an account that will have a
5. From an Autopsy installation, copy the folder "C:\Program Files\Autopsy-XXX(current version)\autopsy\solr\solr\configsets" to "C:\Bitnami\solr-4.10.3-0\apache-solr\solr".
6. From an Autopsy installation, copy the folder "C:\Program Files\Autopsy-XXX(current version)\autopsy\solr\solr\lib" to "C:\Bitnami\solr-4.10.3-0\apache-solr\solr".
-\subsection configure_Zookeeper Zookeeper Configuration
-
- The following steps will configure Zookeeper.
- 1. Stop the solrJetty service by pressing Start, typing services.msc, pressing Enter, and locating the solrJetty Windows service. Select the service and press Stop the service. If the service is already stopped and there is no Stop the service available, this is okay.
- 2. Start a Windows command prompt as administrator by pressing Start, typing command, right clicking on Command Prompt, and clicking on Run as administrator. Then run the following command to uninstall the solrJetty service:
+7. Stop the solrJetty service by pressing Start, typing services.msc, pressing Enter, and locating the solrJetty Windows service. Select the service and press Stop the service. If the service is already stopped and there is no Stop the service available, this is okay.
+8. Start a Windows command prompt as administrator by pressing Start, typing command, right clicking on Command Prompt, and clicking on Run as administrator. Then run the following command to uninstall the solrJetty service:
cmd /c C:\Bitnami\solr-4.10.3-0\apache-solr\scripts\serviceinstall.bat UNINSTALL
You will very likely see a result that says "The solrJetty service is not started." This is okay.
-
- 3. Create a folder "C:\Bitnami\zookeeper" if it does not exist.
- 4. Edit "C:\Bitnami\solr-4.10.3-0\apache-solr\solr\zoo.cfg" to include the text dataDir=C:/Bitnami/zookeeper as shown in the screenshot below
-
- \image html zooDir.PNG
-
- 5. Start a Windows command prompt as administrator by pressing Start, typing command, right clicking on Command Prompt, and clicking on Run as administrator. Then run the following command to install the solrJetty service:
+9. Start a Windows command prompt as administrator by pressing Start, typing command, right clicking on Command Prompt, and clicking on Run as administrator. Then run the following command to install the solrJetty service:
cmd /c C:\Bitnami\solr-4.10.3-0\apache-solr\scripts\serviceinstall.bat INSTALL
Note the argument "INSTALL" is case sensitive. Your command prompt should look like the screenshot below. Very likely your command prompt will say "The solrJetty service could not be started." This is okay.
diff --git a/nbproject/project.properties b/nbproject/project.properties
index f970ea2eb7..8cceb63bc8 100644
--- a/nbproject/project.properties
+++ b/nbproject/project.properties
@@ -10,6 +10,7 @@ build.type=RELEASE
#build.type=DEVELOPMENT
project.org.netbeans.progress=org-netbeans-api-progress
+project.org.sleuthkit.autopsy.centralrepository=CentralRepository
project.org.sleuthkit.autopsy.experimental=Experimental
project.org.sleuthkit.autopsy.imagegallery=ImageGallery
update_versions=false
@@ -31,7 +32,8 @@ modules=\
${project.org.sleuthkit.autopsy.core}:\
${project.org.sleuthkit.autopsy.corelibs}:\
${project.org.sleuthkit.autopsy.imagegallery}:\
- ${project.org.sleuthkit.autopsy.experimental}
+ ${project.org.sleuthkit.autopsy.experimental}:\
+ ${project.org.sleuthkit.autopsy.centralrepository}
project.org.sleuthkit.autopsy.core=Core
project.org.sleuthkit.autopsy.corelibs=CoreLibs
project.org.sleuthkit.autopsy.keywordsearch=KeywordSearch
diff --git a/thirdparty/apache-mime4j/apache-mime4j-core-0.8.0-SNAPSHOT.jar b/thirdparty/apache-mime4j/apache-mime4j-core-0.8.0-SNAPSHOT.jar
new file mode 100644
index 0000000000..0aff45bb7d
Binary files /dev/null and b/thirdparty/apache-mime4j/apache-mime4j-core-0.8.0-SNAPSHOT.jar differ
diff --git a/thirdparty/apache-mime4j/apache-mime4j-dom-0.8.0-SNAPSHOT.jar b/thirdparty/apache-mime4j/apache-mime4j-dom-0.8.0-SNAPSHOT.jar
new file mode 100644
index 0000000000..773ab544ca
Binary files /dev/null and b/thirdparty/apache-mime4j/apache-mime4j-dom-0.8.0-SNAPSHOT.jar differ
diff --git a/thirdparty/apache-mime4j/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar b/thirdparty/apache-mime4j/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar
new file mode 100644
index 0000000000..bb7d15da61
Binary files /dev/null and b/thirdparty/apache-mime4j/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar differ
diff --git a/thirdparty/rr-full/plugins/appcompatcache.pl b/thirdparty/rr-full/plugins/appcompatcache.pl
index 332a31838a..2e43785121 100644
--- a/thirdparty/rr-full/plugins/appcompatcache.pl
+++ b/thirdparty/rr-full/plugins/appcompatcache.pl
@@ -296,6 +296,7 @@ sub appWin8 {
while($ofs < $len) {
my $tag = unpack("V",substr($data,$ofs,4));
+ last unless (defined $tag);
# 32-bit
if ($tag == 0x73746f72) {
$jmp = unpack("V",substr($data,$ofs + 8,4));
@@ -340,6 +341,7 @@ sub appWin10 {
while ($ofs < $len) {
$tag = substr($data,$ofs,4);
+ last unless (defined $tag);
if ($tag eq "10ts") {
$sz = unpack("V",substr($data,$ofs + 0x08,4));
diff --git a/thirdparty/rr-full/plugins/appcompatcache_tln.pl b/thirdparty/rr-full/plugins/appcompatcache_tln.pl
index e349d09320..fd767b51e0 100644
--- a/thirdparty/rr-full/plugins/appcompatcache_tln.pl
+++ b/thirdparty/rr-full/plugins/appcompatcache_tln.pl
@@ -291,6 +291,7 @@ sub appWin8 {
while($ofs < $len) {
my $tag = unpack("V",substr($data,$ofs,4));
+ last unless (defined $tag);
# 32-bit
if ($tag == 0x73746f72) {
$jmp = unpack("V",substr($data,$ofs + 8,4));
@@ -335,6 +336,7 @@ sub appWin10 {
while ($ofs < $len) {
$tag = substr($data,$ofs,4);
+ last unless (defined $tag);
if ($tag eq "10ts") {
$sz = unpack("V",substr($data,$ofs + 0x08,4));
diff --git a/thirdparty/rr-full/plugins/arpcache.pl b/thirdparty/rr-full/plugins/arpcache.pl
index b4b8a2a1f2..3a0b733ca2 100644
--- a/thirdparty/rr-full/plugins/arpcache.pl
+++ b/thirdparty/rr-full/plugins/arpcache.pl
@@ -122,6 +122,7 @@ sub parsePath {
while($tag) {
$ofs += 2;
my $i = substr($data,$ofs,2);
+ last unless (defined $i);
if (unpack("v",$i) == 0) {
$tag = 0;
}
diff --git a/thirdparty/rr-full/plugins/comdlg32.pl b/thirdparty/rr-full/plugins/comdlg32.pl
index 7eecfaaabf..1f41837ff2 100644
--- a/thirdparty/rr-full/plugins/comdlg32.pl
+++ b/thirdparty/rr-full/plugins/comdlg32.pl
@@ -386,6 +386,7 @@ sub parseShellItem {
while ($tag) {
my %item = ();
my $sz = unpack("v",substr($data,$cnt,2));
+ return %str unless (defined $sz);
$tag = 0 if (($sz == 0) || ($cnt + $sz > $len));
my $dat = substr($data,$cnt,$sz);
@@ -544,6 +545,7 @@ sub parseFolderEntry {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -559,10 +561,12 @@ sub parseFolderEntry {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
- else {
+ else {
$cnt++;
}
}
diff --git a/thirdparty/rr-full/plugins/itempos.pl b/thirdparty/rr-full/plugins/itempos.pl
index a1e309196a..8f5d4562b8 100644
--- a/thirdparty/rr-full/plugins/itempos.pl
+++ b/thirdparty/rr-full/plugins/itempos.pl
@@ -228,6 +228,7 @@ sub parseFolderItem {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -243,7 +244,9 @@ sub parseFolderItem {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
diff --git a/thirdparty/rr-full/plugins/shellbags.pl b/thirdparty/rr-full/plugins/shellbags.pl
index 7625f0b83f..5b8587af38 100644
--- a/thirdparty/rr-full/plugins/shellbags.pl
+++ b/thirdparty/rr-full/plugins/shellbags.pl
@@ -378,6 +378,7 @@ sub parseVariableEntry {
my $cnt = 0x10;
while($tag) {
my $sz = unpack("V",substr($stuff,$cnt,4));
+ return %item unless (defined $sz);
my $id = unpack("V",substr($stuff,$cnt + 4,4));
#--------------------------------------------------------------
# sub-segment types
@@ -421,6 +422,7 @@ sub parseVariableEntry {
my $t = 1;
while ($t) {
my $i = substr($data,$o,1);
+ return %item unless (defined $i);
if ($i =~ m/\x00/) {
$t = 0;
}
@@ -732,6 +734,7 @@ sub parseFolderEntry {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -747,7 +750,9 @@ sub parseFolderEntry {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
@@ -850,7 +855,9 @@ sub parseFolderEntry2 {
my $tag = 1;
while ($tag) {
- if (unpack("v",substr($data,$ofs,2)) == 0xbeef) {
+ my $s = substr($data,$ofs,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
@@ -951,6 +958,7 @@ sub shellItem0x52 {
while ($tag) {
$d = substr($data,0x32 + $cnt,2);
+ return %item unless (defined $d);
if (unpack("v",$d) == 0) {
$tag = 0;
}
diff --git a/thirdparty/rr-full/plugins/shellbags_test.pl b/thirdparty/rr-full/plugins/shellbags_test.pl
index 800592a8c2..2c4e7bda3d 100644
--- a/thirdparty/rr-full/plugins/shellbags_test.pl
+++ b/thirdparty/rr-full/plugins/shellbags_test.pl
@@ -358,6 +358,7 @@ sub parseFolderItem {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -373,7 +374,9 @@ sub parseFolderItem {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
diff --git a/thirdparty/rr-full/plugins/shellbags_tln.pl b/thirdparty/rr-full/plugins/shellbags_tln.pl
index c8f5bd0b22..bbbd3fe309 100644
--- a/thirdparty/rr-full/plugins/shellbags_tln.pl
+++ b/thirdparty/rr-full/plugins/shellbags_tln.pl
@@ -356,6 +356,7 @@ sub parseVariableEntry {
while($tag) {
my $sz = unpack("V",substr($stuff,$cnt,4));
my $id = unpack("V",substr($stuff,$cnt + 4,4));
+ return %item unless (defined $sz);
#--------------------------------------------------------------
# sub-segment types
# 0x0a - file name
@@ -386,6 +387,7 @@ sub parseVariableEntry {
# my $sz = unpack("V",substr($stuff,$cnt,4));
# my $id = unpack("V",substr($stuff,$cnt + 4,4));
#
+# return %item unless (defined $sz);
# if ($sz == 0x00) {
# $tag = 0;
# next;
@@ -652,6 +654,7 @@ sub parseFolderEntry {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -667,7 +670,9 @@ sub parseFolderEntry {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
diff --git a/thirdparty/rr-full/plugins/shellbags_xp.pl b/thirdparty/rr-full/plugins/shellbags_xp.pl
index 2c427d6638..b4f8ee2b00 100644
--- a/thirdparty/rr-full/plugins/shellbags_xp.pl
+++ b/thirdparty/rr-full/plugins/shellbags_xp.pl
@@ -397,7 +397,8 @@ sub parseVariableEntry {
# 0x0e, 0x0f, 0x10 - mod date, create date, access date(?)
# 0x0c - size
#--------------------------------------------------------------
- if ($sz == 0x00) {
+ return %item unless (defined $sz);
+ if ($sz == 0x00) {
$tag = 0;
next;
}
@@ -419,7 +420,7 @@ sub parseVariableEntry {
# while($tag) {
# my $sz = unpack("V",substr($stuff,$cnt,4));
# my $id = unpack("V",substr($stuff,$cnt + 4,4));
-#
+# return %item unless (defined $sz);
# if ($sz == 0x00) {
# $tag = 0;
# next;
@@ -725,6 +726,7 @@ sub parseFolderEntry {
my $str = "";
while($tag) {
my $s = substr($data,$ofs_shortname + $cnt,1);
+ return %item unless (defined $s);
if ($s =~ m/\x00/ && ((($cnt + 1) % 2) == 0)) {
$tag = 0;
}
@@ -740,7 +742,9 @@ sub parseFolderEntry {
$tag = 1;
$cnt = 0;
while ($tag) {
- if (unpack("v",substr($data,$ofs + $cnt,2)) == 0xbeef) {
+ my $s = substr($data,$ofs + $cnt,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
@@ -829,7 +833,9 @@ sub parseFolderEntry2 {
my $tag = 1;
while ($tag) {
- if (unpack("v",substr($data,$ofs,2)) == 0xbeef) {
+ my $s = substr($data,$ofs,2);
+ return %item unless (defined $s);
+ if (unpack("v",$s) == 0xbeef) {
$tag = 0;
}
else {
diff --git a/thirdparty/rr-full/plugins/shimcache.pl b/thirdparty/rr-full/plugins/shimcache.pl
index 527eb4dcc6..125143fbda 100755
--- a/thirdparty/rr-full/plugins/shimcache.pl
+++ b/thirdparty/rr-full/plugins/shimcache.pl
@@ -283,6 +283,7 @@ sub appWin8 {
while($ofs < $len) {
my $tag = unpack("V",substr($data,$ofs,4));
+ last unless (defined $tag);
# 32-bit
if ($tag == 0x73746f72) {
$jmp = unpack("V",substr($data,$ofs + 8,4));
@@ -327,6 +328,7 @@ sub appWin10 {
while ($ofs < $len) {
$tag = substr($data,$ofs,4);
+ last unless (defined $tag);
if ($tag eq "10ts") {
$sz = unpack("V",substr($data,$ofs + 0x08,4));
diff --git a/thirdparty/rr-full/plugins/shimcache_tln.pl b/thirdparty/rr-full/plugins/shimcache_tln.pl
index 1e03d645ce..b0fa1588b6 100755
--- a/thirdparty/rr-full/plugins/shimcache_tln.pl
+++ b/thirdparty/rr-full/plugins/shimcache_tln.pl
@@ -277,6 +277,7 @@ sub appWin8 {
while($ofs < $len) {
my $tag = unpack("V",substr($data,$ofs,4));
+ last unless (defined $tag);
# 32-bit
if ($tag == 0x73746f72) {
$jmp = unpack("V",substr($data,$ofs + 8,4));
@@ -321,6 +322,7 @@ sub appWin10 {
while ($ofs < $len) {
$tag = substr($data,$ofs,4);
+ last unless (defined $tag);
if ($tag eq "10ts") {
$sz = unpack("V",substr($data,$ofs + 0x08,4));
diff --git a/thunderbirdparser/build.xml b/thunderbirdparser/build.xml
index 80c452ef31..0b0a4fe2dd 100644
--- a/thunderbirdparser/build.xml
+++ b/thunderbirdparser/build.xml
@@ -20,6 +20,9 @@
+
+
+
diff --git a/thunderbirdparser/nbproject/project.properties b/thunderbirdparser/nbproject/project.properties
index e21eb352f8..ceee1f4265 100644
--- a/thunderbirdparser/nbproject/project.properties
+++ b/thunderbirdparser/nbproject/project.properties
@@ -1,6 +1,6 @@
-file.reference.apache-mime4j-core-0.8.0.jar=release/modules/ext/apache-mime4j-core-0.8.0.jar
-file.reference.apache-mime4j-dom-0.8.0.jar=release/modules/ext/apache-mime4j-dom-0.8.0.jar
-file.reference.apache-mime4j-mbox-iterator-0.8.0.jar=release/modules/ext/apache-mime4j-mbox-iterator-0.8.0.jar
+file.reference.apache-mime4j-core-0.8.0.jar=release/modules/ext/apache-mime4j-core-0.8.0-SNAPSHOT.jar
+file.reference.apache-mime4j-dom-0.8.0.jar=release/modules/ext/apache-mime4j-dom-0.8.0-SNAPSHOT.jar
+file.reference.apache-mime4j-mbox-iterator-0.8.0.jar=release/modules/ext/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar
file.reference.java-libpst-1.0-SNAPSHOT.jar=release/modules/ext/java-libpst-1.0-SNAPSHOT.jar
javac.source=1.8
javac.compilerargs=-Xlint -Xlint:-serial
diff --git a/thunderbirdparser/nbproject/project.xml b/thunderbirdparser/nbproject/project.xml
index 060c91a323..e0aea0045c 100644
--- a/thunderbirdparser/nbproject/project.xml
+++ b/thunderbirdparser/nbproject/project.xml
@@ -36,7 +36,7 @@
10
- 10.7
+ 10.8
@@ -51,20 +51,20 @@
- ext/apache-mime4j-core-0.8.0.jar
- release/modules/ext/apache-mime4j-core-0.8.0.jar
+ ext/apache-mime4j-core-0.8.0-SNAPSHOT.jar
+ release/modules/ext/apache-mime4j-core-0.8.0-SNAPSHOT.jar
- ext/apache-mime4j-dom-0.8.0.jar
- release/modules/ext/apache-mime4j-dom-0.8.0.jar
+ ext/apache-mime4j-dom-0.8.0-SNAPSHOT.jar
+ release/modules/ext/apache-mime4j-dom-0.8.0-SNAPSHOT.jar
ext/java-libpst-1.0-SNAPSHOT.jar
release/modules/ext/java-libpst-1.0-SNAPSHOT.jar
- ext/apache-mime4j-mbox-iterator-0.8.0.jar
- release/modules/ext/apache-mime4j-mbox-iterator-0.8.0.jar
+ ext/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar
+ release/modules/ext/apache-mime4j-mbox-iterator-0.8.0-SNAPSHOT.jar