diff --git a/BUILDING.txt b/BUILDING.txt
index 0d3e3ed9bd..a77d6c8add 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -37,16 +37,16 @@ to the root 64-bit JRE directory.
2) Get Sleuth Kit Setup
2a) Download and build a Release version of Sleuth Kit (TSK) 4.0. See
win32\BUILDING.txt in the TSK package for more information. You need to
- build the tsk_jni project. Select the Release_PostgreSQL Win32 or x64 target,
+ build the tsk_jni project. Select the Release Win32 or x64 target,
depending upon your target build. You can use a released version or download
the latest from github:
- git://github.com/sleuthkit/sleuthkit.git
-2b) Build the TSK JAR file by typing 'ant dist-PostgreSQL' in
+2b) Build the TSK JAR file by typing 'ant dist' in
bindings/java in the
TSK source code folder from a command line. Note it is case
sensitive. You can also add the code to a NetBeans project and build
- it from there, selecting the dist-PostgreSQL target.
+ it from there, selecting the dist target.
2c) Set TSK_HOME environment variable to the root directory of TSK
@@ -54,6 +54,9 @@ to the root 64-bit JRE directory.
from the TSK root directory to install the libraries and such in
the needed places (i.e. '/usr/local').
+2e) Build the TSK CaseUco JAR file by running 'ant' in the case-uco/java
+ folder of the TSK source tree. You can also add the code to a NetBeans
+ project and build it using the regular 'build' action.
3) For Windows builds, GStreamer must be setup. GStreamer is used to view video
files. You can either download it and install it, or you can copy it from the
@@ -100,7 +103,7 @@ the build process.
- The Sleuth Kit Java datamodel JAR file has native JNI libraries
that are copied into it. These JNI libraries have dependencies on
-libewf, zlib, libpq, libintl-8, libeay32, and ssleay32 DLL files. On non-Windows
+libewf, zlib, libintl-8, libeay32, and ssleay32 DLL files. On non-Windows
platforms, the JNI library also has a dependency on libtsk (on Windows,
it is compiled into libtsk_jni).
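
As a rough sanity check, the updated TSK build steps above (2b and 2e) reduce
to the following commands on Windows, assuming TSK_HOME points at the root of
the TSK source tree (see win32\BUILDING.txt in the TSK package for the full
prerequisites):

    cd %TSK_HOME%\bindings\java
    ant dist
    cd %TSK_HOME%\case-uco\java
    ant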
diff --git a/Core/build.xml b/Core/build.xml
index 811cea1149..8ac5c13d99 100644
--- a/Core/build.xml
+++ b/Core/build.xml
@@ -110,7 +110,9 @@
+        <copy file="${env.TSK_HOME}/bindings/java/lib/SparseBitSet-1.1.jar"
+              tofile="${ext.dir}/SparseBitSet-1.1.jar"/>
diff --git a/Core/nbproject/project.properties b/Core/nbproject/project.properties
index 3814ec6d0a..1898db811c 100644
--- a/Core/nbproject/project.properties
+++ b/Core/nbproject/project.properties
@@ -83,6 +83,7 @@ file.reference.sevenzipjbinding.jar=release/modules/ext/sevenzipjbinding.jar
file.reference.sis-metadata-0.8.jar=release\\modules\\ext\\sis-metadata-0.8.jar
file.reference.sis-netcdf-0.8.jar=release\\modules\\ext\\sis-netcdf-0.8.jar
file.reference.sis-utility-0.8.jar=release\\modules\\ext\\sis-utility-0.8.jar
+file.reference.sleuthkit-caseuco-4.9.0.jar=release\\modules\\ext\\sleuthkit-caseuco-4.9.0.jar
file.reference.slf4j-api-1.7.25.jar=release\\modules\\ext\\slf4j-api-1.7.25.jar
file.reference.sqlite-jdbc-3.25.2.jar=release/modules/ext/sqlite-jdbc-3.25.2.jar
file.reference.StixLib.jar=release/modules/ext/StixLib.jar
diff --git a/Core/nbproject/project.xml b/Core/nbproject/project.xml
index 552b7b5fa4..b751ffbf07 100644
--- a/Core/nbproject/project.xml
+++ b/Core/nbproject/project.xml
@@ -471,6 +471,10 @@
            <class-path-extension>
                <runtime-relative-path>ext/commons-pool2-2.4.2.jar</runtime-relative-path>
                <binary-origin>release/modules/ext/commons-pool2-2.4.2.jar</binary-origin>
            </class-path-extension>
+            <class-path-extension>
+                <runtime-relative-path>ext/sleuthkit-4.9.0.jar</runtime-relative-path>
+                <binary-origin>release/modules/ext/sleuthkit-4.9.0.jar</binary-origin>
+            </class-path-extension>
            <class-path-extension>
                <runtime-relative-path>ext/jxmapviewer2-2.4.jar</runtime-relative-path>
                <binary-origin>release/modules/ext/jxmapviewer2-2.4.jar</binary-origin>
            </class-path-extension>
@@ -639,10 +643,6 @@
            <class-path-extension>
                <runtime-relative-path>ext/commons-validator-1.6.jar</runtime-relative-path>
                <binary-origin>release/modules/ext/commons-validator-1.6.jar</binary-origin>
            </class-path-extension>
-            <class-path-extension>
-                <runtime-relative-path>ext/sleuthkit-4.9.0.jar</runtime-relative-path>
-                <binary-origin>release/modules/ext/sleuthkit-4.9.0.jar</binary-origin>
-            </class-path-extension>
            <class-path-extension>
                <runtime-relative-path>ext/decodetect-core-0.3.jar</runtime-relative-path>
                <binary-origin>release/modules/ext/decodetect-core-0.3.jar</binary-origin>
            </class-path-extension>
@@ -779,6 +779,10 @@
            <class-path-extension>
                <runtime-relative-path>ext/curator-client-2.8.0.jar</runtime-relative-path>
                <binary-origin>release/modules/ext/curator-client-2.8.0.jar</binary-origin>
            </class-path-extension>
+            <class-path-extension>
+                <runtime-relative-path>ext/sleuthkit-caseuco-4.9.0.jar</runtime-relative-path>
+                <binary-origin>release\modules\ext\sleuthkit-caseuco-4.9.0.jar</binary-origin>
+            </class-path-extension>
            <class-path-extension>
                <runtime-relative-path>ext/fontbox-2.0.13.jar</runtime-relative-path>
                <binary-origin>release\modules\ext\fontbox-2.0.13.jar</binary-origin>
            </class-path-extension>
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTask.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTask.java
index a1443bde31..53320d9313 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTask.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTask.java
@@ -30,6 +30,7 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgress
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imagewriter.ImageWriterService;
import org.sleuthkit.autopsy.imagewriter.ImageWriterSettings;
+import org.sleuthkit.datamodel.AddDataSourceCallbacks;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitJNI;
@@ -42,17 +43,10 @@ import org.sleuthkit.datamodel.TskDataException;
class AddImageTask implements Runnable {
private final Logger logger = Logger.getLogger(AddImageTask.class.getName());
- private final String deviceId;
- private final String imagePath;
- private final int sectorSize;
- private final String timeZone;
- private final ImageWriterSettings imageWriterSettings;
- private final boolean ignoreFatOrphanFiles;
- private final String md5;
- private final String sha1;
- private final String sha256;
+ private final ImageDetails imageDetails;
private final DataSourceProcessorProgressMonitor progressMonitor;
- private final DataSourceProcessorCallback callback;
+ private final AddDataSourceCallbacks addDataSourceCallbacks;
+ private final AddImageTaskCallback addImageTaskCallback;
private boolean criticalErrorOccurred;
/*
@@ -73,40 +67,18 @@ class AddImageTask implements Runnable {
/**
* Constructs a runnable task that adds an image to the case database.
- *
- * @param deviceId An ASCII-printable identifier for the device
- * associated with the data source that is
- * intended to be unique across multiple cases
- * (e.g., a UUID).
- * @param imagePath Path to the image file.
- * @param sectorSize The sector size (use '0' for autodetect).
- * @param timeZone The time zone to use when processing dates
- * and times for the image, obtained from
- * java.util.TimeZone.getID.
- * @param ignoreFatOrphanFiles Whether to parse orphans if the image has a
- * FAT filesystem.
- * @param md5 The MD5 hash of the image, may be null.
- * @param sha1 The SHA-1 hash of the image, may be null.
- * @param sha256 The SHA-256 hash of the image, may be null.
- * @param imageWriterPath Path that a copy of the image should be
- * written to. Use empty string to disable image
- * writing
+ *
+ * @param imageDetails Holds all data about the image.
* @param progressMonitor Progress monitor to report progress during
* processing.
- * @param callback Callback to call when processing is done.
+ * @param addDataSourceCallbacks Callback for sending data to the ingest pipeline if an ingest stream is being used.
+ * @param addImageTaskCallback Callback for dealing with add image task completion.
*/
- AddImageTask(String deviceId, String imagePath, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, String md5, String sha1, String sha256, ImageWriterSettings imageWriterSettings,
- DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
- this.deviceId = deviceId;
- this.imagePath = imagePath;
- this.sectorSize = sectorSize;
- this.timeZone = timeZone;
- this.ignoreFatOrphanFiles = ignoreFatOrphanFiles;
- this.md5 = md5;
- this.sha1 = sha1;
- this.sha256 = sha256;
- this.imageWriterSettings = imageWriterSettings;
- this.callback = callback;
+ AddImageTask(ImageDetails imageDetails, DataSourceProcessorProgressMonitor progressMonitor, AddDataSourceCallbacks addDataSourceCallbacks,
+ AddImageTaskCallback addImageTaskCallback) {
+ this.imageDetails = imageDetails;
+ this.addDataSourceCallbacks = addDataSourceCallbacks;
+ this.addImageTaskCallback = addImageTaskCallback;
this.progressMonitor = progressMonitor;
tskAddImageProcessLock = new Object();
}
@@ -120,21 +92,21 @@ class AddImageTask implements Runnable {
try {
currentCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
- logger.log(Level.SEVERE, String.format("Failed to add image data source at %s, no current case", imagePath), ex);
+ logger.log(Level.SEVERE, String.format("Failed to start AddImageTask for %s, no current case", imageDetails.getImagePath()), ex);
return;
}
progressMonitor.setIndeterminate(true);
progressMonitor.setProgress(0);
String imageWriterPath = "";
- if (imageWriterSettings != null) {
- imageWriterPath = imageWriterSettings.getPath();
+ if (imageDetails.imageWriterSettings != null) {
+ imageWriterPath = imageDetails.imageWriterSettings.getPath();
}
List<String> errorMessages = new ArrayList<>();
List<Content> newDataSources = new ArrayList<>();
try {
synchronized (tskAddImageProcessLock) {
if (!tskAddImageProcessStopped) {
- tskAddImageProcess = currentCase.getSleuthkitCase().makeAddImageProcess(timeZone, true, ignoreFatOrphanFiles, imageWriterPath);
+ tskAddImageProcess = currentCase.getSleuthkitCase().makeAddImageProcess(imageDetails.timeZone, true, imageDetails.ignoreFatOrphanFiles, imageWriterPath);
} else {
return;
}
@@ -143,7 +115,7 @@ class AddImageTask implements Runnable {
progressUpdateThread.start();
runAddImageProcess(errorMessages);
progressUpdateThread.interrupt();
- commitOrRevertAddImageProcess(currentCase, errorMessages, newDataSources);
+ finishAddImageProcess(errorMessages, newDataSources);
progressMonitor.setProgress(100);
} finally {
DataSourceProcessorCallback.DataSourceProcessorResult result;
@@ -154,7 +126,7 @@ class AddImageTask implements Runnable {
} else {
result = DataSourceProcessorResult.NO_ERRORS;
}
- callback.done(result, errorMessages, newDataSources);
+ addImageTaskCallback.onCompleted(result, errorMessages, newDataSources);
}
}
@@ -177,7 +149,7 @@ class AddImageTask implements Runnable {
tskAddImageProcess.stop();
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Error cancelling adding image %s to the case database", imagePath), ex); //NON-NLS
+ logger.log(Level.SEVERE, String.format("Error cancelling adding image %s to the case database", imageDetails.getImagePath()), ex); //NON-NLS
}
}
}
@@ -191,23 +163,22 @@ class AddImageTask implements Runnable {
*/
private void runAddImageProcess(List<String> errorMessages) {
try {
- tskAddImageProcess.run(deviceId, new String[]{imagePath}, sectorSize);
+ tskAddImageProcess.run(imageDetails.deviceId, imageDetails.image, imageDetails.sectorSize, this.addDataSourceCallbacks);
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Critical error occurred adding image %s", imagePath), ex); //NON-NLS
+ logger.log(Level.SEVERE, String.format("Critical error occurred adding image %s", imageDetails.getImagePath()), ex); //NON-NLS
criticalErrorOccurred = true;
errorMessages.add(ex.getMessage());
} catch (TskDataException ex) {
- logger.log(Level.WARNING, String.format("Non-critical error occurred adding image %s", imagePath), ex); //NON-NLS
+ logger.log(Level.WARNING, String.format("Non-critical error occurred adding image %s", imageDetails.getImagePath()), ex); //NON-NLS
errorMessages.add(ex.getMessage());
}
}
/**
- * Commits or reverts the results of the TSK add image process. If the
- * process was stopped before it completed or there was a critical error the
- * results are reverted, otherwise they are committed.
+ * Handles the results of the TSK add image process. The image will remain in
+ * the case database even if a critical error occurred or the user canceled.
*
- * @param currentCase The current case.
* @param errorMessages Error messages, if any, are added to this list for
* eventual return via the callback.
* @param newDataSources If the new image is successfully committed, it is
@@ -216,84 +187,66 @@ class AddImageTask implements Runnable {
*
* @return
*/
- private void commitOrRevertAddImageProcess(Case currentCase, List<String> errorMessages, List<Content> newDataSources) {
+ private void finishAddImageProcess(List<String> errorMessages, List<Content> newDataSources) {
synchronized (tskAddImageProcessLock) {
- if (tskAddImageProcessStopped || criticalErrorOccurred) {
+ Image newImage = imageDetails.image;
+ String verificationError = newImage.verifyImageSize();
+ if (!verificationError.isEmpty()) {
+ errorMessages.add(verificationError);
+ }
+ if (imageDetails.imageWriterSettings != null) {
+ ImageWriterService.createImageWriter(newImage.getId(), imageDetails.imageWriterSettings);
+ }
+ newDataSources.add(newImage);
+
+ // If the add image process was cancelled don't do any further processing here
+ if (tskAddImageProcessStopped) {
+ return;
+ }
+
+ if (!StringUtils.isBlank(imageDetails.md5)) {
try {
- tskAddImageProcess.revert();
+ newImage.setMD5(imageDetails.md5);
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Error reverting after adding image %s to the case database", imagePath), ex); //NON-NLS
+ logger.log(Level.SEVERE, String.format("Failed to add MD5 hash for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
errorMessages.add(ex.getMessage());
criticalErrorOccurred = true;
+ } catch (TskDataException ignored) {
+ /*
+ * The only reasonable way for this to happen at
+ * present is through C/C++ processing of an EWF
+ * image, which is not an error.
+ */
}
- } else {
+ }
+ if (!StringUtils.isBlank(imageDetails.sha1)) {
try {
- long imageId = tskAddImageProcess.commit();
- if (imageId != 0) {
- Image newImage = currentCase.getSleuthkitCase().getImageById(imageId);
- String verificationError = newImage.verifyImageSize();
- if (!verificationError.isEmpty()) {
- errorMessages.add(verificationError);
- }
- if (imageWriterSettings != null) {
- ImageWriterService.createImageWriter(imageId, imageWriterSettings);
- }
- newDataSources.add(newImage);
- if (!StringUtils.isBlank(md5)) {
- try {
- newImage.setMD5(md5);
- } catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Failed to add MD5 hash for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
- errorMessages.add(ex.getMessage());
- criticalErrorOccurred = true;
- } catch (TskDataException ignored) {
- /*
- * The only reasonable way for this to happen at
- * present is through C/C++ processing of an EWF
- * image, which is not an error.
- */
- }
- }
- if (!StringUtils.isBlank(sha1)) {
- try {
- newImage.setSha1(sha1);
- } catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Failed to add SHA1 hash for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
- errorMessages.add(ex.getMessage());
- criticalErrorOccurred = true;
- } catch (TskDataException ignored) {
- /*
- * The only reasonable way for this to happen at
- * present is through C/C++ processing of an EWF
- * image, which is not an error.
- */
- }
- }
- if (!StringUtils.isBlank(sha256)) {
- try {
- newImage.setSha256(sha256);
- } catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Failed to add SHA256 for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
- errorMessages.add(ex.getMessage());
- criticalErrorOccurred = true;
- } catch (TskDataException ignored) {
- /*
- * The only reasonable way for this to happen at
- * present is through C/C++ processing of an EWF
- * image, which is not an error.
- */
- }
- }
- } else {
- String errorMessage = String.format("Error commiting after adding image %s to the case database, no object id returned", imagePath); //NON-NLS
- logger.log(Level.SEVERE, errorMessage);
- errorMessages.add(errorMessage);
- criticalErrorOccurred = true;
- }
+ newImage.setSha1(imageDetails.sha1);
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Error committing adding image %s to the case database", imagePath), ex); //NON-NLS
+ logger.log(Level.SEVERE, String.format("Failed to add SHA1 hash for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
errorMessages.add(ex.getMessage());
criticalErrorOccurred = true;
+ } catch (TskDataException ignored) {
+ /*
+ * The only reasonable way for this to happen at
+ * present is through C/C++ processing of an EWF
+ * image, which is not an error.
+ */
+ }
+ }
+ if (!StringUtils.isBlank(imageDetails.sha256)) {
+ try {
+ newImage.setSha256(imageDetails.sha256);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, String.format("Failed to add SHA256 for image data source %s (objId=%d)", newImage.getName(), newImage.getId()), ex);
+ errorMessages.add(ex.getMessage());
+ criticalErrorOccurred = true;
+ } catch (TskDataException ignored) {
+ /*
+ * The only reasonable way for this to happen at
+ * present is through C/C++ processing of an EWF
+ * image, which is not an error.
+ */
}
}
}
@@ -352,4 +305,37 @@ class AddImageTask implements Runnable {
}
}
+ /**
+ * Utility class to hold image data.
+ */
+ static class ImageDetails {
+ String deviceId;
+ Image image;
+ int sectorSize;
+ String timeZone;
+ boolean ignoreFatOrphanFiles;
+ String md5;
+ String sha1;
+ String sha256;
+ ImageWriterSettings imageWriterSettings;
+
+ ImageDetails(String deviceId, Image image, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, String md5, String sha1, String sha256, ImageWriterSettings imageWriterSettings) {
+ this.deviceId = deviceId;
+ this.image = image;
+ this.sectorSize = sectorSize;
+ this.timeZone = timeZone;
+ this.ignoreFatOrphanFiles = ignoreFatOrphanFiles;
+ this.md5 = md5;
+ this.sha1 = sha1;
+ this.sha256 = sha256;
+ this.imageWriterSettings = imageWriterSettings;
+ }
+
+ String getImagePath() {
+ if (image.getPaths().length > 0) {
+ return image.getPaths()[0];
+ }
+ return "Unknown data source path";
+ }
+ }
}
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTaskCallback.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTaskCallback.java
new file mode 100644
index 0000000000..ec76166419
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageTaskCallback.java
@@ -0,0 +1,38 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.casemodule;
+
+import java.util.List;
+import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult;
+import org.sleuthkit.datamodel.Content;
+
+/**
+ * Called on completion of the add image task.
+ */
+interface AddImageTaskCallback {
+
+ /**
+ * Called when the add image task is completed.
+ *
+ * @param result The result from the data source processor.
+ * @param errList The list of errors.
+ * @param newDataSources The list of new data sources.
+ */
+ void onCompleted(DataSourceProcessorResult result, List<String> errList, List<Content> newDataSources);
+}
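
AddImageTaskCallback is a single-method interface, so a caller that needs no
stream handling could adapt an existing DataSourceProcessorCallback with a
lambda. A minimal sketch (hypothetical, not part of this change; "dspCallback"
is assumed to be an in-scope DataSourceProcessorCallback):

    // Forward add-image completion straight to the DSP callback.
    AddImageTaskCallback adapter = (result, errList, newDataSources) ->
            dspCallback.done(result, errList, newDataSources);

The StreamingAddImageTaskCallback added later in this diff does the same
thing, but closes the ingest stream first.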
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
index 15eb0c3c67..2b880093e9 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardAddingProgressPanel.java
@@ -302,7 +302,9 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel {
private void startIngest() {
if (!newContents.isEmpty() && readyToIngest && !ingested) {
ingested = true;
- IngestManager.getInstance().queueIngestJob(newContents, ingestJobSettings);
+ if (dsProcessor != null && ! dsProcessor.supportsIngestStream()) {
+ IngestManager.getInstance().queueIngestJob(newContents, ingestJobSettings);
+ }
setStateFinished();
}
}
@@ -360,8 +362,12 @@ class AddImageWizardAddingProgressPanel extends ShortcutWizardDescriptorPanel {
setStateStarted();
- // Kick off the DSProcessor
- dsProcessor.run(getDSPProgressMonitorImpl(), cbObj);
+ // Kick off the DSProcessor
+ if (dsProcessor.supportsIngestStream()) {
+ dsProcessor.runWithIngestStream(ingestJobSettings, getDSPProgressMonitorImpl(), cbObj);
+ } else {
+ dsProcessor.run(getDSPProgressMonitorImpl(), cbObj);
+ }
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIterator.java b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIterator.java
index 9dad21446e..045bf775b2 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIterator.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/AddImageWizardIterator.java
@@ -41,6 +41,7 @@ class AddImageWizardIterator implements WizardDescriptor.Iterator<WizardDescriptor> {
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java b/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java
new file mode 100644
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/DefaultIngestStream.java
@@ -0,0 +1,59 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.casemodule;
+
+import java.util.List;
+import org.sleuthkit.autopsy.ingest.IngestStream;
+import org.sleuthkit.autopsy.ingest.IngestStreamClosedException;
+
+/**
+ * This is a default ingest stream to use with the data source processors when
+ * an IngestStream is not supplied. Adding files or data sources is a no-op.
+ */
+class DefaultIngestStream implements IngestStream {
+
+ private boolean isClosed = false;
+ private boolean isStopped = false;
+
+ @Override
+ public void addFiles(List<Long> fileObjectIds) throws IngestStreamClosedException {
+ // Do nothing
+ }
+
+ @Override
+ public synchronized boolean isClosed() {
+ return isClosed;
+ }
+
+ @Override
+ public synchronized void close() {
+ isClosed = true;
+ }
+
+ @Override
+ public synchronized void stop() {
+ isClosed = true;
+ isStopped = true;
+ }
+
+ @Override
+ public synchronized boolean wasStopped() {
+ return isStopped;
+ }
+}
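
For code paths that do not stream files to ingest, this no-op stream is paired
with the streaming callbacks so that added files are simply dropped. A minimal
sketch of that wiring (the same pattern LocalDiskDSProcessor uses later in
this diff):

    // A no-op stream makes the streaming callbacks safe to use when no
    // ingest pipeline is attached.
    IngestStream noOpStream = new DefaultIngestStream();
    StreamingAddDataSourceCallbacks callbacks = new StreamingAddDataSourceCallbacks(noOpStream);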
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java
index 596272f09f..9dd9a39fd4 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/ImageDSProcessor.java
@@ -24,6 +24,7 @@ import javax.swing.JPanel;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;
+import java.util.logging.Level;
import java.util.UUID;
import javax.swing.filechooser.FileFilter;
import org.openide.util.NbBundle;
@@ -33,7 +34,14 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgress
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
import org.sleuthkit.autopsy.coreutils.DataSourceUtils;
+import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
+import org.sleuthkit.autopsy.ingest.IngestJobSettings;
+import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.ingest.IngestStream;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.SleuthkitJNI;
+import org.sleuthkit.datamodel.TskCoreException;
/**
* A image file data source processor that implements the DataSourceProcessor
@@ -49,6 +57,7 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSourceProcessor {
private final static String DATA_SOURCE_TYPE = NbBundle.getMessage(ImageDSProcessor.class, "ImageDSProcessor.dsType.text");
+ private final Logger logger = Logger.getLogger(ImageDSProcessor.class.getName());
private static final List<String> allExt = new ArrayList<>();
private static final GeneralFilter rawFilter = new GeneralFilter(GeneralFilter.RAW_IMAGE_EXTS, GeneralFilter.RAW_IMAGE_DESC);
private static final GeneralFilter encaseFilter = new GeneralFilter(GeneralFilter.ENCASE_IMAGE_EXTS, GeneralFilter.ENCASE_IMAGE_DESC);
@@ -58,6 +67,8 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
private static final List<FileFilter> filtersList = new ArrayList<>();
private final ImageFilePanel configPanel;
private AddImageTask addImageTask;
+ private IngestStream ingestStream = null;
+ private Image image = null;
/*
* TODO: Remove the setDataSourceOptionsCalled flag and the settings fields
* when the deprecated method setDataSourceOptions is removed.
@@ -170,6 +181,77 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
*/
@Override
public void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
+ ingestStream = new DefaultIngestStream();
+ readConfigSettings();
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ doAddImageProcess(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progressMonitor, callback);
+ }
+
+ /**
+ * Adds a data source to the case database using a background task in a
+ * separate thread and the settings provided by the selection and
+ * configuration panel. Files found while adding the data source will be sent
+ * directly to the IngestStream provided. Returns as soon as the background
+ * task is started.
+ * The background task uses a callback object to signal task completion and
+ * return results.
+ *
+ * This method should not be called unless isPanelValid returns true, and
+ * should only be called for DSPs that support ingest streams.
+ *
+ * @param settings The ingest job settings.
+ * @param progress Progress monitor that will be used by the
+ * background task to report progress.
+ * @param callBack Callback that will be used by the background task
+ * to return results.
+ */
+ @Override
+ public void runWithIngestStream(IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
+ DataSourceProcessorCallback callBack) {
+
+ // Read the settings from the wizard
+ readConfigSettings();
+
+ // Set up the data source before creating the ingest stream
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ // Now initialize the ingest stream
+ try {
+ ingestStream = IngestManager.getInstance().openIngestStream(image, settings);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error starting ingest modules", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ doAddImageProcess(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progress, callBack);
+ }
+
+ /**
+ * Store the options from the config panel.
+ */
+ private void readConfigSettings() {
if (!setDataSourceOptionsCalled) {
configPanel.storeSettings();
deviceId = UUID.randomUUID().toString();
@@ -190,8 +272,17 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
sha256 = null;
}
}
- run(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, progressMonitor, callback);
}
+
+ /**
+ * Check if this DSP supports ingest streams.
+ *
+ * @return True if this DSP supports an ingest stream, false otherwise.
+ */
+ @Override
+ public boolean supportsIngestStream() {
+ return true;
+ }
/**
* Adds a data source to the case database using a background task in a
@@ -215,7 +306,19 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
* @param callback Callback to call when processing is done.
*/
public void run(String deviceId, String imagePath, String timeZone, boolean ignoreFatOrphanFiles, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
- run(deviceId, imagePath, 0, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callback);
+ ingestStream = new DefaultIngestStream();
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{imagePath}, sectorSize, timeZone, "", "", "", deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ doAddImageProcess(deviceId, imagePath, 0, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callback);
}
/**
@@ -224,6 +327,10 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
* selection and configuration panel. Returns as soon as the background task
* is started and uses the callback object to signal task completion and
* return results.
+ *
+ * The image must already have been added to the case database and stored in
+ * "image" before this method is called. Additionally, an ingest stream must
+ * have been initialized and stored in "ingestStream".
*
* @param deviceId An ASCII-printable identifier for the device
* associated with the data source that is
@@ -243,8 +350,31 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
* during processing.
* @param callback Callback to call when processing is done.
*/
- private void run(String deviceId, String imagePath, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, String md5, String sha1, String sha256, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
- addImageTask = new AddImageTask(deviceId, imagePath, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, null, progressMonitor, callback);
+ private void doAddImageProcess(String deviceId, String imagePath, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, String md5, String sha1, String sha256, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
+
+ // If the data source or ingest stream haven't been initialized, stop processing
+ if (ingestStream == null) {
+ String message = "Ingest stream was not initialized before running the add image process on " + imagePath;
+ logger.log(Level.SEVERE, message);
+ final List<String> errors = new ArrayList<>();
+ errors.add(message);
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+ if (image == null) {
+ String message = "Image was not added to database before running the add image process on " + imagePath;
+ logger.log(Level.SEVERE, message);
+ final List<String> errors = new ArrayList<>();
+ errors.add(message);
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ AddImageTask.ImageDetails imageDetails = new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, null);
+ addImageTask = new AddImageTask(imageDetails,
+ progressMonitor,
+ new StreamingAddDataSourceCallbacks(ingestStream),
+ new StreamingAddImageTaskCallback(ingestStream, callback));
new Thread(addImageTask).start();
}
@@ -260,6 +390,9 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
if (null != addImageTask) {
addImageTask.cancelTask();
}
+ if (ingestStream != null) {
+ ingestStream.stop();
+ }
}
/**
@@ -316,7 +449,20 @@ public class ImageDSProcessor implements DataSourceProcessor, AutoIngestDataSour
this.timeZone = Calendar.getInstance().getTimeZone().getID();
this.ignoreFatOrphanFiles = false;
setDataSourceOptionsCalled = true;
- run(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack);
+
+ ingestStream = new DefaultIngestStream();
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{imagePath}, sectorSize, timeZone, "", "", "", deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding data source with path " + imagePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callBack.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ doAddImageProcess(deviceId, dataSourcePath.toString(), sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, progressMonitor, callBack);
}
/**
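
Taken together, the run variants above now share one control flow. A condensed
sketch using the names from this diff (exception handling elided; "settings",
"progressMonitor", and "callback" are assumed to be in scope):

    // Phase 1: create the image row in the case database synchronously.
    Image image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
            new String[]{imagePath}, sectorSize, timeZone, md5, sha1, sha256, deviceId);

    // Phase 2: open a stream (or use DefaultIngestStream) and add the files in
    // a background task, pushing them to ingest as they are found.
    IngestStream stream = IngestManager.getInstance().openIngestStream(image, settings);
    AddImageTask.ImageDetails details = new AddImageTask.ImageDetails(deviceId, image,
            sectorSize, timeZone, ignoreFatOrphanFiles, md5, sha1, sha256, null);
    new Thread(new AddImageTask(details, progressMonitor,
            new StreamingAddDataSourceCallbacks(stream),
            new StreamingAddImageTaskCallback(stream, callback))).start();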
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/LocalDiskDSProcessor.java b/Core/src/org/sleuthkit/autopsy/casemodule/LocalDiskDSProcessor.java
index 60c0aed94b..0551d15466 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/LocalDiskDSProcessor.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/LocalDiskDSProcessor.java
@@ -18,15 +18,22 @@
*/
package org.sleuthkit.autopsy.casemodule;
+import java.util.ArrayList;
import java.util.Calendar;
+import java.util.List;
import java.util.UUID;
+import java.util.logging.Level;
import javax.swing.JPanel;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
+import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.imagewriter.ImageWriterSettings;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.SleuthkitJNI;
+import org.sleuthkit.datamodel.TskCoreException;
/**
* A local drive data source processor that implements the DataSourceProcessor
@@ -37,6 +44,7 @@ import org.sleuthkit.autopsy.imagewriter.ImageWriterSettings;
@ServiceProvider(service = DataSourceProcessor.class)
public class LocalDiskDSProcessor implements DataSourceProcessor {
+ private final Logger logger = Logger.getLogger(LocalDiskDSProcessor.class.getName());
private static final String DATA_SOURCE_TYPE = NbBundle.getMessage(LocalDiskDSProcessor.class, "LocalDiskDSProcessor.dsType.text");
private final LocalDiskPanel configPanel;
private AddImageTask addDiskTask;
@@ -139,7 +147,25 @@ public class LocalDiskDSProcessor implements DataSourceProcessor {
imageWriterSettings = null;
}
}
- addDiskTask = new AddImageTask(deviceId, drivePath, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, imageWriterSettings, progressMonitor, callback);
+
+ Image image;
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{drivePath}, sectorSize,
+ timeZone, null, null, null, deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding local disk with path " + drivePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ addDiskTask = new AddImageTask(
+ new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, imageWriterSettings),
+ progressMonitor,
+ new StreamingAddDataSourceCallbacks(new DefaultIngestStream()),
+ new StreamingAddImageTaskCallback(new DefaultIngestStream(), callback));
new Thread(addDiskTask).start();
}
@@ -191,7 +217,23 @@ public class LocalDiskDSProcessor implements DataSourceProcessor {
* @param callback Callback to call when processing is done.
*/
private void run(String deviceId, String drivePath, int sectorSize, String timeZone, boolean ignoreFatOrphanFiles, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) {
- addDiskTask = new AddImageTask(deviceId, drivePath, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, imageWriterSettings, progressMonitor, callback);
+ Image image;
+ try {
+ image = SleuthkitJNI.addImageToDatabase(Case.getCurrentCase().getSleuthkitCase(),
+ new String[]{drivePath}, sectorSize,
+ timeZone, null, null, null, deviceId);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error adding local disk with path " + drivePath + " to database", ex);
+ final List<String> errors = new ArrayList<>();
+ errors.add(ex.getMessage());
+ callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errors, new ArrayList<>());
+ return;
+ }
+
+ addDiskTask = new AddImageTask(new AddImageTask.ImageDetails(deviceId, image, sectorSize, timeZone, ignoreFatOrphanFiles, null, null, null, imageWriterSettings),
+ progressMonitor,
+ new StreamingAddDataSourceCallbacks(new DefaultIngestStream()),
+ new StreamingAddImageTaskCallback(new DefaultIngestStream(), callback));
new Thread(addDiskTask).start();
}
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddDataSourceCallbacks.java b/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddDataSourceCallbacks.java
new file mode 100644
index 0000000000..fb26c7a195
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddDataSourceCallbacks.java
@@ -0,0 +1,66 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.casemodule;
+
+import java.util.List;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.ingest.IngestStream;
+import org.sleuthkit.autopsy.ingest.IngestStreamClosedException;
+import org.sleuthkit.datamodel.AddDataSourceCallbacks;
+
+/**
+ * A set of callbacks to be called during the process of adding a data source to
+ * the case database. This implementation of the interface is suitable for
+ * streaming ingest use cases.
+ */
+class StreamingAddDataSourceCallbacks implements AddDataSourceCallbacks {
+
+ private final Logger logger = Logger.getLogger(StreamingAddDataSourceCallbacks.class.getName());
+ private final IngestStream ingestStream;
+
+ /**
+ * Constructs a set of callbacks to be called during the process of adding a
+ * data source to the case database. This implementation of the interface is
+ * suitable for streaming ingest use cases.
+ *
+ * @param stream The IngestStream to send data to
+ */
+ StreamingAddDataSourceCallbacks(IngestStream stream) {
+ ingestStream = stream;
+ }
+
+ @Override
+ public void onFilesAdded(List<Long> fileObjectIds) {
+ if (ingestStream.wasStopped()) {
+ return;
+ }
+
+ try {
+ ingestStream.addFiles(fileObjectIds);
+ } catch (IngestStreamClosedException ex) {
+ if (!ingestStream.wasStopped()) {
+ // If the ingest stream is closed but was not stopped, log an error. The
+ // stream should only be closed once the data source has been completely
+ // added, so it is a severe error if files are still being added.
+ logger.log(Level.SEVERE, "Error adding files to ingest stream - ingest stream is closed");
+ }
+ }
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddImageTaskCallback.java b/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddImageTaskCallback.java
new file mode 100644
index 0000000000..d03f85097a
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/StreamingAddImageTaskCallback.java
@@ -0,0 +1,65 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.casemodule;
+
+import java.util.List;
+import org.sleuthkit.autopsy.ingest.IngestStream;
+import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback;
+import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult;
+import org.sleuthkit.datamodel.Content;
+
+/**
+ * A callback to be called on completion of an add image task. This
+ * implementation of the interface is suitable for streaming ingest use cases.
+ * It closes the ingest stream and then calls the data source processor done
+ * callback.
+ */
+class StreamingAddImageTaskCallback implements AddImageTaskCallback {
+
+ private final IngestStream ingestStream;
+ private final DataSourceProcessorCallback dspCallback;
+
+ /**
+ * Constructs a callback to be called on completion of an add image task.
+ * This implementation of the interface is suitable for streaming ingest use
+ * cases. It closes the ingest stream and then calls the data source
+ * processor done callback.
+ *
+ * @param ingestStream The ingest stream that data is being sent to.
+ * @param dspCallback The callback for non-ingest stream related
+ * processing.
+ */
+ StreamingAddImageTaskCallback(IngestStream ingestStream, DataSourceProcessorCallback dspCallback) {
+ this.ingestStream = ingestStream;
+ this.dspCallback = dspCallback;
+ }
+
+ /**
+ * Called when the add image task is completed.
+ *
+ * @param result The result from the data source processor.
+ * @param errList The list of errors.
+ * @param newDataSources The list of new data sources.
+ */
+ @Override
+ public void onCompleted(DataSourceProcessorResult result, List<String> errList, List<Content> newDataSources) {
+ ingestStream.close();
+ dspCallback.done(result, errList, newDataSources);
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccount.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccount.java
index eb245db07e..90e61fbd0d 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccount.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoAccount.java
@@ -23,6 +23,7 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Objects;
import org.apache.commons.lang.StringUtils;
import org.sleuthkit.datamodel.Account;
@@ -222,10 +223,13 @@ public final class CentralRepoAccount {
public static Collection<CentralRepoAccount> getAccountsWithIdentifierLike(String accountIdentifierSubstring) throws CentralRepoException {
String queryClause = ACCOUNTS_QUERY_CLAUSE
- + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER('%" + accountIdentifierSubstring + "%')";
+ + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER(?)";
+
+ List<Object> params = new ArrayList<>();
+ params.add("%" + accountIdentifierSubstring + "%");
AccountsQueryCallback queryCallback = new AccountsQueryCallback();
- CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback);
+ CentralRepository.getInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getAccountsList();
}
@@ -245,10 +249,13 @@ public final class CentralRepoAccount {
String normalizedAccountIdentifier = normalizeAccountIdentifier(accountIdentifier);
String queryClause = ACCOUNTS_QUERY_CLAUSE
- + " WHERE LOWER(accounts.account_unique_identifier) = LOWER('" + normalizedAccountIdentifier + "')";
+ + " WHERE LOWER(accounts.account_unique_identifier) = LOWER(?)";
+
+ List<Object> params = new ArrayList<>();
+ params.add(normalizedAccountIdentifier);
AccountsQueryCallback queryCallback = new AccountsQueryCallback();
- CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback);
+ CentralRepository.getInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getAccountsList();
}
@@ -266,8 +273,10 @@ public final class CentralRepoAccount {
String queryClause = ACCOUNTS_QUERY_CLAUSE;
+ List<Object> params = new ArrayList<>(); // empty param list
+
AccountsQueryCallback queryCallback = new AccountsQueryCallback();
- CentralRepository.getInstance().executeSelectSQL(queryClause, queryCallback);
+ CentralRepository.getInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getAccountsList();
}
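
The executeQuery calls above replace string concatenation with bound
parameters, which is what closes the SQL injection hole. The RdbmsCentralRepo
implementation is not part of this excerpt; a self-contained sketch of how
such a method is typically written with JDBC (the QueryCallback interface here
is hypothetical):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.List;

    interface QueryCallback {
        void process(ResultSet rs) throws SQLException;
    }

    final class ParameterizedQueryExample {
        /** Binds each value via PreparedStatement instead of splicing it into the SQL. */
        static void executeQuery(Connection conn, String sql, List<Object> params,
                QueryCallback callback) throws SQLException {
            try (PreparedStatement stmt = conn.prepareStatement(sql)) {
                int i = 1; // JDBC parameter indexes start at 1
                for (Object param : params) {
                    stmt.setObject(i++, param);
                }
                try (ResultSet rs = stmt.executeQuery()) {
                    callback.process(rs);
                }
            }
        }
    }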
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDbUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDbUtil.java
index 69ea8d4e05..6ba23b65b5 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDbUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoDbUtil.java
@@ -25,6 +25,9 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.logging.Level;
+import javax.swing.SwingUtilities;
+import org.openide.windows.TopComponent;
+import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import static org.sleuthkit.autopsy.centralrepository.datamodel.RdbmsCentralRepo.SOFTWARE_CR_DB_SCHEMA_VERSION;
@@ -259,8 +262,23 @@ public class CentralRepoDbUtil {
* used
*/
public static void setUseCentralRepo(boolean centralRepoCheckBoxIsSelected) {
+ if (!centralRepoCheckBoxIsSelected) {
+ closePersonasTopComponent();
+ }
ModuleSettings.setConfigSetting(CENTRAL_REPO_NAME, CENTRAL_REPO_USE_KEY, Boolean.toString(centralRepoCheckBoxIsSelected));
}
+
+ /**
+ * Closes Personas top component if it exists.
+ */
+ private static void closePersonasTopComponent() {
+ SwingUtilities.invokeLater(() -> {
+ TopComponent personasWindow = WindowManager.getDefault().findTopComponent("PersonasTopComponent");
+ if (personasWindow != null && personasWindow.isOpened()) {
+ personasWindow.close();
+ }
+ });
+ }
/**
* Use the current settings and the validation query to test the connection
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
index 596ae4e90c..971988b20b 100755
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepository.java
@@ -65,7 +65,8 @@ public interface CentralRepository {
* It will not close active/in-use connections. Thus, it is vital that there
* are no in-use connections when you call this method.
*
- * @throws CentralRepoException if there is a problem closing the connection pool.
+ * @throws CentralRepoException if there is a problem closing the connection
+ * pool.
*/
void shutdownConnections() throws CentralRepoException;
@@ -102,7 +103,7 @@ public interface CentralRepository {
/**
* Add a new name/value pair in the db_info table.
*
- * @param name Key to set
+ * @param name Key to set
* @param value Value to set
*
* @throws CentralRepoException
@@ -113,9 +114,9 @@ public interface CentralRepository {
* Set the data source object id for a specific entry in the data_sources
* table
*
- * @param rowId - the row id for the data_sources table entry
+ * @param rowId - the row id for the data_sources table entry
* @param dataSourceObjectId - the object id for the data source from the
- * caseDb
+ * caseDb
*/
void addDataSourceObjectId(int rowId, long dataSourceObjectId) throws CentralRepoException;
@@ -133,7 +134,7 @@ public interface CentralRepository {
/**
* Update the value for a name in the name/value db_info table.
*
- * @param name Name to find
+ * @param name Name to find
* @param value Value to assign to name.
*
* @throws CentralRepoException
@@ -162,10 +163,10 @@ public interface CentralRepository {
* @param eamCase The case to update
*/
void updateCase(CorrelationCase eamCase) throws CentralRepoException;
-
+
/**
- * Queries the examiner table for the given user name.
- * Adds a row if the user is not found in the examiner table.
+ * Queries the examiner table for the given user name. Adds a row if the
+ * user is not found in the examiner table.
*
* @param examinerLoginName user name to look for.
* @return CentralRepoExaminer for the given user name.
@@ -216,7 +217,7 @@ public interface CentralRepository {
* @param eamDataSource the data source to add
*
* @return - A CorrelationDataSource object with data source's central
- * repository id
+ * repository id
*/
CorrelationDataSource newDataSource(CorrelationDataSource eamDataSource) throws CentralRepoException;
@@ -245,8 +246,8 @@ public interface CentralRepository {
/**
* Retrieves Data Source details based on data source device ID
*
- * @param correlationCase the current CorrelationCase used for ensuring
- * uniqueness of DataSource
+ * @param correlationCase the current CorrelationCase used for ensuring
+ * uniqueness of DataSource
* @param caseDbDataSourceId the data source device ID number
*
* @return The data source
@@ -257,8 +258,8 @@ public interface CentralRepository {
* Retrieves Data Source details based on data source ID
*
* @param correlationCase the current CorrelationCase used for ensuring
- * uniqueness of DataSource
- * @param dataSourceId the data source ID number
+ * uniqueness of DataSource
+ * @param dataSourceId the data source ID number
*
* @return The data source
*/
@@ -275,7 +276,7 @@ public interface CentralRepository {
* Changes the name of a data source in the DB
*
* @param eamDataSource The data source
- * @param newName The new name
+ * @param newName The new name
*
* @throws CentralRepoException
*/
@@ -293,12 +294,12 @@ public interface CentralRepository {
* Retrieves eamArtifact instances from the database that are associated
* with the eamArtifactType and eamArtifactValues of the given eamArtifact.
*
- * @param aType EamArtifact.Type to search for
+ * @param aType EamArtifact.Type to search for
* @param values The list of correlation values to get
- * CorrelationAttributeInstances for
+ * CorrelationAttributeInstances for
*
* @return List of artifact instances for a given type with the specified
- * values
+ * values
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
@@ -324,14 +325,14 @@ public interface CentralRepository {
* with the eamArtifactType and eamArtifactValues of the given eamArtifact
* for the specified cases.
*
- * @param aType The type of the artifact
- * @param values The list of correlation values to get
- * CorrelationAttributeInstances for
+ * @param aType The type of the artifact
+ * @param values The list of correlation values to get
+ * CorrelationAttributeInstances for
* @param caseIds The list of central repository case ids to get
- * CorrelationAttributeInstances for
+ * CorrelationAttributeInstances for
*
* @return List of artifact instances for a given type with the specified
- * values for the specified cases
+ * values for the specified cases
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
@@ -346,7 +347,7 @@ public interface CentralRepository {
* @param value Value to search for
*
* @return Number of artifact instances having ArtifactType and
- * ArtifactValue.
+ * ArtifactValue.
*/
Long getCountArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;
@@ -385,7 +386,7 @@ public interface CentralRepository {
* @param correlationDataSource Data source to search for
*
* @return Number of artifact instances having caseDisplayName and
- * dataSource
+ * dataSource
*/
Long getCountArtifactInstancesByCaseDataSource(CorrelationDataSource correlationDataSource) throws CentralRepoException;
@@ -414,7 +415,7 @@ public interface CentralRepository {
* in the associated CorrelationAttribute object.
*
* @param eamArtifact The correlation attribute whose database instance will
- * be updated.
+ * be updated.
*
* @throws CentralRepoException
*/
@@ -427,11 +428,11 @@ public interface CentralRepository {
* Method exists to support instances added using Central Repository version
* 1,1 and older
*
- * @param type The type of instance.
- * @param correlationCase The case tied to the instance.
+ * @param type The type of instance.
+ * @param correlationCase The case tied to the instance.
* @param correlationDataSource The data source tied to the instance.
- * @param value The value tied to the instance.
- * @param filePath The file path tied to the instance.
+ * @param value The value tied to the instance.
+ * @param filePath The file path tied to the instance.
*
* @return The correlation attribute if it exists; otherwise null.
*
@@ -444,11 +445,10 @@ public interface CentralRepository {
* Find a correlation attribute in the Central Repository database given the
* instance type, case, data source, object id.
*
- * @param type The type of instance.
- * @param correlationCase The case tied to the instance.
+ * @param type The type of instance.
+ * @param correlationCase The case tied to the instance.
* @param correlationDataSource The data source tied to the instance.
- * @param objectID The object id of the file tied to the
- * instance.
+ * @param objectID The object id of the file tied to the instance.
*
* @return The correlation attribute if it exists; otherwise null.
*
@@ -484,7 +484,7 @@ public interface CentralRepository {
* @param value Value to search for
*
* @return List of cases containing this artifact with instances marked as
- * bad
+ * bad
*
* @throws CentralRepoException
*/
@@ -498,7 +498,7 @@ public interface CentralRepository {
* @param value Value to search for
*
* @return List of cases containing this artifact with instances marked as
- * bad
+ * bad
*
* @throws CentralRepoException
*/
@@ -555,13 +555,13 @@ public interface CentralRepository {
*/
public boolean isFileHashInReferenceSet(String hash, int referenceSetID) throws CentralRepoException, CorrelationAttributeNormalizationException;
-
/**
- * Retrieves the given file HashHitInfo if the given file hash is in this
+ * Retrieves the given file HashHitInfo if the given file hash is in this
* reference set. Only searches the reference_files table.
*
* @param hash The hash to find in a search.
- * @param referenceSetID The referenceSetID within which the file should exist.
+ * @param referenceSetID The referenceSetID within which the file should
+ * exist.
*
* @return The HashHitInfo if found or null if not found.
*
@@ -570,7 +570,6 @@ public interface CentralRepository {
*/
HashHitInfo lookupHash(String hash, int referenceSetID) throws CentralRepoException, CorrelationAttributeNormalizationException;
-
/**
* Check if the given value is in a specific reference set
*
@@ -638,7 +637,7 @@ public interface CentralRepository {
* Update an existing organization.
*
* @param updatedOrganization the values the Organization with the same ID
- * will be updated to in the database.
+ * will be updated to in the database.
*
* @throws CentralRepoException
*/
@@ -690,8 +689,7 @@ public interface CentralRepository {
* Add a new reference instance
*
* @param eamGlobalFileInstance The reference instance to add
- * @param correlationType Correlation Type that this Reference
- * Instance is
+ * @param correlationType Correlation Type that this Reference Instance is
*
* @throws CentralRepoException
*/
@@ -701,8 +699,8 @@ public interface CentralRepository {
* Insert the bulk collection of Global File Instances
*
* @param globalInstances a Set of EamGlobalFileInstances to insert into the
- * db.
- * @param contentType the Type of the global instances
+ * db.
+ * @param contentType the Type of the global instances
*
* @throws CentralRepoException
*/
@@ -711,7 +709,7 @@ public interface CentralRepository {
/**
* Get all reference entries having a given correlation type and value
*
- * @param aType Type to use for matching
+ * @param aType Type to use for matching
* @param aValue Value to use for matching
*
* @return List of all global file instances with a type and value
@@ -736,7 +734,7 @@ public interface CentralRepository {
* used to correlate artifacts.
*
* @return List of EamArtifact.Type's. If none are defined in the database,
- * the default list will be returned.
+ * the default list will be returned.
*
* @throws CentralRepoException
*/
@@ -747,7 +745,7 @@ public interface CentralRepository {
* artifacts.
*
* @return List of enabled EamArtifact.Type's. If none are defined in the
- * database, the default list will be returned.
+ * database, the default list will be returned.
*
* @throws CentralRepoException
*/
@@ -758,7 +756,7 @@ public interface CentralRepository {
* correlate artifacts.
*
* @return List of supported EamArtifact.Type's. If none are defined in the
- * database, the default list will be returned.
+ * database, the default list will be returned.
*
* @throws CentralRepoException
*/
@@ -799,15 +797,15 @@ public interface CentralRepository {
*
* @return the lock, or null if locking is not supported
*
- * @throws CentralRepoException if the coordination service is running but we fail
- * to get the lock
+ * @throws CentralRepoException if the coordination service is running but
+ * we fail to get the lock
*/
public CoordinationService.Lock getExclusiveMultiUserDbLock() throws CentralRepoException;
/**
* Process the Artifact instance in the EamDb
*
- * @param type EamArtifact.Type to search for
+ * @param type EamArtifact.Type to search for
* @param instanceTableCallback callback to process the instance
*
* @throws CentralRepoException
@@ -817,9 +815,9 @@ public interface CentralRepository {
/**
* Process the Artifact instance in the EamDb
*
- * @param type EamArtifact.Type to search for
+ * @param type EamArtifact.Type to search for
* @param instanceTableCallback callback to process the instance
- * @param whereClause query string to execute
+ * @param whereClause query string to execute
*
* @throws CentralRepoException
*/
@@ -828,72 +826,58 @@ public interface CentralRepository {
/**
* Process a SELECT query
*
- * @param selectClause query string to execute
+ * @param selectClause query string to execute
* @param instanceTableCallback callback to process the instance
*
* @throws CentralRepoException
*/
- public void processSelectClause(String selectClause, InstanceTableCallback instanceTableCallback) throws CentralRepoException;
-
-
+ public void processSelectClause(String selectClause, InstanceTableCallback instanceTableCallback) throws CentralRepoException;
+
/**
- * Executes an INSERT sql statement on the central repository database.
- * @param sql INSERT sql to execute.
- *
- * @throws CentralRepoException If there is an error.
+ * Executes an INSERT/UPDATE/DELETE sql as a prepared statement, on the
+ * central repository database.
+ *
+ * @param sql sql to execute.
+ * @param params List of query params to use, may be empty.
+ *
+ * @throws CentralRepoException If there is an error.
*/
- void executeInsertSQL(String sql) throws CentralRepoException;
-
+ void executeCommand(String sql, List<Object> params) throws CentralRepoException;
+
/**
- * Executes a SELECT sql statement on the central repository database.
- *
- * @param sql SELECT sql to execute.
+ * Executes a SELECT query sql as a prepared statement, on the central
+ * repository database.
+ *
+ * @param sql sql to execute.
+ * @param params List of query params to use, may be empty.
* @param queryCallback Query callback to handle the result of the query.
- *
+ *
* @throws CentralRepoException If there is an error.
*/
- void executeSelectSQL(String sql, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException;
-
- /**
- * Executes an UPDATE sql statement on the central repository database.
- *
- * @param sql UPDATE sql to execute.
- *
- * @throws CentralRepoException If there is an error.
- */
- void executeUpdateSQL(String sql) throws CentralRepoException;
-
- /**
- * Executes a DELETE sql statement on the central repository database.
- *
- * @param sql DELETE sql to execute.
- *
- * @throws CentralRepoException If there is an error.
- */
- void executeDeleteSQL(String sql) throws CentralRepoException;
-
+ void executeQuery(String sql, List<Object> params, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException;
+
/**
* Get account type by type name.
- *
+ *
* @param accountTypeName account type name to look for
* @return CR account type
- * @throws CentralRepoException
+ * @throws CentralRepoException
*/
CentralRepoAccountType getAccountTypeByName(String accountTypeName) throws CentralRepoException;
-
+
/**
* Gets all account types.
- *
+ *
* @return Collection of all CR account types in the database.
- *
- * @throws CentralRepoException
+ *
+ * @throws CentralRepoException
*/
Collection<CentralRepoAccountType> getAllAccountTypes() throws CentralRepoException;
-
+
/**
- * Get an account from the accounts table matching the given type/ID.
+ * Get an account from the accounts table matching the given type/ID.
 * Inserts a row if one doesn't exist.
- *
+ *
* @param crAccountType CR account type to look for or create
* @param accountUniqueID type specific unique account id
* @return CR account
@@ -902,5 +886,5 @@ public interface CentralRepository {
* @throws InvalidAccountIDException If the account identifier is not valid.
*/
CentralRepoAccount getOrCreateAccount(CentralRepoAccount.CentralRepoAccountType crAccountType, String accountUniqueID) throws InvalidAccountIDException, CentralRepoException;
-
+
}
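
For context, the four statement-specific methods collapse into executeCommand() for INSERT/UPDATE/DELETE and executeQuery() for SELECT, with values always bound positionally instead of concatenated into the SQL. A minimal usage sketch, assuming the central repository is enabled; the persona row id 42 is hypothetical:

    // assumes imports: java.sql.ResultSet, java.sql.SQLException,
    //                  java.util.ArrayList, java.util.List
    static void demoParameterizedAccess() throws CentralRepoException {
        CentralRepository cr = CentralRepository.getInstance();
        if (cr == null) {
            return; // central repository not enabled
        }

        // Update via positional parameters; no values are concatenated into the SQL.
        List<Object> params = new ArrayList<>();
        params.add("reviewed");
        params.add(42L); // hypothetical persona row id
        cr.executeCommand("UPDATE personas SET comment = ? WHERE id = ?", params);

        // Read it back; the callback receives the open ResultSet.
        List<Object> queryParams = new ArrayList<>();
        queryParams.add(42L);
        cr.executeQuery("SELECT comment FROM personas WHERE id = ?", queryParams,
                new CentralRepositoryDbQueryCallback() {
                    @Override
                    public void process(ResultSet rs) throws SQLException {
                        while (rs.next()) {
                            System.out.println(rs.getString("comment"));
                        }
                    }
                });
    }
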
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java
index cc1b17ab80..67ef828902 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/Persona.java
@@ -24,11 +24,11 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Objects;
import java.util.UUID;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
-import org.sleuthkit.datamodel.SleuthkitCase;
/**
* This class abstracts a persona.
@@ -122,7 +122,7 @@ public class Persona {
private final long modifiedDate;
private final PersonaStatus status;
private final CentralRepoExaminer examiner;
-
+
@NbBundle.Messages("Persona.defaultName=Unnamed")
public static String getDefaultName() {
return Bundle.Persona_defaultName();
@@ -200,16 +200,17 @@ public class Persona {
/**
* Creates a Persona and associates the specified account with it.
*
- * @param personaName Persona name.
- * @param comment Comment to associate with persona, may be null.
- * @param status Persona status
- * @param account Account for which the persona is being created.
+ * @param personaName Persona name.
+ * @param comment Comment to associate with persona, may be null.
+ * @param status Persona status
+ * @param account Account for which the persona is being created.
* @param justification Justification for why this account belongs to this
- * persona, may be null.
- * @param confidence Confidence level for this association of Persona &
- * account.
+ * persona, may be null.
+ * @param confidence Confidence level for this association of Persona &
+ * account.
*
* @return Persona Persona created.
+ *
* @throws CentralRepoException If there is an error creating the Persona.
*/
public static Persona createPersonaForAccount(String personaName, String comment, PersonaStatus status, CentralRepoAccount account, String justification, Persona.Confidence confidence) throws CentralRepoException {
@@ -221,15 +222,15 @@ public class Persona {
/**
* Inserts a row in the Persona tables.
*
- * @param name Persona name, may be null - default name is used in that
- * case.
+ * @param name Persona name, may be null - default name is used in that
+ * case.
* @param comment Comment to associate with persona, may be null.
- * @param status Persona status.
+ * @param status Persona status.
*
* @return Persona corresponding to the row inserted in the personas table.
*
* @throws CentralRepoException If there is an error in adding a row to
- * personas table.
+ * personas table.
*/
private static Persona createPersona(String name, String comment, PersonaStatus status) throws CentralRepoException {
// generate a UUID for the persona
@@ -238,32 +239,38 @@ public class Persona {
Instant instant = Instant.now();
Long timeStampMillis = instant.toEpochMilli();
- String insertClause = " INTO personas (uuid, comment, name, created_date, modified_date, status_id, examiner_id ) "
- + "VALUES ( '" + uuidStr + "', "
- + "'" + ((StringUtils.isBlank(comment) ? "" : SleuthkitCase.escapeSingleQuotes(comment))) + "',"
- + "'" + ((StringUtils.isBlank(name) ? getDefaultName() : SleuthkitCase.escapeSingleQuotes(name))) + "',"
- + timeStampMillis.toString() + ","
- + timeStampMillis.toString() + ","
- + status.getStatusId() + ","
- + examiner.getId()
- + ")";
- getCRInstance().executeInsertSQL(insertClause);
+ String insertPersonaSQL = "INSERT INTO personas (uuid, comment, name, created_date, modified_date, status_id, examiner_id ) " //NON-NLS
+ + " VALUES (?, ?, ?, ?, ?, ?, ?)";
+ List<Object> params = new ArrayList<>();
+ params.add(uuidStr);
+ params.add(StringUtils.isBlank(comment) ? "" : comment);
+ params.add(StringUtils.isBlank(name) ? getDefaultName() : name);
+ params.add(timeStampMillis);
+ params.add(timeStampMillis);
+ params.add(status.getStatusId());
+ params.add(examiner.getId());
+
+ getCRInstance().executeCommand(insertPersonaSQL, params);
return getPersonaByUUID(uuidStr);
}
-
+
/**
* Sets the comment of this persona.
*
* @param comment The new comment.
- *
+ *
* @throws CentralRepoException If there is an error.
*/
public void setComment(String comment) throws CentralRepoException {
- String updateClause = "UPDATE personas SET comment = '" + comment + "' WHERE id = " + id;
+ String updateSQL = "UPDATE personas SET comment = ? WHERE id = ?";
CentralRepository cr = CentralRepository.getInstance();
if (cr != null) {
- getCRInstance().executeUpdateSQL(updateClause);
+ List<Object> params = new ArrayList<>();
+ params.add(StringUtils.isBlank(comment) ? "" : comment);
+ params.add(id);
+
+ getCRInstance().executeCommand(updateSQL, params);
}
}
@@ -271,14 +278,18 @@ public class Persona {
* Sets the name of this persona
*
* @param name The new name.
- *
+ *
* @throws CentralRepoException If there is an error.
*/
public void setName(String name) throws CentralRepoException {
- String updateClause = "UPDATE personas SET name = '" + name + "' WHERE id = " + id;
+ String updateSQL = "UPDATE personas SET name = ? WHERE id = ?";
CentralRepository cr = CentralRepository.getInstance();
if (cr != null) {
- cr.executeUpdateSQL(updateClause);
+ List<Object> params = new ArrayList<>();
+ params.add(StringUtils.isBlank(name) ? getDefaultName() : name);
+ params.add(id);
+
+ getCRInstance().executeCommand(updateSQL, params);
}
}
@@ -286,52 +297,57 @@ public class Persona {
* Associates an account with a persona by creating a row in the
* PersonaAccounts table.
*
- * @param account Account to add to persona.
+ * @param account Account to add to persona.
* @param justification Reason for adding the account to persona, may be
- * null.
- * @param confidence Confidence level.
+ * null.
+ * @param confidence Confidence level.
*
* @return PersonaAccount
+ *
* @throws CentralRepoException If there is an error.
*/
public PersonaAccount addAccount(CentralRepoAccount account, String justification, Persona.Confidence confidence) throws CentralRepoException {
return PersonaAccount.addPersonaAccount(this, account, justification, confidence);
}
-
+
/**
* Removes the given PersonaAccount (persona/account association)
*
* @param account account to remove
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void removeAccount(PersonaAccount account) throws CentralRepoException {
PersonaAccount.removePersonaAccount(account.getId());
}
-
+
/**
* Modifies the confidence / justification of the given PersonaAccount
*
- * @param account Account to modify.
- * @param confidence Level of confidence.
+ * @param account Account to modify.
+ * @param confidence Level of confidence.
* @param justification Justification.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void modifyAccount(PersonaAccount account, Confidence confidence, String justification) throws CentralRepoException {
PersonaAccount.modifyPersonaAccount(account.getId(), confidence, justification);
}
-
+
/**
* Marks this persona as deleted
*/
public void delete() throws CentralRepoException {
- String deleteSQL = "UPDATE personas SET status_id = " + PersonaStatus.DELETED.status_id + " WHERE id = " + this.id;
+ String deleteSQL = "UPDATE personas SET status_id = ? WHERE id = ?";
CentralRepository cr = CentralRepository.getInstance();
if (cr != null) {
- cr.executeUpdateSQL(deleteSQL);
+ List<Object> params = new ArrayList<>();
+ params.add(PersonaStatus.DELETED.getStatusId());
+ params.add(id);
+
+ getCRInstance().executeCommand(deleteSQL, params);
}
}
@@ -340,7 +356,7 @@ public class Persona {
*/
private static class PersonaQueryCallback implements CentralRepositoryDbQueryCallback {
- private final Collection<Persona> personaList = new ArrayList<>();
+ private final Collection<Persona> personaList = new ArrayList<>();
@Override
public void process(ResultSet rs) throws SQLException {
@@ -361,7 +377,7 @@ public class Persona {
status,
examiner
);
-
+
personaList.add(persona);
}
}
@@ -373,126 +389,161 @@ public class Persona {
// Partial query string to select from personas table,
// just supply the where clause.
- private static final String PERSONA_QUERY =
- "SELECT p.id, p.uuid, p.name, p.comment, p.created_date, p.modified_date, p.status_id, p.examiner_id, e.login_name, e.display_name "
- + "FROM personas as p "
- + "INNER JOIN examiners as e ON e.id = p.examiner_id ";
-
-
+ private static final String PERSONA_QUERY
+ = "SELECT p.id, p.uuid, p.name, p.comment, p.created_date, p.modified_date, p.status_id, p.examiner_id, e.login_name, e.display_name "
+ + "FROM personas as p "
+ + "INNER JOIN examiners as e ON e.id = p.examiner_id ";
+
/**
* Gets the row from the Personas table with the given UUID, creates and
* returns the Persona from that data.
*
* @param uuid Persona UUID to match.
+ *
* @return Persona matching the given UUID, may be null if no match is
- * found.
+ * found.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
private static Persona getPersonaByUUID(String uuid) throws CentralRepoException {
- String queryClause =
- PERSONA_QUERY
- + "WHERE p.uuid = '" + uuid + "'";
+ String queryClause
+ = PERSONA_QUERY
+ + "WHERE p.uuid = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(uuid);
PersonaQueryCallback queryCallback = new PersonaQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(queryClause, params, queryCallback);
Collection<Persona> personas = queryCallback.getPersonas();
-
+
return personas.isEmpty() ? null : personas.iterator().next();
}
/**
- * Gets the rows from the Personas table with matching name.
- * Persona marked as DELETED are not returned.
+ * Gets the rows from the Personas table with matching name. Personas marked
+ * as DELETED are not returned.
*
* @param partialName Name substring to match.
+ *
* @return Collection of personas matching the given name substring, may be
- * empty if no match is found.
+ * empty if no match is found.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public static Collection<Persona> getPersonaByName(String partialName) throws CentralRepoException {
String queryClause = PERSONA_QUERY
- + "WHERE p.status_id != " + PersonaStatus.DELETED.status_id +
- " AND LOWER(p.name) LIKE " + "LOWER('%" + partialName + "%')" ;
+ + "WHERE p.status_id != ? "
+ + " AND LOWER(p.name) LIKE LOWER(?) ESCAPE '!'";
+
+ List<Object> params = new ArrayList<>();
+ params.add(PersonaStatus.DELETED.getStatusId());
+ params.add("%" + getLikeEscaped(partialName) + "%"); // partial substring search
PersonaQueryCallback queryCallback = new PersonaQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getPersonas();
}
-
+
+ /**
+ * Escapes a string for use in LIKE clauses by escaping '%', '_', and the
+ * escape character '!' itself. This uses '!' as the escape character and
+ * the sql should reflect this accordingly. See
+ * https://stackoverflow.com/questions/8247970/using-like-wildcard-in-prepared-statement,
+ * https://www.postgresql.org/docs/8.3/functions-matching.html and
+ * https://www.sqlite.org/lang_expr.html for more information.
+ *
+ * @param initial The initial string.
+ *
+ * @return The resulting string.
+ */
+ private static String getLikeEscaped(String initial) {
+ if (initial == null) {
+ return null;
+ }
+
+ return initial
+ .replace("!", "!!")
+ .replace("%", "!%")
+ .replace("_", "!_");
+ }
+
/**
* Gets the rows from the Personas table where persona accounts' names are
 * similar to the given one. Personas marked as DELETED are not returned.
*
* @param partialName Name substring to match.
+ *
* @return Collection of personas matching the given name substring, may be
- * empty if no match is found.
+ * empty if no match is found.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public static Collection<Persona> getPersonaByAccountIdentifierLike(String partialName) throws CentralRepoException {
- String queryClause = "SELECT DISTINCT accounts.id as a_id,"
- + "p.id, p.uuid, p.name, p.comment, p.created_date, p.modified_date, p.status_id, p.examiner_id, e.login_name, e.display_name"
- + " FROM accounts"
- + " JOIN persona_accounts as pa ON pa.account_id = accounts.id"
- + " JOIN personas as p ON p.id = pa.persona_id"
- + " JOIN examiners as e ON e.id = p.examiner_id"
- + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER('%" + partialName + "%')"
- + " AND p.status_id != " + Persona.PersonaStatus.DELETED.getStatusId()
- + " GROUP BY p.id";
+ String queryClause = "SELECT p.id, p.uuid, p.name, p.comment, p.created_date, p.modified_date, p.status_id, p.examiner_id, e.login_name\n"
+ + "FROM personas p\n"
+ + "LEFT JOIN examiners e ON e.id = p.examiner_id\n"
+ + "WHERE p.status_id <> ?\n"
+ + "AND p.id IN (\n"
+ + " SELECT pa.persona_id\n"
+ + " FROM persona_accounts pa\n"
+ + " INNER JOIN accounts a ON a.id = pa.account_id\n"
+ + " WHERE LOWER(a.account_unique_identifier) LIKE LOWER(?) ESCAPE '!'\n"
+ + ")";
PersonaQueryCallback queryCallback = new PersonaQueryCallback();
- CentralRepository cr = CentralRepository.getInstance();
- if (cr != null) {
- cr.executeSelectSQL(queryClause, queryCallback);
- }
+ List<Object> params = new ArrayList<>();
+ params.add(PersonaStatus.DELETED.getStatusId());
+ params.add("%" + getLikeEscaped(partialName) + "%"); // partial substring search
+
+ getCRInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getPersonas();
}
-
+
/**
* Creates an alias for the Persona.
*
- * @param alias Alias name.
+ * @param alias Alias name.
* @param justification Reason for assigning the alias, may be null.
- * @param confidence Confidence level.
+ * @param confidence Confidence level.
*
* @return PersonaAlias
+ *
* @throws CentralRepoException If there is an error in creating the alias.
*/
public PersonaAlias addAlias(String alias, String justification, Persona.Confidence confidence) throws CentralRepoException {
return PersonaAlias.addPersonaAlias(this, alias, justification, confidence);
}
-
+
/**
* Removes the given alias.
*
* @param alias alias to remove
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void removeAlias(PersonaAlias alias) throws CentralRepoException {
PersonaAlias.removePersonaAlias(alias);
}
-
+
/**
* Modifies the given alias.
*
- * @param key Key for the alias to modify.
- * @param confidence Level of confidence.
+ * @param key Key for the alias to modify.
+ * @param confidence Level of confidence.
* @param justification Justification.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void modifyAlias(PersonaAlias key, Confidence confidence, String justification) throws CentralRepoException {
PersonaAlias.modifyPersonaAlias(key, confidence, justification);
@@ -512,39 +563,40 @@ public class Persona {
/**
* Adds specified metadata to the persona.
*
- * @param name Metadata name.
- * @param value Metadata value.
+ * @param name Metadata name.
+ * @param value Metadata value.
* @param justification Reason for adding the metadata, may be null.
- * @param confidence Confidence level.
+ * @param confidence Confidence level.
*
* @return PersonaMetadata
+ *
* @throws CentralRepoException If there is an error in adding metadata.
*/
public PersonaMetadata addMetadata(String name, String value, String justification, Persona.Confidence confidence) throws CentralRepoException {
return PersonaMetadata.addPersonaMetadata(this.getId(), name, value, justification, confidence);
}
-
+
/**
* Removes the given metadata from this persona.
*
* @param metadata metadata to remove
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void removeMetadata(PersonaMetadata metadata) throws CentralRepoException {
PersonaMetadata.removePersonaMetadata(metadata);
}
-
+
/**
* Modifies the given metadata.
*
- * @param key Key for the metadata to modify.
- * @param confidence Level of confidence.
+ * @param key Key for the metadata to modify.
+ * @param confidence Level of confidence.
* @param justification Justification.
*
* @throws CentralRepoException If there is an error in querying the
- * Personas table.
+ * Personas table.
*/
public void modifyMetadata(PersonaMetadata key, Confidence confidence, String justification) throws CentralRepoException {
PersonaMetadata.modifyPersonaMetadata(key, confidence, justification);
@@ -567,12 +619,12 @@ public class Persona {
* @return Collection of PersonaAccounts, may be empty.
*
* @throws CentralRepoException If there is an error in getting the
- * persona_account.
+ * persona_account.
*/
public Collection<PersonaAccount> getPersonaAccounts() throws CentralRepoException {
return PersonaAccount.getPersonaAccountsForPersona(this.getId());
}
-
+
/**
* Callback to process a query that gets cases for account instances of an
* account
@@ -600,8 +652,9 @@ public class Persona {
* Gets a list of cases that the persona appears in.
*
* @return Collection of cases that the persona appears in, may be empty.
+ *
* @throws CentralRepoException If there is an error in getting the cases
- * from the database.
+ * from the database.
*/
public Collection<CorrelationCase> getCases() throws CentralRepoException {
@@ -615,10 +668,13 @@ public class Persona {
String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(correlationType);
String querySql = "SELECT DISTINCT case_id FROM " + tableName
- + " WHERE account_id = " + account.getId();
+ + " WHERE account_id = ?"; // param 1
+
+ List<Object> params = new ArrayList<>();
+ params.add(account.getId());
CaseForAccountInstanceQueryCallback queryCallback = new CaseForAccountInstanceQueryCallback();
- getCRInstance().executeSelectSQL(querySql, queryCallback);
+ getCRInstance().executeQuery(querySql, params, queryCallback);
// Add any cases that aren't already on the list.
for (CorrelationCase corrCase : queryCallback.getCases()) {
@@ -664,7 +720,7 @@ public class Persona {
* Gets all data sources that the persona appears in.
*
* @return Collection of data sources that the persona appears in, may be
- * empty.
+ * empty.
*
* @throws CentralRepoException
*/
@@ -678,10 +734,13 @@ public class Persona {
String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(correlationType);
String querySql = "SELECT case_id, data_source_id FROM " + tableName
- + " WHERE account_id = " + account.getId();
+ + " WHERE account_id = ?"; // param 1
+
+ List<Object> params = new ArrayList<>();
+ params.add(account.getId());
DatasourceForAccountInstanceQueryCallback queryCallback = new DatasourceForAccountInstanceQueryCallback();
- getCRInstance().executeSelectSQL(querySql, queryCallback);
+ getCRInstance().executeQuery(querySql, params, queryCallback);
// Add any data sources that aren't already on the list.
for (CorrelationDataSource correlationDatasource : queryCallback.getDataSources()) {
@@ -738,7 +797,9 @@ public class Persona {
* the X_instance table for the given account type.
*
* @param crAccountType Account type to generate the query string for.
+ *
* @return Query substring.
+ *
* @throws CentralRepoException
*/
private static String getPersonaFromInstanceTableQueryTemplate(CentralRepoAccount.CentralRepoAccountType crAccountType) throws CentralRepoException {
@@ -763,6 +824,7 @@ public class Persona {
* @param correlationCase Case to look the persona in.
*
* @return Collection of personas, may be empty.
+ *
* @throws CentralRepoException
*/
public static Collection<Persona> getPersonasForCase(CorrelationCase correlationCase) throws CentralRepoException {
@@ -772,11 +834,15 @@ public class Persona {
for (CentralRepoAccount.CentralRepoAccountType crAccountType : accountTypes) {
String querySql = getPersonaFromInstanceTableQueryTemplate(crAccountType)
- + " WHERE case_id = " + correlationCase.getID()
- + "AND personas.status_id != " + Persona.PersonaStatus.DELETED.getStatusId();
+ + " WHERE case_id = ?" // param 1
+ + " AND personas.status_id != ?"; // param 2
+
+ List<Object> params = new ArrayList<>();
+ params.add(correlationCase.getID());
+ params.add(Persona.PersonaStatus.DELETED.getStatusId());
PersonaFromAccountInstanceQueryCallback queryCallback = new PersonaFromAccountInstanceQueryCallback();
- getCRInstance().executeSelectSQL(querySql, queryCallback);
+ getCRInstance().executeQuery(querySql, params, queryCallback);
// Add persona that aren't already on the list.
for (Persona persona : queryCallback.getPersonasList()) {
@@ -795,6 +861,7 @@ public class Persona {
* @param dataSource Data source to look the persona in.
*
* @return Collection of personas, may be empty.
+ *
* @throws CentralRepoException
*/
public static Collection<Persona> getPersonasForDataSource(CorrelationDataSource dataSource) throws CentralRepoException {
@@ -804,11 +871,15 @@ public class Persona {
for (CentralRepoAccount.CentralRepoAccountType crAccountType : accountTypes) {
String querySql = getPersonaFromInstanceTableQueryTemplate(crAccountType)
- + " WHERE data_source_id = " + dataSource.getID()
- + "AND personas.status_id != " + Persona.PersonaStatus.DELETED.getStatusId();
+ + " WHERE data_source_id = ?"
+ + " AND personas.status_id != ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(dataSource.getID());
+ params.add(Persona.PersonaStatus.DELETED.getStatusId());
PersonaFromAccountInstanceQueryCallback queryCallback = new PersonaFromAccountInstanceQueryCallback();
- getCRInstance().executeSelectSQL(querySql, queryCallback);
+ getCRInstance().executeQuery(querySql, params, queryCallback);
// Add persona that aren't already on the list.
for (Persona persona : queryCallback.getPersonasList()) {
@@ -820,23 +891,22 @@ public class Persona {
}
return personaList;
}
-
-
+
/**
- * Wraps the call to CentralRepository.getInstance() throwing an
- * exception if instance is null;
- *
+ * Wraps the call to CentralRepository.getInstance() throwing an exception
+ * if instance is null.
+ *
* @return Instance of CentralRepository
- *
- * @throws CentralRepoException
+ *
+ * @throws CentralRepoException
*/
- private static CentralRepository getCRInstance() throws CentralRepoException {
+ private static CentralRepository getCRInstance() throws CentralRepoException {
CentralRepository instance = CentralRepository.getInstance();
-
- if(instance == null) {
+
+ if (instance == null) {
throw new CentralRepoException("Failed to get instance of CentralRespository, CR was null");
}
-
+
return instance;
}
}
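
Worked example of the LIKE escaping introduced above: getLikeEscaped() doubles the escape character and prefixes both wildcards, so user-supplied search text can no longer act as a pattern. With a sample input chosen for illustration:

    public class LikeEscapeDemo {
        public static void main(String[] args) {
            // Same transformation as the private Persona.getLikeEscaped() above.
            String raw = "50%_off!";
            String escaped = raw.replace("!", "!!").replace("%", "!%").replace("_", "!_");
            System.out.println(escaped); // prints 50!%!_off!!

            // Wrapped and bound to: ... LOWER(p.name) LIKE LOWER(?) ESCAPE '!'
            String param = "%" + escaped + "%";
            System.out.println(param);   // prints %50!%!_off!!%
        }
    }

Only the two '%' added by the caller remain unescaped, so the query matches the literal text "50%_off!" as a substring.
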
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java
index 3a921dd483..917c988e1c 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAccount.java
@@ -24,9 +24,9 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
-import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Account;
/**
@@ -121,39 +121,47 @@ public class PersonaAccount {
/**
* Creates an account for the specified Persona.
*
- * @param persona Persona for which the account is being added.
- * @param account Account.
+ * @param persona Persona for which the account is being added.
+ * @param account Account.
* @param justification Reason for assigning the alias, may be null.
- * @param confidence Confidence level.
+ * @param confidence Confidence level.
*
* @return PersonaAccount
*
* @throws CentralRepoException If there is an error in creating the
- * account.
+ * account.
*/
static PersonaAccount addPersonaAccount(Persona persona, CentralRepoAccount account, String justification, Persona.Confidence confidence) throws CentralRepoException {
CentralRepoExaminer currentExaminer = getCRInstance().getOrInsertExaminer(System.getProperty("user.name"));
Instant instant = Instant.now();
Long timeStampMillis = instant.toEpochMilli();
- String insertClause = " INTO persona_accounts (persona_id, account_id, justification, confidence_id, date_added, examiner_id ) "
- + "VALUES ( "
- + persona.getId() + ", "
- + account.getId() + ", "
- + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', "
- + confidence.getLevelId() + ", "
- + timeStampMillis.toString() + ", "
- + currentExaminer.getId()
- + ")";
- getCRInstance().executeInsertSQL(insertClause);
+ String insertSQL = "INSERT INTO persona_accounts (persona_id, account_id, justification, confidence_id, date_added, examiner_id ) "
+ + " VALUES ( ?, ?, ?, ?, ?, ?)";
+
+ List<Object> params = new ArrayList<>();
+ params.add(persona.getId());
+ params.add(account.getId());
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(confidence.getLevelId());
+ params.add(timeStampMillis);
+ params.add(currentExaminer.getId());
+
+ getCRInstance().executeCommand(insertSQL, params);
+
+ String querySQL = PERSONA_ACCOUNTS_QUERY_CLAUSE
+ + "WHERE persona_id = ? "
+ + " AND account_type_id = ?"
+ + " AND account_unique_identifier = ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(persona.getId());
+ queryParams.add(account.getAccountType().getAccountTypeId());
+ queryParams.add(account.getIdentifier());
- String queryClause = PERSONA_ACCOUNTS_QUERY_CLAUSE
- + "WHERE persona_id = " + persona.getId()
- + " AND account_type_id = " + account.getAccountType().getAccountTypeId()
- + " AND account_unique_identifier = '" + account.getIdentifier() + "'";
PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(querySQL, queryParams, queryCallback);
Collection<PersonaAccount> accounts = queryCallback.getPersonaAccountsList();
if (accounts.size() != 1) {
@@ -241,14 +249,17 @@ public class PersonaAccount {
* @return Collection of PersonaAccounts, may be empty.
*
* @throws CentralRepoException If there is an error in getting the
- * persona_account.
+ * persona_account.
*/
static Collection<PersonaAccount> getPersonaAccountsForPersona(long personaId) throws CentralRepoException {
- String queryClause = PERSONA_ACCOUNTS_QUERY_CLAUSE
- + " WHERE persona_accounts.persona_id = " + personaId;
+ String querySQL = PERSONA_ACCOUNTS_QUERY_CLAUSE
+ + " WHERE persona_accounts.persona_id = ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(personaId);
PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(querySQL, queryParams, queryCallback);
return queryCallback.getPersonaAccountsList();
}
@@ -261,15 +272,19 @@ public class PersonaAccount {
 * @return Collection of PersonaAccounts, may be empty.
*
* @throws CentralRepoException If there is an error in getting the
- * persona_account.
+ * persona_account.
*/
public static Collection<PersonaAccount> getPersonaAccountsForAccount(long accountId) throws CentralRepoException {
- String queryClause = PERSONA_ACCOUNTS_QUERY_CLAUSE
- + " WHERE persona_accounts.account_id = " + accountId
- + " AND personas.status_id != " + Persona.PersonaStatus.DELETED.getStatusId();
+ String querySQL = PERSONA_ACCOUNTS_QUERY_CLAUSE
+ + " WHERE persona_accounts.account_id = ?"
+ + " AND personas.status_id != ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(accountId);
+ queryParams.add(Persona.PersonaStatus.DELETED.getStatusId());
PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(querySQL, queryParams, queryCallback);
return queryCallback.getPersonaAccountsList();
}
@@ -278,22 +293,25 @@ public class PersonaAccount {
* account identifier substring.
*
* @param accountIdentifierSubstring Account identifier substring to search
- * for.
+ * for.
*
 * @return Collection of PersonaAccounts, may be empty.
*
* @throws CentralRepoException If there is an error in getting the
- * persona_account.
+ * persona_account.
*/
public static Collection<PersonaAccount> getPersonaAccountsForIdentifierLike(String accountIdentifierSubstring) throws CentralRepoException {
- String queryClause = PERSONA_ACCOUNTS_QUERY_CLAUSE
- + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER('%" + accountIdentifierSubstring + "%')"
- + " AND personas.status_id != " + Persona.PersonaStatus.DELETED.getStatusId();
+ String querySQL = PERSONA_ACCOUNTS_QUERY_CLAUSE
+ + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER(?)"
+ + " AND personas.status_id != ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add("%" + accountIdentifierSubstring + "%"); // substring match
+ queryParams.add(Persona.PersonaStatus.DELETED.getStatusId());
PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(querySQL, queryParams, queryCallback);
return queryCallback.getPersonaAccountsList();
-
}
/**
@@ -302,18 +320,23 @@ public class PersonaAccount {
* @param account Account to search for.
*
 * @return Collection of PersonaAccounts, may be empty if none were found or
- * CR is not enabled.
+ * CR is not enabled.
*
* @throws CentralRepoException
*/
public static Collection<PersonaAccount> getPersonaAccountsForAccount(Account account) throws CentralRepoException {
- String queryClause = PERSONA_ACCOUNTS_QUERY_CLAUSE
- + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER('%" + account.getTypeSpecificID() + "%')"
- + " AND type_name = '" + account.getAccountType().getTypeName() + "' "
- + " AND personas.status_id != " + Persona.PersonaStatus.DELETED.getStatusId();
+ String querySQL = PERSONA_ACCOUNTS_QUERY_CLAUSE
+ + " WHERE LOWER(accounts.account_unique_identifier) LIKE LOWER(?)"
+ + " AND type_name = ?"
+ + " AND personas.status_id != ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add("%" + account.getTypeSpecificID() + "%"); // substring match
+ queryParams.add(account.getAccountType().getTypeName());
+ queryParams.add(Persona.PersonaStatus.DELETED.getStatusId());
PersonaAccountsQueryCallback queryCallback = new PersonaAccountsQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(querySQL, queryParams, queryCallback);
return queryCallback.getPersonaAccountsList();
}
@@ -323,11 +346,14 @@ public class PersonaAccount {
* @param id row id for the account to be removed
*
* @throws CentralRepoException If there is an error in removing the
- * account.
+ * account.
*/
static void removePersonaAccount(long id) throws CentralRepoException {
- String deleteClause = " DELETE FROM persona_accounts WHERE id = " + id;
- getCRInstance().executeDeleteSQL(deleteClause);
+ String deleteSQL = " DELETE FROM persona_accounts WHERE id = ?";
+ List<Object> params = new ArrayList<>();
+ params.add(id);
+
+ getCRInstance().executeCommand(deleteSQL, params);
}
/**
@@ -336,11 +362,17 @@ public class PersonaAccount {
* @param id row id for the account to be removed
*
* @throws CentralRepoException If there is an error in removing the
- * account.
+ * account.
*/
static void modifyPersonaAccount(long id, Persona.Confidence confidence, String justification) throws CentralRepoException {
- String updateClause = "UPDATE persona_accounts SET confidence_id = " + confidence.getLevelId() + ", justification = '" + justification + "' WHERE id = " + id;
- getCRInstance().executeUpdateSQL(updateClause);
+ String updateSQL = "UPDATE persona_accounts SET confidence_id = ?, justification = ? WHERE id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(confidence.getLevelId());
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(id);
+
+ getCRInstance().executeCommand(updateSQL, params);
}
/**
@@ -378,10 +410,10 @@ public class PersonaAccount {
* @param personaId Id of the persona to look for.
*
* @return Collection of all accounts associated with the given persona, may
- * be empty.
+ * be empty.
*
* @throws CentralRepoException If there is an error in getting the
- * accounts.
+ * accounts.
*/
static Collection<CentralRepoAccount> getAccountsForPersona(long personaId) throws CentralRepoException {
String queryClause = "SELECT account_id, "
@@ -390,10 +422,13 @@ public class PersonaAccount {
+ " FROM persona_accounts "
+ " JOIN accounts as accounts on persona_accounts.account_id = accounts.id "
+ " JOIN account_types as account_types on accounts.account_type_id = account_types.id "
- + " WHERE persona_accounts.persona_id = " + personaId;
+ + " WHERE persona_accounts.persona_id = ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(personaId);
AccountsForPersonaQueryCallback queryCallback = new AccountsForPersonaQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(queryClause, queryParams, queryCallback);
return queryCallback.getAccountsList();
}
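
The query-callback pattern used by every lookup in this file, reduced to a skeleton. RowCounter is hypothetical and only for illustration; the real callbacks (e.g. PersonaAccountsQueryCallback) construct domain objects from each row instead of counting:

    import java.sql.ResultSet;
    import java.sql.SQLException;

    class RowCounter implements CentralRepositoryDbQueryCallback {

        private int rowCount = 0;

        @Override
        public void process(ResultSet rs) throws SQLException {
            // executeQuery() hands over the open ResultSet; iterate it here.
            while (rs.next()) {
                rowCount++; // real callbacks read columns, e.g. rs.getLong("id")
            }
        }

        int getRowCount() {
            return rowCount;
        }
    }
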
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java
index 63fcb346b9..d2df0c63d5 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaAlias.java
@@ -24,20 +24,20 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import org.apache.commons.lang3.StringUtils;
-import org.sleuthkit.datamodel.SleuthkitCase;
/**
- * This class abstracts an alias assigned to a Persona.
- * A Persona may have multiple aliases.
- *
+ * This class abstracts an alias assigned to a Persona. A Persona may have
+ * multiple aliases.
+ *
*/
public class PersonaAlias {
-
- private static final String SELECT_QUERY_BASE =
- "SELECT pa.id, pa.persona_id, pa.alias, pa.justification, pa.confidence_id, pa.date_added, pa.examiner_id, e.login_name, e.display_name "
- + "FROM persona_alias as pa "
- + "INNER JOIN examiners as e ON e.id = pa.examiner_id ";
+
+ private static final String SELECT_QUERY_BASE
+ = "SELECT pa.id, pa.persona_id, pa.alias, pa.justification, pa.confidence_id, pa.date_added, pa.examiner_id, e.login_name, e.display_name "
+ + "FROM persona_alias as pa "
+ + "INNER JOIN examiners as e ON e.id = pa.examiner_id ";
private final long id;
private final long personaId;
@@ -46,7 +46,7 @@ public class PersonaAlias {
private final Persona.Confidence confidence;
private final long dateAdded;
private final CentralRepoExaminer examiner;
-
+
public long getId() {
return id;
}
@@ -74,7 +74,7 @@ public class PersonaAlias {
public CentralRepoExaminer getExaminer() {
return examiner;
}
-
+
public PersonaAlias(long id, long personaId, String alias, String justification, Persona.Confidence confidence, long dateAdded, CentralRepoExaminer examiner) {
this.id = id;
this.personaId = personaId;
@@ -84,8 +84,8 @@ public class PersonaAlias {
this.dateAdded = dateAdded;
this.examiner = examiner;
}
-
- /**
+
+ /**
* Creates an alias for the specified Persona.
*
* @param persona Persona for which the alias is being added.
@@ -103,35 +103,42 @@ public class PersonaAlias {
Instant instant = Instant.now();
Long timeStampMillis = instant.toEpochMilli();
- String insertClause = " INTO persona_alias (persona_id, alias, justification, confidence_id, date_added, examiner_id ) "
- + "VALUES ( "
- + persona.getId() + ", "
- + "'" + alias + "', "
- + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', "
- + confidence.getLevelId() + ", "
- + timeStampMillis.toString() + ", "
- + examiner.getId()
- + ")";
+ String insertSQL = "INSERT INTO persona_alias (persona_id, alias, justification, confidence_id, date_added, examiner_id ) "
+ + " VALUES ( ?, ?, ?, ?, ?, ?)";
+
+ List<Object> params = new ArrayList<>();
+ params.add(persona.getId());
+ params.add(alias);
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(confidence.getLevelId());
+ params.add(timeStampMillis);
+ params.add(examiner.getId());
+
+ getCRInstance().executeCommand(insertSQL, params);
- getCRInstance().executeInsertSQL(insertClause);
-
String queryClause = SELECT_QUERY_BASE
- + "WHERE pa.persona_id = " + persona.getId()
- + " AND pa.alias = '" + alias + "'"
- + " AND pa.date_added = " + timeStampMillis
- + " AND pa.examiner_id = " + examiner.getId();
-
+ + "WHERE pa.persona_id = ?"
+ + " AND pa.alias = ?"
+ + " AND pa.date_added = ?"
+ + " AND pa.examiner_id = ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(persona.getId());
+ queryParams.add(alias);
+ queryParams.add(timeStampMillis);
+ queryParams.add(examiner.getId());
+
PersonaAliasesQueryCallback queryCallback = new PersonaAliasesQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
-
+ getCRInstance().executeQuery(queryClause, queryParams, queryCallback);
+
Collection<PersonaAlias> aliases = queryCallback.getAliases();
if (aliases.size() != 1) {
throw new CentralRepoException("Alias add query failed");
}
-
+
return aliases.iterator().next();
}
-
+
/**
* Removes a PersonaAlias.
*
@@ -140,10 +147,14 @@ public class PersonaAlias {
* @throws CentralRepoException If there is an error in removing the alias.
*/
static void removePersonaAlias(PersonaAlias alias) throws CentralRepoException {
- String deleteClause = " DELETE FROM persona_alias WHERE id = " + alias.getId();
- getCRInstance().executeDeleteSQL(deleteClause);
+ String deleteSQL = " DELETE FROM persona_alias WHERE id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(alias.getId());
+
+ getCRInstance().executeCommand(deleteSQL, params);
}
-
+
/**
 * Modifies a PersonaAlias.
*
@@ -153,17 +164,22 @@ public class PersonaAlias {
*/
static void modifyPersonaAlias(PersonaAlias alias, Persona.Confidence confidence, String justification) throws CentralRepoException {
CentralRepository cr = CentralRepository.getInstance();
-
+
if (cr == null) {
throw new CentralRepoException("Failed to modify persona alias, Central Repo is not enabled");
}
-
- String updateClause = "UPDATE persona_alias SET confidence_id = " + confidence.getLevelId() + ", justification = '" + justification + "' WHERE id = " + alias.id;
- cr.executeUpdateSQL(updateClause);
+
+ String updateClause = "UPDATE persona_alias SET confidence_id = ?, justification = ? WHERE id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(confidence.getLevelId());
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(alias.getId());
+
+ cr.executeCommand(updateClause, params);
}
-
-
- /**
+
+ /**
* Callback to process a Persona aliases query.
*/
static class PersonaAliasesQueryCallback implements CentralRepositoryDbQueryCallback {
@@ -195,7 +211,7 @@ public class PersonaAlias {
return Collections.unmodifiableCollection(personaAliases);
}
};
-
+
/**
* Gets all aliases for the persona with specified id.
*
@@ -205,29 +221,33 @@ public class PersonaAlias {
* @throws CentralRepoException If there is an error in retrieving aliases.
*/
public static Collection<PersonaAlias> getPersonaAliases(long personaId) throws CentralRepoException {
- String queryClause = SELECT_QUERY_BASE + "WHERE pa.persona_id = " + personaId;
+ String queryClause = SELECT_QUERY_BASE
+ + "WHERE pa.persona_id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(personaId);
PersonaAliasesQueryCallback queryCallback = new PersonaAliasesQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getAliases();
}
-
+
/**
- * Wraps the call to CentralRepository.getInstance() throwing an
- * exception if instance is null;
- *
+ * Wraps the call to CentralRepository.getInstance() throwing an exception
+ * if instance is null.
+ *
* @return Instance of CentralRepository
- *
- * @throws CentralRepoException
+ *
+ * @throws CentralRepoException
*/
- private static CentralRepository getCRInstance() throws CentralRepoException {
+ private static CentralRepository getCRInstance() throws CentralRepoException {
CentralRepository instance = CentralRepository.getInstance();
-
- if(instance == null) {
+
+ if (instance == null) {
throw new CentralRepoException("Failed to get instance of CentralRespository, CR was null");
}
-
+
return instance;
}
}
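
Design note on addPersonaAlias() above: the new row is recovered by re-querying on (persona_id, alias, date_added, examiner_id), which is unique in practice thanks to the millisecond timestamp. A plain-JDBC alternative, sketched only for comparison (this patch does not use it, and generated-key support varies by driver), would return the key directly:

    // assumes imports: java.sql.Connection, java.sql.PreparedStatement,
    //                  java.sql.ResultSet, java.sql.SQLException, java.sql.Statement
    static long insertAliasReturningId(Connection conn, long personaId, String alias,
            String justification, int confidenceId, long dateAdded, long examinerId) throws SQLException {
        String sql = "INSERT INTO persona_alias (persona_id, alias, justification, confidence_id, date_added, examiner_id) "
                + "VALUES (?, ?, ?, ?, ?, ?)";
        try (PreparedStatement ps = conn.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS)) {
            ps.setLong(1, personaId);
            ps.setString(2, alias);
            ps.setString(3, justification);
            ps.setInt(4, confidenceId);
            ps.setLong(5, dateAdded);
            ps.setLong(6, examinerId);
            ps.executeUpdate();
            try (ResultSet keys = ps.getGeneratedKeys()) {
                if (!keys.next()) {
                    throw new SQLException("No generated key returned");
                }
                return keys.getLong(1); // the auto-generated persona_alias.id
            }
        }
    }
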
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java
index a3fc414730..55830e9c54 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PersonaMetadata.java
@@ -24,23 +24,23 @@ import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.List;
import org.apache.commons.lang3.StringUtils;
-import org.sleuthkit.datamodel.SleuthkitCase;
/**
- * This class abstracts metadata associated with a Persona.
- * Metadata is in the form of a name/value pair.
- *
+ * This class abstracts metadata associated with a Persona. Metadata is in the
+ * form of a name/value pair.
+ *
* A Persona may have zero or more metadata.
- *
+ *
*/
public class PersonaMetadata {
-
- private static final String SELECT_QUERY_BASE =
- "SELECT pmd.id, pmd.persona_id, pmd.name, pmd.value, pmd.justification, pmd.confidence_id, pmd.date_added, pmd.examiner_id, e.login_name, e.display_name "
- + "FROM persona_metadata as pmd "
- + "INNER JOIN examiners as e ON e.id = pmd.examiner_id ";
-
+
+ private static final String SELECT_QUERY_BASE
+ = "SELECT pmd.id, pmd.persona_id, pmd.name, pmd.value, pmd.justification, pmd.confidence_id, pmd.date_added, pmd.examiner_id, e.login_name, e.display_name "
+ + "FROM persona_metadata as pmd "
+ + "INNER JOIN examiners as e ON e.id = pmd.examiner_id ";
+
private final long id;
private final long personaId;
private final String name;
@@ -49,7 +49,7 @@ public class PersonaMetadata {
private final Persona.Confidence confidence;
private final long dateAdded;
private final CentralRepoExaminer examiner;
-
+
public long getId() {
return id;
}
@@ -81,7 +81,7 @@ public class PersonaMetadata {
public CentralRepoExaminer getExaminer() {
return examiner;
}
-
+
public PersonaMetadata(long id, long personaId, String name, String value, String justification, Persona.Confidence confidence, long dateAdded, CentralRepoExaminer examiner) {
this.id = id;
this.personaId = personaId;
@@ -92,8 +92,8 @@ public class PersonaMetadata {
this.dateAdded = dateAdded;
this.examiner = examiner;
}
-
- /**
+
+ /**
* Adds specified metadata to the given persona.
*
* @param personaId Id of persona to add metadata for.
@@ -112,67 +112,87 @@ public class PersonaMetadata {
Instant instant = Instant.now();
Long timeStampMillis = instant.toEpochMilli();
- String insertClause = " INTO persona_metadata (persona_id, name, value, justification, confidence_id, date_added, examiner_id ) "
- + "VALUES ( "
- + personaId + ", "
- + "'" + name + "', "
- + "'" + value + "', "
- + "'" + ((StringUtils.isBlank(justification) ? "" : SleuthkitCase.escapeSingleQuotes(justification))) + "', "
- + confidence.getLevelId() + ", "
- + timeStampMillis.toString() + ", "
- + examiner.getId()
- + ")";
+ String insertSQL = "INSERT INTO persona_metadata (persona_id, name, value, justification, confidence_id, date_added, examiner_id ) "
+ + "VALUES ( ?, ?, ?, ?, ?, ?, ?)";
+
+ List<Object> params = new ArrayList<>();
+ params.add(personaId);
+ params.add(name);
+ params.add(value);
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(confidence.getLevelId());
+ params.add(timeStampMillis);
+ params.add(examiner.getId());
+
+ getCRInstance().executeCommand(insertSQL, params);
- getCRInstance().executeInsertSQL(insertClause);
-
String queryClause = SELECT_QUERY_BASE
- + "WHERE pmd.persona_id = " + personaId
- + " AND pmd.name = '" + name + "'"
- + " AND pmd.value = '" + value + "'"
- + " AND pmd.date_added = " + timeStampMillis
- + " AND pmd.examiner_id = " + examiner.getId();
-
+ + "WHERE pmd.persona_id = ?"
+ + " AND pmd.name = ?"
+ + " AND pmd.value = ?"
+ + " AND pmd.date_added = ?"
+ + " AND pmd.examiner_id = ?";
+
+ List<Object> queryParams = new ArrayList<>();
+ queryParams.add(personaId);
+ queryParams.add(name);
+ queryParams.add(value);
+ queryParams.add(timeStampMillis);
+ queryParams.add(examiner.getId());
+
PersonaMetadataQueryCallback queryCallback = new PersonaMetadataQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
-
+ getCRInstance().executeQuery(queryClause, queryParams, queryCallback);
+
Collection<PersonaMetadata> metadata = queryCallback.getMetadataList();
if (metadata.size() != 1) {
throw new CentralRepoException("Metadata add query failed");
}
-
+
return metadata.iterator().next();
}
-
+
/**
* Removes the given PersonaMetadata
*
* @param metadata Metadata to remove.
*
- * @throws CentralRepoException If there is an error in removing the metadata.
+ * @throws CentralRepoException If there is an error in removing the
+ * metadata.
*/
static void removePersonaMetadata(PersonaMetadata metadata) throws CentralRepoException {
- String deleteClause = " DELETE FROM persona_metadata WHERE id = " + metadata.getId();
- getCRInstance().executeDeleteSQL(deleteClause);
+ String deleteSql = " DELETE FROM persona_metadata WHERE id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(metadata.getId());
+
+ getCRInstance().executeCommand(deleteSql, params);
}
-
+
/**
* Modifies the given PersonaMetadata
*
* @param metadata Metadata to modify.
*
- * @throws CentralRepoException If there is an error in modifying the metadata.
+ * @throws CentralRepoException If there is an error in modifying the
+ * metadata.
*/
static void modifyPersonaMetadata(PersonaMetadata metadata, Persona.Confidence confidence, String justification) throws CentralRepoException {
CentralRepository cr = CentralRepository.getInstance();
-
+
if (cr == null) {
throw new CentralRepoException("Failed to modify persona metadata, Central Repo is not enabled");
}
-
- String updateClause = "UPDATE persona_metadata SET confidence_id = " + confidence.getLevelId() + ", justification = '" + justification + "' WHERE id = " + metadata.id;
- cr.executeUpdateSQL(updateClause);
+
+ String updateSql = "UPDATE persona_metadata SET confidence_id = ?, justification = ? WHERE id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(confidence.getLevelId());
+ params.add(StringUtils.isBlank(justification) ? "" : justification);
+ params.add(metadata.id);
+
+ getCRInstance().executeCommand(updateSql, params);
}
-
+
/**
* Callback to process a Persona metadata query.
*/
@@ -206,8 +226,8 @@ public class PersonaMetadata {
return Collections.unmodifiableCollection(personaMetadataList);
}
};
-
- /**
+
+ /**
* Gets all metadata for the persona with specified id.
*
* @param personaId Id of the persona for which to get the metadata.
@@ -216,30 +236,34 @@ public class PersonaMetadata {
* @throws CentralRepoException If there is an error in retrieving aliases.
*/
static Collection<PersonaMetadata> getPersonaMetadata(long personaId) throws CentralRepoException {
- String queryClause = SELECT_QUERY_BASE + "WHERE pmd.persona_id = " + personaId;
-
+ String queryClause = SELECT_QUERY_BASE
+ + "WHERE pmd.persona_id = ?";
+
+ List<Object> params = new ArrayList<>();
+ params.add(personaId);
+
PersonaMetadataQueryCallback queryCallback = new PersonaMetadataQueryCallback();
- getCRInstance().executeSelectSQL(queryClause, queryCallback);
+ getCRInstance().executeQuery(queryClause, params, queryCallback);
return queryCallback.getMetadataList();
}
-
+
/**
- * Wraps the call to CentralRepository.getInstance() throwing an
- * exception if instance is null;
- *
+ * Wraps the call to CentralRepository.getInstance() throwing an exception
+ * if instance is null.
+ *
* @return Instance of CentralRepository
- *
- * @throws CentralRepoException
+ *
+ * @throws CentralRepoException
*/
- private static CentralRepository getCRInstance() throws CentralRepoException {
+ private static CentralRepository getCRInstance() throws CentralRepoException {
CentralRepository instance = CentralRepository.getInstance();
-
- if(instance == null) {
+
+ if (instance == null) {
throw new CentralRepoException("Failed to get instance of CentralRespository, CR was null");
}
-
+
return instance;
}
}
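
All of the parameterized calls above funnel into RdbmsCentralRepo (next file), which binds each value positionally with setObject. The binding loop, extracted here as a standalone sketch:

    // assumes imports: java.sql.PreparedStatement, java.sql.SQLException, java.util.List
    // Mirrors the loop in RdbmsCentralRepo.executeCommand()/executeQuery() below.
    static void bindParams(PreparedStatement preparedStatement, List<Object> params) throws SQLException {
        if (params != null) {
            int paramIndex = 1; // JDBC parameter indexes start at 1
            for (Object param : params) {
                // setObject lets the driver map String/Long/Integer to the column's SQL type.
                preparedStatement.setObject(paramIndex, param);
                paramIndex += 1;
            }
        }
    }
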
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
index 02c9028d27..4885dd3cf4 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/RdbmsCentralRepo.java
@@ -41,7 +41,6 @@ import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
-import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
@@ -1087,25 +1086,22 @@ abstract class RdbmsCentralRepo implements CentralRepository {
String normalizedAccountID = CentralRepoAccount.normalizeAccountIdentifier(crAccountType, accountUniqueID);
CentralRepoAccount account = getAccount(crAccountType, normalizedAccountID);
- // account not found in the table, create it
- if (null == account) {
+ String insertSQL = "INSERT INTO accounts (account_type_id, account_unique_identifier) "
+ + "VALUES (?, ?) " + getConflictClause();
- String query = "INSERT INTO accounts (account_type_id, account_unique_identifier) "
- + "VALUES ( " + crAccountType.getAccountTypeId() + ", '"
- + normalizedAccountID + "' )";
+ try (Connection connection = connect();
+ PreparedStatement preparedStatement = connection.prepareStatement(insertSQL);) {
- try (Connection connection = connect();
- Statement s = connection.createStatement();) {
+ preparedStatement.setInt(1, crAccountType.getAccountTypeId());
+ preparedStatement.setString(2, normalizedAccountID);
- s.execute(query);
- // get the account from the db - should exist now.
- account = getAccount(crAccountType, accountUniqueID);
- } catch (SQLException ex) {
- throw new CentralRepoException("Error adding an account to CR database.", ex);
- }
+ preparedStatement.executeUpdate();
+
+ // get the account from the db - should exist now.
+ return getAccount(crAccountType, normalizedAccountID);
+ } catch (SQLException ex) {
+ throw new CentralRepoException("Error adding an account to CR database.", ex);
}
-
- return account;
}
@Override
@@ -2545,89 +2541,52 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}
@Override
- public void executeInsertSQL(String insertClause) throws CentralRepoException {
-
- if (insertClause == null) {
- throw new CentralRepoException("Insert SQL is null");
- }
-
- String sql = getPlatformSpecificInsertSQL(insertClause);
- try (Connection conn = connect();
- PreparedStatement preparedStatement = conn.prepareStatement(sql);) {
+ public void executeCommand(String sql, List<Object> params) throws CentralRepoException {
+
+ try (Connection conn = connect();
+ PreparedStatement preparedStatement = conn.prepareStatement(sql);) {
+
+ // Fill in the params
+ if (params != null) {
+ int paramIndex = 1;
+ for (Object param : params) {
+ preparedStatement.setObject(paramIndex, param);
+ paramIndex += 1;
+ }
+ }
+ // execute the prepared statement
preparedStatement.executeUpdate();
} catch (SQLException ex) {
- throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", sql, ex.getMessage()), ex);
+ throw new CentralRepoException(String.format("Error executing prepared statement for SQL %s", sql), ex);
}
}
@Override
- public void executeSelectSQL(String selectSQL, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException {
+ public void executeQuery(String sql, List<Object> params, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException {
if (queryCallback == null) {
throw new CentralRepoException("Query callback is null");
}
- if (selectSQL == null) {
- throw new CentralRepoException("Select SQL is null");
- }
-
- StringBuilder sqlSb = new StringBuilder(QUERY_STR_MAX_LEN);
- if (selectSQL.trim().toUpperCase().startsWith("SELECT") == false) {
- sqlSb.append("SELECT ");
- }
-
- sqlSb.append(selectSQL);
-
- try (Connection conn = connect();
- PreparedStatement preparedStatement = conn.prepareStatement(sqlSb.toString());
- ResultSet resultSet = preparedStatement.executeQuery();) {
- queryCallback.process(resultSet);
+
+ try (Connection conn = connect();
+ PreparedStatement preparedStatement = conn.prepareStatement(sql);) {
+
+ // fill in the params
+ if (params != null) {
+ int paramIndex = 1;
+ for (Object param : params) {
+ preparedStatement.setObject(paramIndex, param);
+ paramIndex += 1;
+ }
+ }
+ // execute query, and the callback to process result
+ try (ResultSet resultSet = preparedStatement.executeQuery();) {
+ queryCallback.process(resultSet);
+ }
} catch (SQLException ex) {
- throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", selectSQL, ex.getMessage()), ex);
- }
- }
-
- @Override
- public void executeUpdateSQL(String updateSQL) throws CentralRepoException {
-
- if (updateSQL == null) {
- throw new CentralRepoException("Update SQL is null");
- }
-
- StringBuilder sqlSb = new StringBuilder(QUERY_STR_MAX_LEN);
- if (updateSQL.trim().toUpperCase().startsWith("UPDATE") == false) {
- sqlSb.append("UPDATE ");
- }
-
- sqlSb.append(updateSQL);
-
- try (Connection conn = connect();
- PreparedStatement preparedStatement = conn.prepareStatement(sqlSb.toString());) {
- preparedStatement.executeUpdate();
- } catch (SQLException ex) {
- throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", updateSQL, ex.getMessage()), ex);
- }
- }
-
- @Override
- public void executeDeleteSQL(String deleteSQL) throws CentralRepoException {
-
- if (deleteSQL == null) {
- throw new CentralRepoException("Delete SQL is null");
- }
-
- StringBuilder sqlSb = new StringBuilder(QUERY_STR_MAX_LEN);
- if (deleteSQL.trim().toUpperCase().startsWith("DELETE") == false) {
- sqlSb.append("DELETE ");
- }
-
- sqlSb.append(deleteSQL);
-
- try (Connection conn = connect();
- PreparedStatement preparedStatement = conn.prepareStatement(sqlSb.toString());) {
- preparedStatement.executeUpdate();
- } catch (SQLException ex) {
- throw new CentralRepoException(String.format("Error running SQL %s, exception = %s", deleteSQL, ex.getMessage()), ex);
- }
+ throw new CentralRepoException(String.format("Error executing prepared statement for SQL query %s", sql), ex);
+ }
}
@Override
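
The select path works the same way, except the caller hands in a callback that drains the ResultSet before the statement and connection close. A minimal stand-in for that contract (the QueryCallback interface here is a simplified assumption, not the real CentralRepositoryDbQueryCallback, which also throws CentralRepoException):

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.util.List;

    public class QuerySketch {

        /** Simplified stand-in for CentralRepositoryDbQueryCallback. */
        interface QueryCallback {
            void process(ResultSet rs) throws SQLException;
        }

        static void executeQuery(Connection conn, String sql, List<Object> params,
                QueryCallback callback) throws SQLException {
            try (PreparedStatement ps = conn.prepareStatement(sql)) {
                int index = 1;
                for (Object param : params) {
                    ps.setObject(index++, param);
                }
                // The ResultSet must be consumed inside this block; it is
                // closed as soon as the try exits.
                try (ResultSet rs = ps.executeQuery()) {
                    callback.process(rs);
                }
            }
        }
    }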
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java
index 3d2dfc09b0..df0327469d 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteCentralRepo.java
@@ -832,48 +832,27 @@ final class SqliteCentralRepo extends RdbmsCentralRepo {
releaseSharedLock();
}
}
-
+
@Override
- public void executeInsertSQL(String insertSQL) throws CentralRepoException {
+ public void executeCommand(String sql, List<Object> params) throws CentralRepoException {
try {
- acquireSharedLock();
- super.executeInsertSQL(insertSQL);
+ acquireExclusiveLock();
+ super.executeCommand(sql, params);
} finally {
- releaseSharedLock();
+ releaseExclusiveLock();
}
}
@Override
- public void executeSelectSQL(String selectSQL, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException {
+ public void executeQuery(String sql, List<Object> params, CentralRepositoryDbQueryCallback queryCallback) throws CentralRepoException {
try {
acquireSharedLock();
- super.executeSelectSQL(selectSQL, queryCallback);
+ super.executeQuery(sql, params, queryCallback);
} finally {
releaseSharedLock();
}
}
- @Override
- public void executeUpdateSQL(String updateSQL) throws CentralRepoException {
- try {
- acquireSharedLock();
- super.executeUpdateSQL(updateSQL);
- } finally {
- releaseSharedLock();
- }
- }
-
- @Override
- public void executeDeleteSQL(String deleteSQL) throws CentralRepoException {
- try {
- acquireSharedLock();
- super.executeDeleteSQL(deleteSQL);
- } finally {
- releaseSharedLock();
- }
- }
-
-
/**
* Check whether a reference set with the given name/version is in the
* central repo. Used to check for name collisions when creating reference
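
Note the lock change in executeCommand() above: the old executeInsertSQL wrapped writes in the shared lock, while the replacement correctly takes the exclusive lock, since SQLite permits only one writer at a time. The acquireSharedLock/acquireExclusiveLock helpers behave like a read-write lock; a JDK analog of the pattern, purely for illustration (class and method names here are not from the Autopsy code):

    import java.util.concurrent.locks.ReadWriteLock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    public class WriteLockSketch {

        private final ReadWriteLock rwLock = new ReentrantReadWriteLock();

        /** Writes must be exclusive: one writer, no concurrent readers. */
        void executeCommand(Runnable work) {
            rwLock.writeLock().lock();
            try {
                work.run();
            } finally {
                rwLock.writeLock().unlock();
            }
        }

        /** Reads may proceed concurrently under the shared lock. */
        void executeQuery(Runnable work) {
            rwLock.readLock().lock();
            try {
                work.run();
            } finally {
                rwLock.readLock().unlock();
            }
        }
    }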
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties
index 26463c8818..5a218ba942 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties
@@ -70,7 +70,7 @@ PersonasTopComponent.deleteBtn.text=Delete Persona
PersonasTopComponent.editBtn.text=Edit Persona
PersonasTopComponent.createBtn.text=New Persona
PersonasTopComponent.createAccountBtn.text=Create Account
-PersonasTopComponent.searchBtn.text=Search
+PersonasTopComponent.searchBtn.text=Show
PersonasTopComponent.resultsTable.columnModel.title1=Name
PersonasTopComponent.resultsTable.columnModel.title0=ID
PersonasTopComponent.resultsTable.toolTipText=
@@ -80,3 +80,4 @@ CreatePersonaAccountDialog.identifierTextField.text=
CreatePersonaAccountDialog.identiferLbl.text=Identifier:
CreatePersonaAccountDialog.okBtn.text=OK
PersonasTopComponent.introText.text=Personas represent an online identity. They span cases and are stored in the Central Repository based on accounts that were found in artifacts. You can create, edit, and delete personas here.
+PersonasTopComponent.cbFilterByKeyword.text=Filter personas by keyword
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties-MERGED
index 2c154d13f2..123bd71800 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/Bundle.properties-MERGED
@@ -116,7 +116,7 @@ PersonasTopComponent.deleteBtn.text=Delete Persona
PersonasTopComponent.editBtn.text=Edit Persona
PersonasTopComponent.createBtn.text=New Persona
PersonasTopComponent.createAccountBtn.text=Create Account
-PersonasTopComponent.searchBtn.text=Search
+PersonasTopComponent.searchBtn.text=Show
PersonasTopComponent.resultsTable.columnModel.title1=Name
PersonasTopComponent.resultsTable.columnModel.title0=ID
PersonasTopComponent.resultsTable.toolTipText=
@@ -126,10 +126,12 @@ CreatePersonaAccountDialog.identifierTextField.text=
CreatePersonaAccountDialog.identiferLbl.text=Identifier:
CreatePersonaAccountDialog.okBtn.text=OK
PersonasTopComponent.introText.text=Personas represent an online identity. They span cases and are stored in the Central Repository based on accounts that were found in artifacts. You can create, edit, and delete personas here.
+PersonasTopComponent.cbFilterByKeyword.text=Filter personas by keyword
PersonasTopComponent_delete_confirmation_msg=Are you sure you want to delete this persona?
PersonasTopComponent_delete_confirmation_Title=Are you sure?
PersonasTopComponent_delete_exception_msg=Failed to delete persona.
PersonasTopComponent_delete_exception_Title=Delete failure
PersonasTopComponent_Name=Personas
+PersonasTopComponent_noCR_msg=Central Repository is not enabled.
PersonasTopComponent_search_exception_msg=Failed to search personas.
PersonasTopComponent_search_exception_Title=Search failure
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/CreatePersonaAccountDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/CreatePersonaAccountDialog.java
index 2afcdf0676..cfdf990710 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/CreatePersonaAccountDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/CreatePersonaAccountDialog.java
@@ -28,6 +28,7 @@ import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.ListCellRenderer;
import javax.swing.SwingUtilities;
+import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount;
@@ -246,7 +247,7 @@ public class CreatePersonaAccountDialog extends JDialog {
"CreatePersonaAccountDialog_success_msg=Account added.",
})
private void okBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okBtnActionPerformed
- if (identifierTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(identifierTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaAccountDialog_identifier_empty_msg(),
Bundle.PersonaAccountDialog_identifier_empty_Title(),
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAccountDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAccountDialog.java
index b6854b1160..558f92619e 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAccountDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAccountDialog.java
@@ -28,6 +28,7 @@ import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.ListCellRenderer;
import javax.swing.SwingUtilities;
+import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoAccount;
@@ -282,14 +283,14 @@ public class PersonaAccountDialog extends JDialog {
"PersonaAccountDialog_invalid_account_msg=Account identifier is not valid.",
})
private void okBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okBtnActionPerformed
- if (identifierTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(identifierTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaAccountDialog_identifier_empty_msg(),
Bundle.PersonaAccountDialog_identifier_empty_Title(),
JOptionPane.ERROR_MESSAGE);
return;
}
- if (justificationTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(justificationTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaDetailsPanel_empty_justification_msg(),
Bundle.PersonaDetailsPanel_empty_justification_Title(),
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAliasDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAliasDialog.java
index 46f70749a6..1ada0a1025 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAliasDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaAliasDialog.java
@@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.centralrepository.persona;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
+import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;
@@ -200,14 +201,14 @@ public class PersonaAliasDialog extends JDialog {
"PersonaAliasDialog_dup_Title=Alias add failure",
"PersonaAliasDialog_dup_msg=This alias has already been added to this persona.",})
private void okBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okBtnActionPerformed
- if (aliasTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(aliasTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaAliasDialog_empty_msg(),
Bundle.PersonaAliasDialog_empty_Title(),
JOptionPane.ERROR_MESSAGE);
return;
}
- if (justificationTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(justificationTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaDetailsPanel_empty_justification_msg(),
Bundle.PersonaDetailsPanel_empty_justification_Title(),
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaDetailsPanel.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaDetailsPanel.java
index 032505a9a4..edcb36c353 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaDetailsPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaDetailsPanel.java
@@ -34,6 +34,7 @@ import javax.swing.JTable;
import javax.swing.ListSelectionModel;
import javax.swing.event.ListSelectionEvent;
import javax.swing.table.DefaultTableModel;
+import org.apache.commons.lang.StringUtils;
import org.openide.windows.TopComponent;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.RetainLocation;
@@ -891,14 +892,14 @@ public final class PersonaDetailsPanel extends javax.swing.JPanel {
return null;
}
- if (commentField.getText().isEmpty()) {
+ if (StringUtils.isBlank(commentField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaDetailsPanel_EmptyComment_msg(),
Bundle.PersonaDetailsPanel_EmptyComment_Title(),
JOptionPane.ERROR_MESSAGE);
return null;
}
- if (nameField.getText().isEmpty()) {
+ if (StringUtils.isBlank(nameField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaDetailsPanel_EmptyName_msg(),
Bundle.PersonaDetailsPanel_EmptyName_Title(),
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaMetadataDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaMetadataDialog.java
index 40cc893ed1..15fad56114 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaMetadataDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonaMetadataDialog.java
@@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.centralrepository.persona;
import javax.swing.JDialog;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
+import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;
@@ -216,14 +217,14 @@ public class PersonaMetadataDialog extends JDialog {
"AddMetadataDialog_empty_name_Title=Missing field(s)",
"AddMetadataDialog_empty_name_msg=A metadata entry cannot have an empty name or value.",})
private void okBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okBtnActionPerformed
- if (nameTextField.getText().isEmpty() || valueTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(nameTextField.getText()) || StringUtils.isBlank(valueTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.AddMetadataDialog_empty_name_msg(),
Bundle.AddMetadataDialog_empty_name_Title(),
JOptionPane.ERROR_MESSAGE);
return;
}
- if (justificationTextField.getText().isEmpty()) {
+ if (StringUtils.isBlank(justificationTextField.getText())) {
JOptionPane.showMessageDialog(this,
Bundle.PersonaDetailsPanel_empty_justification_msg(),
Bundle.PersonaDetailsPanel_empty_justification_Title(),
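
The isEmpty-to-isBlank swaps in these persona dialogs close a small validation gap: a value consisting only of spaces passed the old check and could be stored as a meaningless identifier, alias, or justification. The difference in one runnable snippet (commons-lang3 is already a Core dependency):

    import org.apache.commons.lang3.StringUtils;

    public class BlankVsEmpty {
        public static void main(String[] args) {
            System.out.println("   ".isEmpty());            // false: length > 0
            System.out.println(StringUtils.isBlank("   ")); // true: whitespace-only
            System.out.println(StringUtils.isBlank(null));  // true: also null-safe
        }
    }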
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.form b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.form
index 45edb3e159..9df2d2ba55 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.form
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.form
@@ -32,7 +32,7 @@
-
+
@@ -82,7 +82,7 @@
-
+
@@ -105,8 +105,9 @@
+
-
+
@@ -116,7 +117,8 @@
-
+
+
@@ -125,7 +127,7 @@
-
+
@@ -252,6 +254,16 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.java b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.java
index 7b1b95afa3..d38b379078 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/persona/PersonasTopComponent.java
@@ -27,6 +27,8 @@ import java.util.logging.Level;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.ListSelectionModel;
+import javax.swing.event.AncestorListener;
+import javax.swing.event.AncestorEvent;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.DefaultTableModel;
@@ -37,6 +39,7 @@ import org.openide.windows.RetainLocation;
import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -49,7 +52,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
@RetainLocation("personas")
@SuppressWarnings("PMD.SingularField")
public final class PersonasTopComponent extends TopComponent {
-
+
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(PersonasTopComponent.class.getName());
@@ -57,17 +60,37 @@ public final class PersonasTopComponent extends TopComponent {
private List<Persona> currentResults = null;
private Persona selectedPersona = null;
+ /**
+ * Listens for when this component is rendered and executes a search to
+ * update the GUI when it is displayed.
+ */
+ private final AncestorListener onAddListener = new AncestorListener() {
+ @Override
+ public void ancestorAdded(AncestorEvent event) {
+ resetSearchControls();
+ setKeywordSearchEnabled(false, true);
+ }
+
+ @Override
+ public void ancestorRemoved(AncestorEvent event) {
+ //Empty
+ }
+
+ @Override
+ public void ancestorMoved(AncestorEvent event) {
+ //Empty
+ }
+ };
+
@Messages({
"PersonasTopComponent_Name=Personas",
"PersonasTopComponent_delete_exception_Title=Delete failure",
"PersonasTopComponent_delete_exception_msg=Failed to delete persona.",
"PersonasTopComponent_delete_confirmation_Title=Are you sure?",
- "PersonasTopComponent_delete_confirmation_msg=Are you sure you want to delete this persona?",
- })
+ "PersonasTopComponent_delete_confirmation_msg=Are you sure you want to delete this persona?",})
public PersonasTopComponent() {
initComponents();
setName(Bundle.PersonasTopComponent_Name());
- executeSearch();
searchBtn.addActionListener(new ActionListener() {
@Override
@@ -91,14 +114,14 @@ public final class PersonasTopComponent extends TopComponent {
PersonaDetailsMode.CREATE, selectedPersona, new CreateEditCallbackImpl());
}
});
-
+
deleteBtn.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
NotifyDescriptor confirm = new NotifyDescriptor.Confirmation(
- Bundle.PersonasTopComponent_delete_confirmation_msg(),
- Bundle.PersonasTopComponent_delete_confirmation_Title(),
- NotifyDescriptor.YES_NO_OPTION);
+ Bundle.PersonasTopComponent_delete_confirmation_msg(),
+ Bundle.PersonasTopComponent_delete_confirmation_Title(),
+ NotifyDescriptor.YES_NO_OPTION);
DialogDisplayer.getDefault().notify(confirm);
if (confirm.getValue().equals(NotifyDescriptor.YES_OPTION)) {
try {
@@ -108,9 +131,9 @@ public final class PersonasTopComponent extends TopComponent {
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Failed to delete persona: " + selectedPersona.getName(), ex);
JOptionPane.showMessageDialog(PersonasTopComponent.this,
- Bundle.PersonasTopComponent_delete_exception_msg(),
- Bundle.PersonasTopComponent_delete_exception_Title(),
- JOptionPane.ERROR_MESSAGE);
+ Bundle.PersonasTopComponent_delete_exception_msg(),
+ Bundle.PersonasTopComponent_delete_exception_Title(),
+ JOptionPane.ERROR_MESSAGE);
return;
}
executeSearch();
@@ -126,21 +149,23 @@ public final class PersonasTopComponent extends TopComponent {
handleSelectionChange(e);
}
});
-
+
searchNameRadio.addActionListener((ActionEvent e) -> {
searchField.setText("");
});
-
+
searchAccountRadio.addActionListener((ActionEvent e) -> {
searchField.setText("");
});
-
+
createAccountBtn.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
new CreatePersonaAccountDialog(detailsPanel);
}
});
+
+ addAncestorListener(onAddListener);
}
/**
@@ -161,6 +186,34 @@ public final class PersonasTopComponent extends TopComponent {
}
}
+ /**
+ * Resets search controls to default state.
+ */
+ private void resetSearchControls() {
+ searchField.setText("");
+ searchNameRadio.setSelected(true);
+ searchAccountRadio.setSelected(false);
+ }
+
+ /**
+ * Updates the GUI to reflect whether keyword search is enabled.
+ *
+ * @param selected Whether or not keyword search is enabled.
+ * @param setFilterCb Whether or not the filter checkbox should be
+ * manipulated as a part of this change.
+ */
+ private void setKeywordSearchEnabled(boolean selected, boolean setFilterCb) {
+ if (setFilterCb && cbFilterByKeyword.isSelected() != selected) {
+ cbFilterByKeyword.setSelected(selected);
+ }
+
+ searchField.setEnabled(selected);
+ searchNameRadio.setEnabled(selected);
+ searchAccountRadio.setEnabled(selected);
+
+ executeSearch();
+ }
+
void setPersona(int index) {
Persona persona = currentResults.get(index);
selectedPersona = persona;
@@ -224,14 +277,29 @@ public final class PersonasTopComponent extends TopComponent {
@Messages({
"PersonasTopComponent_search_exception_Title=Search failure",
- "PersonasTopComponent_search_exception_msg=Failed to search personas.",})
+ "PersonasTopComponent_search_exception_msg=Failed to search personas.",
+ "PersonasTopComponent_noCR_msg=Central Repository is not enabled.",})
private void executeSearch() {
+ // To prevent downstream failures, only execute search if central repository is enabled
+ if (!CentralRepository.isEnabled()) {
+ logger.log(Level.SEVERE, "Central Repository is not enabled, but execute search was called.");
+ JOptionPane.showMessageDialog(this,
+ Bundle.PersonasTopComponent_noCR_msg(),
+ Bundle.PersonasTopComponent_search_exception_Title(),
+ JOptionPane.ERROR_MESSAGE);
+ return;
+ }
+
Collection<Persona> results;
try {
- if (searchNameRadio.isSelected()) {
- results = Persona.getPersonaByName(searchField.getText());
+ if (cbFilterByKeyword.isSelected()) {
+ if (searchNameRadio.isSelected()) {
+ results = Persona.getPersonaByName(searchField.getText());
+ } else {
+ results = Persona.getPersonaByAccountIdentifierLike(searchField.getText());
+ }
} else {
- results = Persona.getPersonaByAccountIdentifierLike(searchField.getText());
+ results = Persona.getPersonaByName("");
}
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Failed to search personas", ex);
@@ -278,6 +346,7 @@ public final class PersonasTopComponent extends TopComponent {
deleteBtn = new javax.swing.JButton();
createButtonSeparator = new javax.swing.JSeparator();
createBtn = new javax.swing.JButton();
+ cbFilterByKeyword = new javax.swing.JCheckBox();
detailsScrollPane = new javax.swing.JScrollPane();
detailsPanel = new org.sleuthkit.autopsy.centralrepository.persona.PersonaDetailsPanel();
@@ -326,6 +395,13 @@ public final class PersonasTopComponent extends TopComponent {
org.openide.awt.Mnemonics.setLocalizedText(createBtn, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.createBtn.text")); // NOI18N
+ org.openide.awt.Mnemonics.setLocalizedText(cbFilterByKeyword, org.openide.util.NbBundle.getMessage(PersonasTopComponent.class, "PersonasTopComponent.cbFilterByKeyword.text")); // NOI18N
+ cbFilterByKeyword.addActionListener(new java.awt.event.ActionListener() {
+ public void actionPerformed(java.awt.event.ActionEvent evt) {
+ cbFilterByKeywordActionPerformed(evt);
+ }
+ });
+
javax.swing.GroupLayout searchPanelLayout = new javax.swing.GroupLayout(searchPanel);
searchPanel.setLayout(searchPanelLayout);
searchPanelLayout.setHorizontalGroup(
@@ -350,14 +426,16 @@ public final class PersonasTopComponent extends TopComponent {
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(editBtn)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
- .addComponent(deleteBtn)))
- .addGap(0, 0, Short.MAX_VALUE)))
+ .addComponent(deleteBtn))
+ .addComponent(cbFilterByKeyword))
+ .addGap(0, 50, Short.MAX_VALUE)))
.addContainerGap())
);
searchPanelLayout.setVerticalGroup(
searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(searchPanelLayout.createSequentialGroup()
- .addContainerGap()
+ .addComponent(cbFilterByKeyword)
+ .addGap(1, 1, 1)
.addComponent(searchField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
@@ -365,7 +443,7 @@ public final class PersonasTopComponent extends TopComponent {
.addComponent(searchAccountRadio)
.addComponent(searchBtn))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
- .addComponent(resultsPane, javax.swing.GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE)
+ .addComponent(resultsPane, javax.swing.GroupLayout.PREFERRED_SIZE, 302, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(searchPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(editBtn)
@@ -396,11 +474,16 @@ public final class PersonasTopComponent extends TopComponent {
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addComponent(introTextScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 49, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
- .addComponent(mainSplitPane, javax.swing.GroupLayout.DEFAULT_SIZE, 470, Short.MAX_VALUE))
+ .addComponent(mainSplitPane, javax.swing.GroupLayout.DEFAULT_SIZE, 489, Short.MAX_VALUE))
);
}// //GEN-END:initComponents
+ private void cbFilterByKeywordActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cbFilterByKeywordActionPerformed
+ setKeywordSearchEnabled(cbFilterByKeyword.isSelected(), false);
+ }//GEN-LAST:event_cbFilterByKeywordActionPerformed
+
// Variables declaration - do not modify//GEN-BEGIN:variables
+ private javax.swing.JCheckBox cbFilterByKeyword;
private javax.swing.JButton createAccountBtn;
private javax.swing.JButton createBtn;
private javax.swing.JSeparator createButtonSeparator;
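
Replacing the constructor-time executeSearch() call with an AncestorListener means the persona list now refreshes every time the window is (re)opened, not just once when the component is first constructed. The general shape of that Swing pattern, reduced to a standalone panel (all names here are illustrative):

    import javax.swing.JPanel;
    import javax.swing.event.AncestorEvent;
    import javax.swing.event.AncestorListener;

    public class RefreshOnShowPanel extends JPanel {

        public RefreshOnShowPanel() {
            addAncestorListener(new AncestorListener() {
                @Override
                public void ancestorAdded(AncestorEvent event) {
                    // Fires on the EDT each time the component joins a
                    // visible hierarchy, i.e. whenever it is (re)shown.
                    refreshData();
                }

                @Override
                public void ancestorRemoved(AncestorEvent event) {
                    // No-op: nothing to tear down in this sketch.
                }

                @Override
                public void ancestorMoved(AncestorEvent event) {
                    // No-op: position changes are irrelevant here.
                }
            });
        }

        private void refreshData() {
            // Query the model and repopulate the controls here.
        }
    }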
diff --git a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties
index 747f0c82f7..9a9ebd3929 100644
--- a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties
@@ -14,7 +14,7 @@ FiltersPanel.endCheckBox.text=End:
FiltersPanel.refreshButton.text=Refresh
FiltersPanel.deviceRequiredLabel.text=Select at least one.
FiltersPanel.accountTypeRequiredLabel.text=Select at least one.
-FiltersPanel.needsRefreshLabel.text=Displayed data is out of date. Press Refresh.
+FiltersPanel.needsRefreshLabel.text=Displayed data may be out of date. Press Refresh to update.
VisualizationPanel.jButton1.text=Fast Organic
CVTTopComponent.vizPanel.TabConstraints.tabTitle=Visualize
CVTTopComponent.accountsBrowser.TabConstraints.tabTitle_1=Browse
diff --git a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
index 59778273ab..0d4db75372 100755
--- a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
@@ -26,7 +26,7 @@ FiltersPanel.endCheckBox.text=End:
FiltersPanel.refreshButton.text=Refresh
FiltersPanel.deviceRequiredLabel.text=Select at least one.
FiltersPanel.accountTypeRequiredLabel.text=Select at least one.
-FiltersPanel.needsRefreshLabel.text=Displayed data is out of date. Press Refresh.
+FiltersPanel.needsRefreshLabel.text=Displayed data may be out of date. Press Refresh to update.
OpenCVTAction.displayName=Communications
PinAccountsAction.pluralText=Add Selected Accounts to Visualization
PinAccountsAction.singularText=Add Selected Account to Visualization
diff --git a/Core/src/org/sleuthkit/autopsy/communications/CVTFilterRefresher.java b/Core/src/org/sleuthkit/autopsy/communications/CVTFilterRefresher.java
new file mode 100755
index 0000000000..1ba9d6c81e
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/communications/CVTFilterRefresher.java
@@ -0,0 +1,161 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.communications;
+
+import java.beans.PropertyChangeEvent;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.swing.SwingUtilities;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
+import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.datamodel.Account;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.DataSource;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Refreshes the CVTFilterPanel.
+ */
+abstract class CVTFilterRefresher implements RefreshThrottler.Refresher {
+
+ private static final Logger logger = Logger.getLogger(CVTFilterRefresher.class.getName());
+ /**
+ * Contains all of the GUI-control-specific update code. refresh() calls
+ * this method via invokeLater so that the Swing controls are updated on
+ * the EDT.
+ *
+ * @param data The refreshed filter data to display.
+ */
+ abstract void updateFilterPanel(FilterPanelData data);
+
+ @Override
+ public void refresh() {
+ try {
+ Integer startTime;
+ Integer endTime;
+ SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+
+ // Fetch Min/Max start times
+ try (SleuthkitCase.CaseDbQuery dbQuery = skCase.executeQuery("SELECT MAX(date_time) as end, MIN(date_time) as start from account_relationships")) {
+ // ResultSet is closed by CaseDbQuery
+ ResultSet rs = dbQuery.getResultSet();
+ rs.next();
+ startTime = rs.getInt("start"); // NON-NLS
+ endTime = rs.getInt("end"); // NON-NLS
+ }
+ // Get the devices with CVT artifacts
+ List<Integer> deviceObjIds = new ArrayList<>();
+ try (SleuthkitCase.CaseDbQuery queryResult = skCase.executeQuery("SELECT DISTINCT data_source_obj_id FROM account_relationships")) {
+ // ResultSet is closed by CaseDbQuery
+ ResultSet rs = queryResult.getResultSet();
+ while (rs.next()) {
+ deviceObjIds.add(rs.getInt(1));
+ }
+ }
+
+ // The map key is the Content name instead of the data source name
+ // to match how the CVT filters work.
+ Map<String, DataSource> dataSourceMap = new HashMap<>();
+ for (DataSource dataSource : skCase.getDataSources()) {
+ if (deviceObjIds.contains((int) dataSource.getId())) {
+ String dsName = skCase.getContentById(dataSource.getId()).getName();
+ dataSourceMap.put(dsName, dataSource);
+ }
+ }
+
+ List<Account.Type> accountTypesInUse = skCase.getCommunicationsManager().getAccountTypesInUse();
+
+ SwingUtilities.invokeLater(new Runnable() {
+ @Override
+ public void run() {
+ updateFilterPanel(new FilterPanelData(dataSourceMap, accountTypesInUse, startTime, endTime));
+ }
+ });
+
+ } catch (SQLException | TskCoreException ex) {
+ logger.log(Level.WARNING, "Unable to update CVT filter panel.", ex);
+ } catch (NoCurrentCaseException notUsed) {
+ /**
+ * Case is closed, do nothing.
+ */
+ }
+
+ }
+
+ @Override
+ public boolean isRefreshRequired(PropertyChangeEvent evt) {
+ String eventType = evt.getPropertyName();
+ if (eventType.equals(DATA_ADDED.toString())) {
+ // Indicate that a refresh may be needed, unless the data added is Keyword or Hashset hits
+ ModuleDataEvent eventData = (ModuleDataEvent) evt.getOldValue();
+ return (null != eventData
+ && (eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID()
+ || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT.getTypeID()
+ || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG.getTypeID()
+ || eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID()));
+ }
+
+ return false;
+ }
+
+ /**
+ * Class to hold the data for setting up the filter panel gui controls.
+ */
+ class FilterPanelData {
+
+ private final Map<String, DataSource> dataSourceMap;
+ private final Integer startTime;
+ private final Integer endTime;
+ private final List<Account.Type> accountTypesInUse;
+
+ FilterPanelData(Map<String, DataSource> dataSourceMap, List<Account.Type> accountTypesInUse, Integer startTime, Integer endTime) {
+ this.dataSourceMap = dataSourceMap;
+ this.startTime = startTime;
+ this.endTime = endTime;
+ this.accountTypesInUse = accountTypesInUse;
+ }
+
+ Map<String, DataSource> getDataSourceMap() {
+ return dataSourceMap;
+ }
+
+ Integer getStartTime() {
+ return startTime;
+ }
+
+ Integer getEndTime() {
+ return endTime;
+ }
+
+ List<Account.Type> getAccountTypesInUse() {
+ return accountTypesInUse;
+ }
+
+ }
+
+}
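
From the calls visible in this patch, the throttler contract is: implement RefreshThrottler.Refresher, hand the refresher to a RefreshThrottler, and register/unregister it around the component lifecycle. A sketch of that wiring, inferred from this diff rather than verified against RefreshThrottler's source (see the FiltersPanel changes below for the real usage):

    // Inferred usage pattern; FilterPanelData and its accessors are shown above.
    RefreshThrottler throttler = new RefreshThrottler(new CVTFilterRefresher() {
        @Override
        void updateFilterPanel(FilterPanelData data) {
            // Push data.getDataSourceMap(), data.getAccountTypesInUse(),
            // and data.getStartTime()/getEndTime() into the Swing controls.
            // This runs on the EDT via the invokeLater in refresh().
        }
    });

    // In the hosting component's addNotify()/removeNotify():
    throttler.registerForIngestModuleEvents();
    // ...
    throttler.unregisterEventListener();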
diff --git a/Core/src/org/sleuthkit/autopsy/communications/CVTTopComponent.java b/Core/src/org/sleuthkit/autopsy/communications/CVTTopComponent.java
index 5f62c73c67..fbd41b5840 100644
--- a/Core/src/org/sleuthkit/autopsy/communications/CVTTopComponent.java
+++ b/Core/src/org/sleuthkit/autopsy/communications/CVTTopComponent.java
@@ -189,7 +189,7 @@ public final class CVTTopComponent extends TopComponent {
*
* Re-applying the filters means we will lose the selection...
*/
- filtersPane.updateAndApplyFilters(true);
+ filtersPane.initalizeFilters();
}
@Override
diff --git a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.form b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.form
index baa62440be..8598d04494 100644
--- a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.form
+++ b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.form
@@ -18,11 +18,11 @@
+
+
-
-
@@ -222,9 +222,14 @@
+
+
+
+
+
-
+
@@ -277,7 +282,6 @@
-
diff --git a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java
index 70f5de0a5e..7bdf3a46e3 100644
--- a/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/communications/FiltersPanel.java
@@ -18,25 +18,24 @@
*/
package org.sleuthkit.autopsy.communications;
+import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
import com.google.common.collect.ImmutableSet;
import com.google.common.eventbus.Subscribe;
import java.awt.event.ItemListener;
import java.beans.PropertyChangeListener;
-import java.sql.ResultSet;
-import java.sql.SQLException;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneId;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.logging.Level;
import java.util.stream.Collectors;
import javax.swing.Box;
import javax.swing.BoxLayout;
@@ -45,11 +44,9 @@ import javax.swing.ImageIcon;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
-import javax.swing.SwingWorker;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
@@ -59,7 +56,6 @@ import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbAccessQueryCallback;
import org.sleuthkit.datamodel.CommunicationsFilter;
import org.sleuthkit.datamodel.CommunicationsFilter.AccountTypeFilter;
import org.sleuthkit.datamodel.CommunicationsFilter.DateRangeFilter;
@@ -69,8 +65,6 @@ import org.sleuthkit.datamodel.DataSource;
import static org.sleuthkit.datamodel.Relationship.Type.CALL_LOG;
import static org.sleuthkit.datamodel.Relationship.Type.CONTACT;
import static org.sleuthkit.datamodel.Relationship.Type.MESSAGE;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
/**
* Panel that holds the Filter control widgets and triggers queries against the
@@ -114,6 +108,8 @@ final public class FiltersPanel extends JPanel {
*/
private final ItemListener validationListener;
+ private final RefreshThrottler refreshThrottler;
+
/**
* Is the device account type filter enabled or not. It should be enabled
* when the Table/Brows mode is active and disabled when the visualization
@@ -129,6 +125,7 @@ final public class FiltersPanel extends JPanel {
initComponents();
initalizeDeviceAccountType();
+ setDateTimeFiltersToDefault();
deviceRequiredLabel.setVisible(false);
accountTypeRequiredLabel.setVisible(false);
@@ -160,25 +157,27 @@ final public class FiltersPanel extends JPanel {
if (eventType.equals(DATA_ADDED.toString())) {
// Indicate that a refresh may be needed, unless the data added is Keyword or Hashset hits
ModuleDataEvent eventData = (ModuleDataEvent) pce.getOldValue();
- if (null != eventData
+ if (!needsRefresh
+ && null != eventData
&& (eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE.getTypeID()
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT.getTypeID()
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG.getTypeID()
|| eventData.getBlackboardArtifactType().getTypeID() == BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG.getTypeID())) {
- updateFilters(true);
needsRefresh = true;
validateFilters();
}
}
};
+ refreshThrottler = new RefreshThrottler(new FilterPanelRefresher(false, false));
+
this.ingestJobListener = pce -> {
String eventType = pce.getPropertyName();
- if (eventType.equals(COMPLETED.toString())
- && updateFilters(true)) {
+ if (eventType.equals(COMPLETED.toString()) && !needsRefresh) {
needsRefresh = true;
validateFilters();
+
}
};
@@ -220,39 +219,24 @@ final public class FiltersPanel extends JPanel {
}
}
- /**
- * Update the filter widgets, and apply them.
- */
- void updateAndApplyFilters(boolean initialState) {
- updateFilters(initialState);
- applyFilters();
- initalizeDateTimeFilters();
+ void initalizeFilters() {
+ Runnable runnable = new Runnable() {
+ @Override
+ public void run() {
+ new FilterPanelRefresher(true, true).refresh();
+ }
+ };
+ runnable.run();
}
private void updateTimeZone() {
dateRangeLabel.setText("Date Range (" + Utils.getUserPreferredZoneId().toString() + "):");
}
- /**
- * Updates the filter widgets to reflect he data sources/types in the case.
- */
- private boolean updateFilters(boolean initialState) {
- final SleuthkitCase sleuthkitCase;
- try {
- sleuthkitCase = Case.getCurrentCaseThrows().getSleuthkitCase();
- } catch (NoCurrentCaseException ex) {
- logger.log(Level.WARNING, "Unable to perform filter update, update has been cancelled. Case is closed.", ex);
- return false;
- }
- boolean newAccountType = updateAccountTypeFilter(initialState, sleuthkitCase);
- boolean newDeviceFilter = updateDeviceFilter(initialState, sleuthkitCase);
- // both or either are true, return true;
- return newAccountType || newDeviceFilter;
- }
-
@Override
public void addNotify() {
super.addNotify();
+ refreshThrottler.registerForIngestModuleEvents();
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, ingestListener);
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, ingestJobListener);
Case.addEventTypeSubscriber(EnumSet.of(CURRENT_CASE), evt -> {
@@ -270,6 +254,7 @@ final public class FiltersPanel extends JPanel {
@Override
public void removeNotify() {
super.removeNotify();
+ refreshThrottler.unregisterEventListener();
IngestManager.getInstance().removeIngestModuleEventListener(ingestListener);
IngestManager.getInstance().removeIngestJobEventListener(ingestJobListener);
}
@@ -283,33 +268,25 @@ final public class FiltersPanel extends JPanel {
/**
* Populate the Account Types filter widgets.
*
- * @param selected The initial value for the account type checkbox.
- * @param sleuthkitCase The sleuthkit case for containing the account
- * information.
+ * @param accountTypesInUse List of accountTypes currently in use
*
* @return True, if a new accountType was found
*/
- private boolean updateAccountTypeFilter(boolean selected, SleuthkitCase sleuthkitCase) {
+ private boolean updateAccountTypeFilter(List<Account.Type> accountTypesInUse, boolean checkNewOnes) {
boolean newOneFound = false;
- try {
- List<Account.Type> accountTypesInUse = sleuthkitCase.getCommunicationsManager().getAccountTypesInUse();
- for (Account.Type type : accountTypesInUse) {
+ for (Account.Type type : accountTypesInUse) {
+ if (!accountTypeMap.containsKey(type) && !type.equals(Account.Type.CREDIT_CARD)) {
+ CheckBoxIconPanel panel = createAccoutTypeCheckBoxPanel(type, checkNewOnes);
+ accountTypeMap.put(type, panel.getCheckBox());
+ accountTypeListPane.add(panel);
- if (!accountTypeMap.containsKey(type) && !type.equals(Account.Type.CREDIT_CARD)) {
- CheckBoxIconPanel panel = createAccoutTypeCheckBoxPanel(type, selected);
- accountTypeMap.put(type, panel.getCheckBox());
- accountTypeListPane.add(panel);
-
- newOneFound = true;
- }
+ newOneFound = true;
}
-
- } catch (TskCoreException ex) {
- logger.log(Level.WARNING, "Unable to update to update Account Types Filter", ex);
}
+
if (newOneFound) {
- accountTypeListPane.revalidate();
+ accountTypeListPane.validate();
}
return newOneFound;
@@ -343,32 +320,43 @@ final public class FiltersPanel extends JPanel {
*
* @return true if a new device was found
*/
- private boolean updateDeviceFilter(boolean selected, SleuthkitCase sleuthkitCase) {
+ private void updateDeviceFilterPanel(Map<String, DataSource> dataSourceMap, boolean checkNewOnes) {
boolean newOneFound = false;
- try {
- for (DataSource dataSource : sleuthkitCase.getDataSources()) {
- String dsName = sleuthkitCase.getContentById(dataSource.getId()).getName();
- if (devicesMap.containsKey(dataSource.getDeviceId())) {
- continue;
- }
-
- final JCheckBox jCheckBox = new JCheckBox(dsName, selected);
- jCheckBox.addItemListener(validationListener);
- devicesListPane.add(jCheckBox);
- devicesMap.put(dataSource.getDeviceId(), jCheckBox);
-
- newOneFound = true;
-
+ for (Entry<String, DataSource> entry : dataSourceMap.entrySet()) {
+ if (devicesMap.containsKey(entry.getValue().getDeviceId())) {
+ continue;
}
- } catch (TskCoreException tskCoreException) {
- logger.log(Level.SEVERE, "There was a error loading the datasources for the case.", tskCoreException);
+
+ final JCheckBox jCheckBox = new JCheckBox(entry.getKey(), checkNewOnes);
+ jCheckBox.addItemListener(validationListener);
+ jCheckBox.setToolTipText(entry.getKey());
+ devicesListPane.add(jCheckBox);
+ devicesMap.put(entry.getValue().getDeviceId(), jCheckBox);
+
+ newOneFound = true;
}
if (newOneFound) {
+ devicesListPane.removeAll();
+ List<JCheckBox> checkList = new ArrayList<>(devicesMap.values());
+ checkList.sort(new DeviceCheckBoxComparator());
+
+ for (JCheckBox cb : checkList) {
+ devicesListPane.add(cb);
+ }
+
devicesListPane.revalidate();
}
+ }
- return newOneFound;
+ private void updateDateTimePicker(Integer start, Integer end) {
+ if (start != null && start != 0) {
+ startDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(start), Utils.getUserPreferredZoneId()).toLocalDate());
+ }
+
+ if (end != null && end != 0) {
+ endDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(end), Utils.getUserPreferredZoneId()).toLocalDate());
+ }
}
/**
@@ -477,9 +465,9 @@ final public class FiltersPanel extends JPanel {
setLayout(new java.awt.GridBagLayout());
- scrollPane.setBorder(null);
scrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
scrollPane.setAutoscrolls(true);
+ scrollPane.setBorder(null);
mainPanel.setLayout(new java.awt.GridBagLayout());
@@ -613,6 +601,7 @@ final public class FiltersPanel extends JPanel {
gridBagConstraints.insets = new java.awt.Insets(15, 0, 0, 25);
mainPanel.add(dateRangePane, gridBagConstraints);
+ devicesPane.setPreferredSize(new java.awt.Dimension(300, 300));
devicesPane.setLayout(new java.awt.GridBagLayout());
unCheckAllDevicesButton.setText(org.openide.util.NbBundle.getMessage(FiltersPanel.class, "FiltersPanel.unCheckAllDevicesButton.text")); // NOI18N
@@ -652,7 +641,6 @@ final public class FiltersPanel extends JPanel {
gridBagConstraints.insets = new java.awt.Insets(9, 0, 0, 0);
devicesPane.add(checkAllDevicesButton, gridBagConstraints);
- devicesScrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
devicesScrollPane.setMaximumSize(new java.awt.Dimension(32767, 30));
devicesScrollPane.setMinimumSize(new java.awt.Dimension(27, 30));
devicesScrollPane.setPreferredSize(new java.awt.Dimension(3, 30));
@@ -686,10 +674,10 @@ final public class FiltersPanel extends JPanel {
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
- gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
- gridBagConstraints.ipady = 100;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(15, 0, 0, 25);
mainPanel.add(devicesPane, gridBagConstraints);
@@ -836,10 +824,11 @@ final public class FiltersPanel extends JPanel {
/**
* Post an event with the new filters.
*/
- private void applyFilters() {
- CVTEvents.getCVTEventBus().post(new CVTEvents.FilterChangeEvent(getFilter(), getStartControlState(), getEndControlState()));
+ void applyFilters() {
needsRefresh = false;
validateFilters();
+ CVTEvents.getCVTEventBus().post(new CVTEvents.FilterChangeEvent(getFilter(), getStartControlState(), getEndControlState()));
+
}
/**
@@ -958,31 +947,6 @@ final public class FiltersPanel extends JPanel {
map.values().forEach(box -> box.setSelected(selected));
}
- /**
- * initalize the DateTimePickers by grabbing the earliest and latest time
- * from the autopsy db.
- */
- private void initalizeDateTimeFilters() {
- Case currentCase = null;
- try {
- currentCase = Case.getCurrentCaseThrows();
- } catch (NoCurrentCaseException ex) {
- logger.log(Level.INFO, "Tried to intialize communication filters date range filters without an open case, using default values");
- }
-
- if (currentCase == null) {
- setDateTimeFiltersToDefault();
- openCase = null;
- return;
- }
-
- if (!currentCase.equals(openCase)) {
- setDateTimeFiltersToDefault();
- openCase = currentCase;
- (new DatePickerWorker()).execute();
- }
- }
-
private void setDateTimeFiltersToDefault() {
startDatePicker.setDate(LocalDate.now().minusWeeks(3));
endDatePicker.setDate(LocalDate.now());
@@ -1159,69 +1123,51 @@ final public class FiltersPanel extends JPanel {
}
/**
- * A simple class that implements CaseDbAccessQueryCallback. Can be used as
- * an anonymous innerclass with the CaseDbAccessManager select function.
+ * Extends the CVTFilterRefresher abstract class to add the calls that update
+ * the UI controls with the data found. Note that updateFilterPanel runs
+ * on the EDT.
*/
- class FilterPanelQueryCallback implements CaseDbAccessQueryCallback {
+ final class FilterPanelRefresher extends CVTFilterRefresher {
+
+ private final boolean selectNewOption;
+ private final boolean refreshAfterUpdate;
+
+ FilterPanelRefresher(boolean selectNewOptions, boolean refreshAfterUpdate) {
+ this.selectNewOption = selectNewOptions;
+ this.refreshAfterUpdate = refreshAfterUpdate;
+ }
@Override
- public void process(ResultSet rs) {
- // Subclasses can implement their own process function.
+ void updateFilterPanel(CVTFilterRefresher.FilterPanelData data) {
+ updateDateTimePicker(data.getStartTime(), data.getEndTime());
+ updateDeviceFilterPanel(data.getDataSourceMap(), selectNewOption);
+ updateAccountTypeFilter(data.getAccountTypesInUse(), selectNewOption);
+
+ FiltersPanel.this.repaint();
+
+ if (refreshAfterUpdate) {
+ applyFilters();
+ }
+
+ if (!isEnabled()) {
+ setEnabled(true);
+ }
+
+ validateFilters();
+
+ repaint();
}
}
- final class DatePickerWorker extends SwingWorker<Map<String, Integer>, Void> {
+ /**
+ * Sorts a list of JCheckBoxes alphabetically by their text values
+ * (case-insensitive).
+ */
+ class DeviceCheckBoxComparator implements Comparator<JCheckBox> {
@Override
- protected Map<String, Integer> doInBackground() throws Exception {
- if (openCase == null) {
- return null;
- }
-
- Map<String, Integer> resultMap = new HashMap<>();
- String queryString = "max(date_time) as end, min(date_time) as start from account_relationships"; // NON-NLS
-
- openCase.getSleuthkitCase().getCaseDbAccessManager().select(queryString, new FilterPanelQueryCallback() {
- @Override
- public void process(ResultSet rs) {
- try {
- if (rs.next()) {
- int startDate = rs.getInt("start"); // NON-NLS
- int endDate = rs.getInt("end"); // NON-NLS
-
- resultMap.put("start", startDate); // NON-NLS
- resultMap.put("end", endDate); // NON-NLS
- }
- } catch (SQLException ex) {
- // Not the end of the world if this fails.
- logger.log(Level.WARNING, String.format("SQL Exception thrown from Query: %s", queryString), ex);
- }
- }
- });
-
- return resultMap;
- }
-
- @Override
- protected void done() {
- try {
- Map<String, Integer> resultMap = get();
- if (resultMap != null) {
- Integer start = resultMap.get("start");
- Integer end = resultMap.get("end");
-
- if (start != null && start != 0) {
- startDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(start), Utils.getUserPreferredZoneId()).toLocalDate());
- }
-
- if (end != null && end != 0) {
- endDatePicker.setDate(LocalDateTime.ofInstant(Instant.ofEpochSecond(end), Utils.getUserPreferredZoneId()).toLocalDate());
- }
- }
- } catch (InterruptedException | ExecutionException ex) {
- logger.log(Level.WARNING, "Exception occured after date time sql query", ex);
- }
+ public int compare(JCheckBox e1, JCheckBox e2) {
+ return e1.getText().toLowerCase().compareTo(e2.getText().toLowerCase());
}
}
-
}
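
As an aside, DeviceCheckBoxComparator reimplements what the JDK's comparator combinators already provide; a near-equivalent one-liner, shown only for comparison (it assumes java.util.Comparator is imported):

    // Near-equivalent to DeviceCheckBoxComparator, using a JDK combinator.
    checkList.sort(Comparator.comparing(JCheckBox::getText, String.CASE_INSENSITIVE_ORDER));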
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED
index c69da11d54..317d7050de 100644
--- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/Bundle.properties-MERGED
@@ -61,6 +61,7 @@ DefaultArtifactContentViewer.copyMenuItem.text=Copy
DefaultArtifactContentViewer.selectAllMenuItem.text=Select All
MessageAccountPanel_button_create_label=Create
MessageAccountPanel_button_view_label=View
+MessageAccountPanel_no_matches=No matches found.
MessageAccountPanel_persona_label=Persona:
MessageAccountPanel_unknown_label=Unknown
MessageArtifactViewer.AttachmentPanel.title=Attachments
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageAccountPanel.java b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageAccountPanel.java
index 61d14f19a0..050fca83fe 100755
--- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageAccountPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageAccountPanel.java
@@ -126,26 +126,47 @@ final class MessageAccountPanel extends JPanel {
return dataList;
}
+ @Messages({
+ "MessageAccountPanel_no_matches=No matches found.",
+ })
@Override
protected void done() {
try {
List<AccountContainer> dataList = get();
- dataList.forEach(container -> {
- container.initalizeSwingControls();
- });
+ if (!dataList.isEmpty()) {
+ dataList.forEach(container -> {
+ container.initalizeSwingControls();
+ });
- GroupLayout layout = new GroupLayout(MessageAccountPanel.this);
- layout.setHorizontalGroup(
- layout.createParallelGroup(Alignment.LEADING)
- .addGroup(layout.createSequentialGroup()
- .addContainerGap()
- .addGroup(getMainHorizontalGroup(layout, dataList))
- .addContainerGap(158, Short.MAX_VALUE)));
+ GroupLayout layout = new GroupLayout(MessageAccountPanel.this);
+ layout.setHorizontalGroup(
+ layout.createParallelGroup(Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addContainerGap()
+ .addGroup(getMainHorizontalGroup(layout, dataList))
+ .addContainerGap(158, Short.MAX_VALUE)));
- layout.setVerticalGroup(getMainVerticalGroup(layout, dataList));
- setLayout(layout);
- repaint();
+ layout.setVerticalGroup(getMainVerticalGroup(layout, dataList));
+ setLayout(layout);
+ repaint();
+ } else {
+ // No match found, display a message.
+ JPanel messagePanel = new javax.swing.JPanel();
+ JLabel messageLabel = new javax.swing.JLabel();
+
+ messagePanel.setLayout(new java.awt.BorderLayout());
+
+ messageLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
+ messageLabel.setText(Bundle.MessageAccountPanel_no_matches());
+ messageLabel.setEnabled(false);
+ messagePanel.add(messageLabel, java.awt.BorderLayout.CENTER);
+
+ setLayout(new javax.swing.OverlayLayout(MessageAccountPanel.this));
+
+ add(messagePanel);
+ repaint();
+ }
} catch (CancellationException ex) {
logger.log(Level.INFO, "MessageAccoutPanel thread cancelled", ex);
} catch (InterruptedException | ExecutionException ex) {
diff --git a/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/DataSourceProcessor.java b/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/DataSourceProcessor.java
index 8e81590231..de51cf8add 100644
--- a/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/DataSourceProcessor.java
+++ b/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/DataSourceProcessor.java
@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.corecomponentinterfaces;
import javax.swing.JPanel;
+import org.sleuthkit.autopsy.ingest.IngestJobSettings;
/**
* Interface implemented by classes that add data sources of a particular type
@@ -36,10 +37,6 @@ import javax.swing.JPanel;
*
* Data source processors should perform all processing in a background task in
* a separate thread, reporting results using a callback object.
- *
- * It is recommended that implementers provide an overload of the run method
- * that allows the data source processor to be run independently of the
- * selection and configuration panel.
*/
public interface DataSourceProcessor {
@@ -111,6 +108,38 @@ public interface DataSourceProcessor {
* to return results.
*/
void run(DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback);
+
+ /**
+ * Adds a data source to the case database using a background task in a
+ * separate thread and the settings provided by the selection and
+ * configuration panel. Files found during ingest will be sent directly to
+ * the IngestStream provided. Returns as soon as the background task is
+ * started. The background task uses a callback object to signal task
+ * completion and return results.
+ *
+ * This method should not be called unless isPanelValid returns true, and
+ * should only be called for DSPs that support ingest streams. The ingest
+ * settings must be complete before calling this method.
+ *
+ * @param settings The ingest job settings.
+ * @param progress Progress monitor that will be used by the background task
+ * to report progress.
+ * @param callBack Callback that will be used by the background task to
+ * return results.
+ */
+ default void runWithIngestStream(IngestJobSettings settings, DataSourceProcessorProgressMonitor progress,
+ DataSourceProcessorCallback callBack) {
+ throw new UnsupportedOperationException("Streaming ingest not supported for this data source processor");
+ }
+
+ /**
+ * Check if this DSP supports ingest streams.
+ *
+ * @return True if this DSP supports an ingest stream, false otherwise.
+ */
+ default boolean supportsIngestStream() {
+ return false;
+ }
/**
* Requests cancellation of the background task that adds a data source to
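A caller-side sketch of how the two new default methods are meant to be used together (hypothetical driver code; in this patch the equivalent logic lives in the plumbing that starts data source processors):

    // Choose between batch and streaming ingest based on the new capability check.
    void startProcessing(DataSourceProcessor dsp, IngestJobSettings settings,
            DataSourceProcessorProgressMonitor monitor, DataSourceProcessorCallback callback) {
        if (dsp.supportsIngestStream()) {
            // Files are analyzed as the DSP finds them; ingest settings must be final.
            dsp.runWithIngestStream(settings, monitor, callback);
        } else {
            // Classic behavior: add the data source first, start ingest afterwards.
            dsp.run(monitor, callback);
        }
    }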
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
index 8d9943ffc4..8408c0c74a 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
@@ -55,6 +55,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWO
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
/**
* Parent of the "extracted content" artifacts to be displayed in the tree.
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
index 566545eecd..7108d13a84 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
@@ -48,6 +48,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
/**
* Filters database results by file extension.
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
index 370b4f2809..5e2f19648c 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
@@ -50,6 +50,7 @@ import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
/**
* Class which contains the Nodes for the 'By Mime Type' view located in the
diff --git a/Core/src/org/sleuthkit/autopsy/discovery/DiscoveryTopComponent.java b/Core/src/org/sleuthkit/autopsy/discovery/DiscoveryTopComponent.java
index 607459325d..526207981c 100644
--- a/Core/src/org/sleuthkit/autopsy/discovery/DiscoveryTopComponent.java
+++ b/Core/src/org/sleuthkit/autopsy/discovery/DiscoveryTopComponent.java
@@ -39,14 +39,14 @@ import org.sleuthkit.autopsy.discovery.FileSearchFiltering.FileFilter;
/**
* Create a dialog for displaying the Discovery results.
*/
-@TopComponent.Description(preferredID = "DiscoveryTopComponent", persistenceType = TopComponent.PERSISTENCE_NEVER)
+@TopComponent.Description(preferredID = "Discovery", persistenceType = TopComponent.PERSISTENCE_NEVER)
@TopComponent.Registration(mode = "discovery", openAtStartup = false)
@RetainLocation("discovery")
@NbBundle.Messages("DiscoveryTopComponent.name= Discovery")
public final class DiscoveryTopComponent extends TopComponent {
private static final long serialVersionUID = 1L;
- private static final String PREFERRED_ID = "DiscoveryTopComponent"; // NON-NLS
+ private static final String PREFERRED_ID = "Discovery"; // NON-NLS
private final GroupListPanel groupListPanel;
private final DetailsPanel detailsPanel;
private final ResultsPanel resultsPanel;
diff --git a/Core/src/org/sleuthkit/autopsy/geolocation/datamodel/Route.java b/Core/src/org/sleuthkit/autopsy/geolocation/datamodel/Route.java
index 13a23b30e3..d275c20ac1 100755
--- a/Core/src/org/sleuthkit/autopsy/geolocation/datamodel/Route.java
+++ b/Core/src/org/sleuthkit/autopsy/geolocation/datamodel/Route.java
@@ -20,6 +20,7 @@
package org.sleuthkit.autopsy.geolocation.datamodel;
import java.util.Collections;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.openide.util.NbBundle.Messages;
@@ -58,12 +59,12 @@ public class Route extends GeoPath {
Map<BlackboardAttribute.ATTRIBUTE_TYPE, BlackboardAttribute> attributeMap = Waypoint.getAttributesFromArtifactAsMap(artifact);
- createRoute(artifact, attributeMap);
-
BlackboardAttribute attribute = attributeMap.get(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME);
timestamp = attribute != null ? attribute.getValueLong() : null;
propertiesList = Waypoint.createGeolocationProperties(attributeMap);
+
+ createRoute(artifact, attributeMap);
}
/**
@@ -124,7 +125,14 @@ public class Route extends GeoPath {
throw new GeoLocationDataException(String.format("Unable to parse waypoints in TSK_GEO_WAYPOINTS attribute (artifact object ID =%d)", artifact.getId()), ex);
}
for (GeoWaypoints.Waypoint waypoint : waypoints) {
- addToPath(new Waypoint(artifact, label, null, waypoint.getLatitude(), waypoint.getLongitude(), waypoint.getAltitude(), null, attributeMap, this));
+ String name = waypoint.getName();
+ Map<BlackboardAttribute.ATTRIBUTE_TYPE, BlackboardAttribute> map = attributeMap;
+ if(name != null && !name.isEmpty()) {
+ BlackboardAttribute pointNameAtt = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_LOCATION, "", name);
+ map = new HashMap<>(attributeMap);
+ map.put(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_LOCATION, pointNameAtt);
+ }
+ addToPath(new Waypoint(artifact, label, timestamp, waypoint.getLatitude(), waypoint.getLongitude(), waypoint.getAltitude(), null, map, this));
}
} else {
Waypoint start = getRouteStartPoint(artifact, attributeMap);
@@ -157,12 +165,14 @@ public class Route extends GeoPath {
BlackboardAttribute altitude = attributeMap.get(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE);
if (latitude != null && longitude != null) {
- return new RoutePoint(artifact,
+ return new Waypoint(artifact,
Bundle.Route_Start_Label(),
+ timestamp,
latitude.getValueDouble(),
longitude.getValueDouble(),
altitude != null ? altitude.getValueDouble() : null,
- attributeMap);
+ null,
+ attributeMap, this);
} else {
throw new GeoLocationDataException("Unable to create route start point, invalid longitude and/or latitude");
}
@@ -190,49 +200,17 @@ public class Route extends GeoPath {
if (latitude != null && longitude != null) {
- return new RoutePoint(artifact,
+ return new Waypoint(artifact,
Bundle.Route_End_Label(),
+ timestamp,
latitude.getValueDouble(),
longitude.getValueDouble(),
altitude != null ? altitude.getValueDouble() : null,
- attributeMap);
+ null,
+ attributeMap,
+ this);
} else {
throw new GeoLocationDataException("Unable to create route end point, invalid longitude and/or latitude");
}
}
-
- /**
- * Route waypoint specific implementation of Waypoint.
- */
- private class RoutePoint extends Waypoint {
-
- /**
- * Construct a RoutePoint
- *
- * @param artifact BlackboardArtifact for this waypoint
- * @param label String waypoint label
- * @param latitude Double waypoint latitude
- * @param longitude Double waypoint longitude
- *
- * @param attributeMap A Map of attributes for the given artifact
- *
- * @throws GeoLocationDataException
- */
- RoutePoint(BlackboardArtifact artifact, String label, Double latitude, Double longitude, Double altitude, Map<BlackboardAttribute.ATTRIBUTE_TYPE, BlackboardAttribute> attributeMap) throws GeoLocationDataException {
- super(artifact,
- label,
- null,
- latitude,
- longitude,
- altitude,
- null,
- attributeMap,
- Route.this);
- }
-
- @Override
- public Long getTimestamp() {
- return ((Route) getParentGeoPath()).getTimestamp();
- }
- }
}
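The map handling above is copy-on-write: the attribute map shared by all of a route's points is duplicated only when a waypoint actually carries a name, so the TSK_LOCATION entry never leaks into the other points. A self-contained illustration of that aliasing concern (plain HashMap, hypothetical values):

    import java.util.HashMap;
    import java.util.Map;

    public class CopyOnNameDemo {
        public static void main(String[] args) {
            Map<String, String> shared = new HashMap<>();
            shared.put("TSK_DATETIME", "1583508200");

            // What the patch does: copy first, then add the point name.
            Map<String, String> forNamedPoint = new HashMap<>(shared);
            forNamedPoint.put("TSK_LOCATION", "Checkpoint 1");

            System.out.println(shared.containsKey("TSK_LOCATION"));        // false
            System.out.println(forNamedPoint.containsKey("TSK_LOCATION")); // true
        }
    }

Passing the route's timestamp straight into the Waypoint constructor is also what makes the removed RoutePoint subclass unnecessary: its only job was to override getTimestamp() to return the parent route's value.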
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/RefreshThrottler.java b/Core/src/org/sleuthkit/autopsy/guiutils/RefreshThrottler.java
old mode 100644
new mode 100755
similarity index 94%
rename from Core/src/org/sleuthkit/autopsy/datamodel/RefreshThrottler.java
rename to Core/src/org/sleuthkit/autopsy/guiutils/RefreshThrottler.java
index a8d3ed5581..2610642761
--- a/Core/src/org/sleuthkit/autopsy/datamodel/RefreshThrottler.java
+++ b/Core/src/org/sleuthkit/autopsy/guiutils/RefreshThrottler.java
@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.sleuthkit.autopsy.datamodel;
+package org.sleuthkit.autopsy.guiutils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
@@ -33,13 +33,13 @@ import org.sleuthkit.autopsy.ingest.IngestManager;
* potentially expensive UI refresh events when DATA_ADDED and CONTENT_CHANGED
* ingest manager events are received.
*/
-class RefreshThrottler {
+public class RefreshThrottler {
/**
* The Refresher interface needs to be implemented by ChildFactory instances
* that wish to take advantage of throttled refresh functionality.
*/
- interface Refresher {
+ public interface Refresher {
/**
* The RefreshThrottler calls this method when the RefreshTask runs.
@@ -89,7 +89,7 @@ class RefreshThrottler {
*/
private final PropertyChangeListener pcl;
- RefreshThrottler(Refresher r) {
+ public RefreshThrottler(Refresher r) {
this.refreshTaskRef = new AtomicReference<>(null);
refresher = r;
@@ -112,14 +112,14 @@ class RefreshThrottler {
/**
* Set up listener for ingest module events of interest.
*/
- void registerForIngestModuleEvents() {
+ public void registerForIngestModuleEvents() {
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl);
}
/**
* Remove ingest module event listener.
*/
- void unregisterEventListener() {
+ public void unregisterEventListener() {
IngestManager.getInstance().removeIngestModuleEventListener(pcl);
}
}
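With RefreshThrottler now public, node factories outside the datamodel package can throttle their refreshes. A hedged sketch of the intended usage (the factory itself is hypothetical; the refresh() and isRefreshRequired() callbacks are assumed to match the Refresher interface as it exists in the source):

    import java.beans.PropertyChangeEvent;
    import java.util.List;
    import org.openide.nodes.ChildFactory;
    import org.sleuthkit.autopsy.guiutils.RefreshThrottler;

    class ExampleKeyFactory extends ChildFactory<Long> implements RefreshThrottler.Refresher {

        private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);

        void startObserving() {
            refreshThrottler.registerForIngestModuleEvents();
        }

        void stopObserving() {
            refreshThrottler.unregisterEventListener(); // always pair with registration
        }

        @Override
        protected boolean createKeys(List<Long> toPopulate) {
            // Query the case database for the keys this node displays.
            return true;
        }

        @Override
        public void refresh() {
            refresh(true); // ChildFactory.refresh(): recompute the keys
        }

        @Override
        public boolean isRefreshRequired(PropertyChangeEvent evt) {
            // Filter to the DATA_ADDED / CONTENT_CHANGED events this view cares about.
            return true;
        }
    }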
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties
index ee1cffea00..4b5b431537 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties
@@ -89,6 +89,7 @@ IngestJobTableModel.colName.inProgress=In Progress
IngestJobTableModel.colName.filesQueued=Files Queued
IngestJobTableModel.colName.dirQueued=Dir Queued
IngestJobTableModel.colName.rootQueued=Root Queued
+IngestJobTableModel.colName.streamingQueued=Streaming Queued
IngestJobTableModel.colName.dsQueued=DS Queued
ModuleTableModel.colName.module=Module
ModuleTableModel.colName.duration=Duration
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED
index 9e4f612b6b..157506a57f 100755
--- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties-MERGED
@@ -104,6 +104,7 @@ IngestJobTableModel.colName.inProgress=In Progress
IngestJobTableModel.colName.filesQueued=Files Queued
IngestJobTableModel.colName.dirQueued=Dir Queued
IngestJobTableModel.colName.rootQueued=Root Queued
+IngestJobTableModel.colName.streamingQueued=Streaming Queued
IngestJobTableModel.colName.dsQueued=DS Queued
ModuleTableModel.colName.module=Module
ModuleTableModel.colName.duration=Duration
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java
index 280613c2b3..a2bd23b692 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java
@@ -23,10 +23,10 @@ package org.sleuthkit.autopsy.ingest;
*/
public class DataSourceIngestModuleProgress {
- private final DataSourceIngestJob job;
+ private final IngestJobPipeline ingestJobPipeline;
- DataSourceIngestModuleProgress(DataSourceIngestJob job) {
- this.job = job;
+ DataSourceIngestModuleProgress(IngestJobPipeline pipeline) {
+ this.ingestJobPipeline = pipeline;
}
/**
@@ -38,7 +38,7 @@ public class DataSourceIngestModuleProgress {
* data source.
*/
public void switchToDeterminate(int workUnits) {
- this.job.switchDataSourceIngestProgressBarToDeterminate(workUnits);
+ this.ingestJobPipeline.switchDataSourceIngestProgressBarToDeterminate(workUnits);
}
/**
@@ -46,7 +46,7 @@ public class DataSourceIngestModuleProgress {
* the total work units to process the data source is unknown.
*/
public void switchToIndeterminate() {
- this.job.switchDataSourceIngestProgressBarToIndeterminate();
+ this.ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
}
/**
@@ -56,7 +56,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(int workUnits) {
- this.job.advanceDataSourceIngestProgressBar("", workUnits);
+ this.ingestJobPipeline.advanceDataSourceIngestProgressBar("", workUnits);
}
/**
@@ -65,7 +65,7 @@ public class DataSourceIngestModuleProgress {
* @param message Message to display
*/
public void progress(String message) {
- this.job.advanceDataSourceIngestProgressBar(message);
+ this.ingestJobPipeline.advanceDataSourceIngestProgressBar(message);
}
/**
@@ -76,7 +76,7 @@ public class DataSourceIngestModuleProgress {
* @param workUnits Number of work units performed so far by the module.
*/
public void progress(String currentTask, int workUnits) {
- this.job.advanceDataSourceIngestProgressBar(currentTask, workUnits);
+ this.ingestJobPipeline.advanceDataSourceIngestProgressBar(currentTask, workUnits);
}
}
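For module authors nothing changes here; DataSourceIngestModuleProgress keeps its public surface and only its owner type moved. A minimal sketch of a data source level module driving the progress API (the module itself is hypothetical):

    import org.sleuthkit.datamodel.Content;

    class ExampleDataSourceModule implements DataSourceIngestModule {

        @Override
        public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            int totalWorkUnits = 100; // e.g., derived from the data source contents
            progressBar.switchToDeterminate(totalWorkUnits);
            for (int i = 0; i < totalWorkUnits; i++) {
                // ... analyze one unit of work ...
                progressBar.progress("Analyzing unit " + i, i + 1);
            }
            return ProcessResult.OK;
        }
    }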
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java
index 12645df4e6..c22d63348c 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java
@@ -24,12 +24,11 @@ import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.datamodel.Content;
/**
- * This class manages a sequence of data source level ingest modules for a data
- * source ingest job. It starts the modules, runs data sources through them, and
+ * This class manages a sequence of data source level ingest modules for an
+ * ingest job pipeline. It starts the modules, runs data sources through them, and
* shuts them down when data source level ingest is complete.
*
* This class is thread-safe.
@@ -38,7 +37,7 @@ final class DataSourceIngestPipeline {
private static final IngestManager ingestManager = IngestManager.getInstance();
private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
- private final DataSourceIngestJob job;
+ private final IngestJobPipeline ingestJobPipeline;
private final List<PipelineModule> modules = new ArrayList<>();
private volatile PipelineModule currentModule;
@@ -47,13 +46,12 @@ final class DataSourceIngestPipeline {
* modules. It starts the modules, runs data sources through them, and shuts
* them down when data source level ingest is complete.
*
- * @param job The data source ingest job that owns this
- * pipeline.
+ * @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
* @param moduleTemplates Templates for creating the ingest modules that
* make up this pipeline.
*/
- DataSourceIngestPipeline(DataSourceIngestJob job, List<IngestModuleTemplate> moduleTemplates) {
- this.job = job;
+ DataSourceIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
+ this.ingestJobPipeline = ingestJobPipeline;
for (IngestModuleTemplate template : moduleTemplates) {
if (template.isDataSourceIngestModuleTemplate()) {
PipelineModule module = new PipelineModule(template.createDataSourceIngestModule(), template.getModuleName());
@@ -80,7 +78,7 @@ final class DataSourceIngestPipeline {
List<IngestModuleError> errors = new ArrayList<>();
for (PipelineModule module : modules) {
try {
- module.startUp(new IngestJobContext(this.job));
+ module.startUp(new IngestJobContext(this.ingestJobPipeline));
} catch (Throwable ex) { // Catch-all exception firewall
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
@@ -98,7 +96,7 @@ final class DataSourceIngestPipeline {
*/
synchronized List<IngestModuleError> process(DataSourceIngestTask task) {
List<IngestModuleError> errors = new ArrayList<>();
- if (!this.job.isCancelled()) {
+ if (!this.ingestJobPipeline.isCancelled()) {
Content dataSource = task.getDataSource();
for (PipelineModule module : modules) {
try {
@@ -106,19 +104,19 @@ final class DataSourceIngestPipeline {
String displayName = NbBundle.getMessage(this.getClass(),
"IngestJob.progress.dataSourceIngest.displayName",
module.getDisplayName(), dataSource.getName());
- this.job.updateDataSourceIngestProgressBarDisplayName(displayName);
- this.job.switchDataSourceIngestProgressBarToIndeterminate();
+ this.ingestJobPipeline.updateDataSourceIngestProgressBarDisplayName(displayName);
+ this.ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
DataSourceIngestPipeline.ingestManager.setIngestTaskProgress(task, module.getDisplayName());
- logger.log(Level.INFO, "{0} analysis of {1} (jobId={2}) starting", new Object[]{module.getDisplayName(), this.job.getDataSource().getName(), this.job.getId()}); //NON-NLS
- module.process(dataSource, new DataSourceIngestModuleProgress(this.job));
- logger.log(Level.INFO, "{0} analysis of {1} (jobId={2}) finished", new Object[]{module.getDisplayName(), this.job.getDataSource().getName(), this.job.getId()}); //NON-NLS
+ logger.log(Level.INFO, "{0} analysis of {1} (pipeline={2}) starting", new Object[]{module.getDisplayName(), ingestJobPipeline.getDataSource().getName(), ingestJobPipeline.getId()}); //NON-NLS
+ module.process(dataSource, new DataSourceIngestModuleProgress(this.ingestJobPipeline));
+ logger.log(Level.INFO, "{0} analysis of {1} (pipeline={2}) finished", new Object[]{module.getDisplayName(), ingestJobPipeline.getDataSource().getName(), ingestJobPipeline.getId()}); //NON-NLS
} catch (Throwable ex) { // Catch-all exception firewall
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
- if (this.job.isCancelled()) {
+ if (this.ingestJobPipeline.isCancelled()) {
break;
- } else if (this.job.currentDataSourceIngestModuleIsCancelled()) {
- this.job.currentDataSourceIngestModuleCancellationCompleted(currentModule.getDisplayName());
+ } else if (this.ingestJobPipeline.currentDataSourceIngestModuleIsCancelled()) {
+ this.ingestJobPipeline.currentDataSourceIngestModuleCancellationCompleted(currentModule.getDisplayName());
}
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java
index bb83b3c63f..417b7bee96 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestTask.java
@@ -20,13 +20,13 @@ package org.sleuthkit.autopsy.ingest;
final class DataSourceIngestTask extends IngestTask {
- DataSourceIngestTask(DataSourceIngestJob job) {
- super(job);
+ DataSourceIngestTask(IngestJobPipeline ingestJobPipeline) {
+ super(ingestJobPipeline);
}
@Override
void execute(long threadId) throws InterruptedException {
super.setThreadId(threadId);
- getIngestJob().process(this);
+ getIngestJobPipeline().process(this);
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java
index 44473e1b39..2f6604e415 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestPipeline.java
@@ -24,15 +24,14 @@ import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
/**
- * This class manages a sequence of file level ingest modules for a data source
- * ingest job. It starts the modules, runs files through them, and shuts them
+ * This class manages a sequence of file level ingest modules for an
+ * ingest job pipeline. It starts the modules, runs files through them, and shuts them
* down when file level ingest is complete.
*
* This class is thread-safe.
@@ -40,7 +39,7 @@ import org.sleuthkit.datamodel.TskCoreException;
final class FileIngestPipeline {
private static final IngestManager ingestManager = IngestManager.getInstance();
- private final DataSourceIngestJob job;
+ private final IngestJobPipeline ingestJobPipeline;
private final List<PipelineModule> modules = new ArrayList<>();
private Date startTime;
private volatile boolean running;
@@ -50,12 +49,12 @@ final class FileIngestPipeline {
* modules. It starts the modules, runs files through them, and shuts them
* down when file level ingest is complete.
*
- * @param job The data source ingest job that owns the pipeline.
+ * @param ingestJobPipeline The ingest job pipeline that owns the pipeline.
* @param moduleTemplates The ingest module templates that define the
* pipeline.
*/
- FileIngestPipeline(DataSourceIngestJob job, List<IngestModuleTemplate> moduleTemplates) {
- this.job = job;
+ FileIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
+ this.ingestJobPipeline = ingestJobPipeline;
for (IngestModuleTemplate template : moduleTemplates) {
if (template.isFileIngestModuleTemplate()) {
PipelineModule module = new PipelineModule(template.createFileIngestModule(), template.getModuleName());
@@ -103,7 +102,7 @@ final class FileIngestPipeline {
List<IngestModuleError> errors = new ArrayList<>();
for (PipelineModule module : this.modules) {
try {
- module.startUp(new IngestJobContext(this.job));
+ module.startUp(new IngestJobContext(this.ingestJobPipeline));
} catch (Throwable ex) { // Catch-all exception firewall
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
@@ -120,22 +119,31 @@ final class FileIngestPipeline {
*/
synchronized List<IngestModuleError> process(FileIngestTask task) {
List<IngestModuleError> errors = new ArrayList<>();
- if (!this.job.isCancelled()) {
- AbstractFile file = task.getFile();
+ if (!this.ingestJobPipeline.isCancelled()) {
+ AbstractFile file;
+ try {
+ file = task.getFile();
+ } catch (TskCoreException ex) {
+ // In practice this should not occur: the task would never have been
+ // enqueued had the file lookup failed at scheduling time.
+ errors.add(new IngestModuleError("File Ingest Pipeline", ex)); // NON-NLS
+ FileIngestPipeline.ingestManager.setIngestTaskProgressCompleted(task);
+ return errors;
+ }
for (PipelineModule module : this.modules) {
try {
FileIngestPipeline.ingestManager.setIngestTaskProgress(task, module.getDisplayName());
- this.job.setCurrentFileIngestModule(module.getDisplayName(), task.getFile().getName());
+ this.ingestJobPipeline.setCurrentFileIngestModule(module.getDisplayName(), task.getFile().getName());
module.process(file);
} catch (Throwable ex) { // Catch-all exception firewall
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
- if (this.job.isCancelled()) {
+ if (this.ingestJobPipeline.isCancelled()) {
break;
}
}
- if (!this.job.isCancelled()) {
+ if (!this.ingestJobPipeline.isCancelled()) {
// Save any properties that have not already been saved to the database
try{
file.save();
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java
index e4a8209df0..8bb918cce1 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java
@@ -19,29 +19,45 @@
package org.sleuthkit.autopsy.ingest;
import java.util.Objects;
+import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.TskCoreException;
/**
* Represents a single file analysis task, which is defined by a file to analyze
* and the IngestJob/Pipeline to run it on.
*/
final class FileIngestTask extends IngestTask {
+
+ private final long fileId;
+ private AbstractFile file = null;
- private final AbstractFile file;
-
- FileIngestTask(DataSourceIngestJob job, AbstractFile file) {
- super(job);
+ FileIngestTask(IngestJobPipeline ingestJobPipeline, AbstractFile file) {
+ super(ingestJobPipeline);
this.file = file;
+ fileId = file.getId();
}
- AbstractFile getFile() {
+ FileIngestTask(IngestJobPipeline ingestJobPipeline, long fileId) {
+ super(ingestJobPipeline);
+ this.fileId = fileId;
+ }
+
+ long getFileId() {
+ return fileId;
+ }
+
+ synchronized AbstractFile getFile() throws TskCoreException {
+ if (file == null) {
+ file = Case.getCurrentCase().getSleuthkitCase().getAbstractFileById(fileId);
+ }
return file;
}
@Override
void execute(long threadId) throws InterruptedException {
super.setThreadId(threadId);
- getIngestJob().process(this);
+ getIngestJobPipeline().process(this);
}
@Override
@@ -53,22 +69,19 @@ final class FileIngestTask extends IngestTask {
return false;
}
FileIngestTask other = (FileIngestTask) obj;
- DataSourceIngestJob job = getIngestJob();
- DataSourceIngestJob otherJob = other.getIngestJob();
- if (job != otherJob && (job == null || !job.equals(otherJob))) {
+ IngestJobPipeline thisPipeline = getIngestJobPipeline();
+ IngestJobPipeline otherPipeline = other.getIngestJobPipeline();
+ if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) {
return false;
}
- if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) {
- return false;
- }
- return true;
+ return (this.fileId == other.fileId);
}
@Override
public int hashCode() {
int hash = 5;
- hash = 47 * hash + Objects.hashCode(getIngestJob());
- hash = 47 * hash + Objects.hashCode(this.file);
+ hash = 47 * hash + Objects.hashCode(getIngestJobPipeline());
+ hash = 47 * hash + Objects.hashCode(this.fileId);
return hash;
}
}
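Storing the file object ID makes it possible to enqueue a streamed file before its AbstractFile has been loaded, and the revised equals()/hashCode() treat two tasks as the same work item whenever pipeline and ID match, whether or not either task has materialized the file yet. The lazy-materialization idiom in isolation (self-contained; a Map stands in for the case database):

    import java.util.Map;

    class LazyFileRef {
        private final long fileId;
        private final Map<Long, String> caseDb; // stand-in for SleuthkitCase
        private String file = null;             // resolved on first use

        LazyFileRef(Map<Long, String> caseDb, long fileId) {
            this.caseDb = caseDb;
            this.fileId = fileId;
        }

        synchronized String getFile() {
            if (file == null) {
                file = caseDb.get(fileId); // one lookup, cached afterwards
            }
            return file;
        }
    }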
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
index 720313ba15..c73dd940d0 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java
@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
@@ -27,10 +28,12 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
-
+import java.util.logging.Level;
import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.DataSource;
/**
* Analyzes one or more data sources using a set of ingest modules specified via
@@ -60,30 +63,40 @@ public final class IngestJob {
return displayName;
}
}
-
+
+ /**
+ * Ingest job mode.
+ */
+ enum Mode {
+ BATCH,
+ STREAMING
+ }
+
+ private static final Logger logger = Logger.getLogger(IngestJob.class.getName());
private final static AtomicLong nextId = new AtomicLong(0L);
private final long id;
- private final Map<Long, DataSourceIngestJob> dataSourceJobs;
+ private final List<Content> dataSources = new ArrayList<>();
+ private final List<AbstractFile> files = new ArrayList<>();
+ private final Mode ingestMode;
+ private final Map<Long, IngestJobPipeline> ingestJobPipelines;
private final AtomicInteger incompleteJobsCount;
+ private final IngestJobSettings settings;
private volatile CancellationReason cancellationReason;
/**
* Constructs an ingest job that analyzes one or more data sources using a
- * set of ingest modules specified via ingest job settings.
+ * set of ingest modules specified via ingest settings.
*
* @param dataSources The data sources to be ingested.
- * @param settings The ingest job settings.
- * @param doUI Whether or not this job should use progress bars,
- * message boxes for errors, etc.
+ * @param settings The ingest settings.
*/
- IngestJob(Collection<Content> dataSources, IngestJobSettings settings, boolean doUI) {
+ IngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
this.id = IngestJob.nextId.getAndIncrement();
- this.dataSourceJobs = new ConcurrentHashMap<>();
- for (Content dataSource : dataSources) {
- DataSourceIngestJob dataSourceIngestJob = new DataSourceIngestJob(this, dataSource, settings, doUI);
- this.dataSourceJobs.put(dataSourceIngestJob.getId(), dataSourceIngestJob);
- }
- incompleteJobsCount = new AtomicInteger(dataSourceJobs.size());
+ this.settings = settings;
+ this.ingestJobPipelines = new ConcurrentHashMap<>();
+ this.ingestMode = Mode.BATCH;
+ this.dataSources.addAll(dataSources);
+ incompleteJobsCount = new AtomicInteger(dataSources.size());
cancellationReason = CancellationReason.NOT_CANCELLED;
}
@@ -92,18 +105,28 @@ public final class IngestJob {
* ingest modules specified via ingest job settings. Either all of the files
* in the data source or a given subset of the files will be analyzed.
*
- * @param dataSource The data source to be analyzed
+ * @param dataSource The data source to be analyzed.
* @param files A subset of the files for the data source.
* @param settings The ingest job settings.
- * @param doUI Whether or not this job should use progress bars,
- * message boxes for errors, etc.
*/
- IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean doUI) {
+ IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
+ this(Arrays.asList(dataSource), settings);
+ this.files.addAll(files);
+ }
+
+ /**
+ * Constructs an ingest job that analyzes one data source, possibly using
+ * an ingest stream.
+ *
+ * @param dataSource The data source to be analyzed.
+ * @param ingestMode The ingest mode (batch or streaming).
+ * @param settings The ingest job settings.
+ */
+ IngestJob(DataSource dataSource, Mode ingestMode, IngestJobSettings settings) {
this.id = IngestJob.nextId.getAndIncrement();
- this.dataSourceJobs = new ConcurrentHashMap<>();
- DataSourceIngestJob dataSourceIngestJob = new DataSourceIngestJob(this, dataSource, files, settings, doUI);
- this.dataSourceJobs.put(dataSourceIngestJob.getId(), dataSourceIngestJob);
- incompleteJobsCount = new AtomicInteger(dataSourceJobs.size());
+ this.ingestJobPipelines = new ConcurrentHashMap<>();
+ this.dataSources.add(dataSource);
+ this.settings = settings;
+ this.ingestMode = ingestMode;
+ incompleteJobsCount = new AtomicInteger(1);
cancellationReason = CancellationReason.NOT_CANCELLED;
}
@@ -124,18 +147,35 @@ public final class IngestJob {
* @return True or false.
*/
boolean hasIngestPipeline() {
- /**
- * TODO: This could actually be done more simply by adding a method to
- * the IngestJobSettings to check for at least one enabled ingest module
- * template. The test could then be done in the ingest manager before
- * even constructing an ingest job.
- */
- for (DataSourceIngestJob dataSourceJob : this.dataSourceJobs.values()) {
- if (dataSourceJob.hasIngestPipeline()) {
- return true;
- }
+ return (!settings.getEnabledIngestModuleTemplates().isEmpty());
+ }
+
+ /**
+ * Add a set of files (by object ID) to be ingested.
+ *
+ * @param fileObjIds The object IDs of the files to be ingested.
+ */
+ void addStreamingIngestFiles(List<Long> fileObjIds) {
+ if (ingestJobPipelines.isEmpty()) {
+ logger.log(Level.SEVERE, "Attempted to add streaming ingest files with no IngestJobPipeline");
+ return;
}
- return false;
+ // Streaming ingest jobs will only have one data source
+ IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
+ streamingIngestPipeline.addStreamingIngestFiles(fileObjIds);
+ }
+
+ /**
+ * Start data source processing for streaming ingest.
+ */
+ void processStreamingIngestDataSource() {
+ if (ingestJobPipelines.isEmpty()) {
+ logger.log(Level.SEVERE, "Attempted to start data source ingest with no IngestJobPipeline");
+ return;
+ }
+ // Streaming ingest jobs will only have one data source
+ IngestJobPipeline streamingIngestPipeline = ingestJobPipelines.values().iterator().next();
+ streamingIngestPipeline.processStreamingIngestDataSource();
}
/**
@@ -145,17 +185,32 @@ public final class IngestJob {
* @return A collection of ingest module start up errors, empty on success.
*/
List<IngestModuleError> start() {
+
/*
- * Try to start each data source ingest job. Note that there is a not
- * unwarranted assumption here that if there is going to be a module
- * startup failure, it will be for the first data source ingest job.
+ * Set up the pipeline(s)
+ */
+ if (files.isEmpty()) {
+ for (Content dataSource : dataSources) {
+ IngestJobPipeline ingestJobPipeline = new IngestJobPipeline(this, dataSource, settings);
+ this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
+ }
+ } else {
+ IngestJobPipeline ingestJobPipeline = new IngestJobPipeline(this, dataSources.get(0), files, settings);
+ this.ingestJobPipelines.put(ingestJobPipeline.getId(), ingestJobPipeline);
+ }
+ incompleteJobsCount.set(ingestJobPipelines.size());
+
+ /*
+ * Try to start each data source ingest job. Note that there is an
+ * assumption here that if there is going to be a module
+ * startup failure, it will be for the first ingest job pipeline.
*
* TODO (RC): Consider separating module start up from pipeline startup
* so that no processing is done if this assumption is false.
*/
List<IngestModuleError> errors = new ArrayList<>();
- for (DataSourceIngestJob dataSourceJob : this.dataSourceJobs.values()) {
- errors.addAll(dataSourceJob.start());
+ for (IngestJobPipeline ingestJobPipeline : this.ingestJobPipelines.values()) {
+ errors.addAll(ingestJobPipeline.start());
if (errors.isEmpty() == false) {
break;
}
@@ -165,7 +220,7 @@ public final class IngestJob {
* Handle start up success or failure.
*/
if (errors.isEmpty()) {
- for (DataSourceIngestJob dataSourceJob : this.dataSourceJobs.values()) {
+ for (IngestJobPipeline dataSourceJob : this.ingestJobPipelines.values()) {
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, dataSourceJob.getId(), dataSourceJob.getDataSource());
}
} else {
@@ -174,6 +229,15 @@ public final class IngestJob {
return errors;
}
+
+ /**
+ * Get the ingest mode for this job (batch or streaming).
+ *
+ * @return the ingest mode.
+ */
+ Mode getIngestMode() {
+ return ingestMode;
+ }
/**
* Gets a snapshot of the progress of this ingest job.
@@ -187,6 +251,8 @@ public final class IngestJob {
/**
* Gets a snapshot of the progress of this ingest job.
*
+ * @param getIngestTasksSnapshot Whether or not to include ingest task stats in the snapshot.
+ *
* @return The snapshot.
*/
public ProgressSnapshot getSnapshot(boolean getIngestTasksSnapshot) {
@@ -199,9 +265,9 @@ public final class IngestJob {
*
* @return A list of data source ingest job progress snapshots.
*/
- List<DataSourceIngestJob.Snapshot> getDataSourceIngestJobSnapshots() {
- List<DataSourceIngestJob.Snapshot> snapshots = new ArrayList<>();
- this.dataSourceJobs.values().stream().forEach((dataSourceJob) -> {
+ List<Snapshot> getDataSourceIngestJobSnapshots() {
+ List<Snapshot> snapshots = new ArrayList<>();
+ this.ingestJobPipelines.values().stream().forEach((dataSourceJob) -> {
snapshots.add(dataSourceJob.getSnapshot(true));
});
return snapshots;
@@ -230,7 +296,7 @@ public final class IngestJob {
*/
public void cancel(CancellationReason reason) {
this.cancellationReason = reason;
- this.dataSourceJobs.values().stream().forEach((job) -> {
+ this.ingestJobPipelines.values().stream().forEach((job) -> {
job.cancel(reason);
});
}
@@ -255,17 +321,17 @@ public final class IngestJob {
}
/**
- * Provides a callback for completed data source ingest jobs, allowing this
+ * Provides a callback for completed ingest job pipelines, allowing this
* ingest job to notify the ingest manager when it is complete.
*
- * @param job A completed data source ingest job.
+ * @param ingestJobPipeline A completed ingest job pipeline.
*/
- void dataSourceJobFinished(DataSourceIngestJob job) {
+ void ingestJobPipelineFinished(IngestJobPipeline ingestJobPipeline) {
IngestManager ingestManager = IngestManager.getInstance();
- if (!job.isCancelled()) {
- ingestManager.fireDataSourceAnalysisCompleted(id, job.getId(), job.getDataSource());
+ if (!ingestJobPipeline.isCancelled()) {
+ ingestManager.fireDataSourceAnalysisCompleted(id, ingestJobPipeline.getId(), ingestJobPipeline.getDataSource());
} else {
- IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, job.getId(), job.getDataSource());
+ IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, ingestJobPipeline.getId(), ingestJobPipeline.getDataSource());
}
if (incompleteJobsCount.decrementAndGet() == 0) {
ingestManager.finishIngestJob(this);
@@ -290,9 +356,9 @@ public final class IngestJob {
*/
public final class DataSourceProcessingSnapshot {
- private final DataSourceIngestJob.Snapshot snapshot;
+ private final Snapshot snapshot;
- private DataSourceProcessingSnapshot(DataSourceIngestJob.Snapshot snapshot) {
+ private DataSourceProcessingSnapshot(Snapshot snapshot) {
this.snapshot = snapshot;
}
@@ -346,13 +412,13 @@ public final class IngestJob {
fileIngestRunning = false;
fileIngestStartTime = null;
dataSourceProcessingSnapshots = new ArrayList<>();
- for (DataSourceIngestJob dataSourceJob : dataSourceJobs.values()) {
- DataSourceIngestJob.Snapshot snapshot = dataSourceJob.getSnapshot(getIngestTasksSnapshot);
+ for (IngestJobPipeline pipeline : ingestJobPipelines.values()) {
+ Snapshot snapshot = pipeline.getSnapshot(getIngestTasksSnapshot);
dataSourceProcessingSnapshots.add(new DataSourceProcessingSnapshot(snapshot));
if (null == dataSourceModule) {
DataSourceIngestPipeline.PipelineModule module = snapshot.getDataSourceLevelIngestModule();
if (null != module) {
- dataSourceModule = new DataSourceIngestModuleHandle(dataSourceJobs.get(snapshot.getJobId()), module);
+ dataSourceModule = new DataSourceIngestModuleHandle(ingestJobPipelines.get(snapshot.getJobId()), module);
}
}
if (snapshot.getFileIngestIsRunning()) {
@@ -433,7 +499,7 @@ public final class IngestJob {
*/
public static class DataSourceIngestModuleHandle {
- private final DataSourceIngestJob job;
+ private final IngestJobPipeline ingestJobPipeline;
private final DataSourceIngestPipeline.PipelineModule module;
private final boolean cancelled;
@@ -442,14 +508,13 @@ public final class IngestJob {
* used to get basic information about the module and to request
* cancellation of the module.
*
- * @param job The data source ingest job that owns the data source
- * level ingest module.
+ * @param ingestJobPipeline The ingest job pipeline that owns the data source level ingest module.
* @param module The data source level ingest module.
*/
- private DataSourceIngestModuleHandle(DataSourceIngestJob job, DataSourceIngestPipeline.PipelineModule module) {
- this.job = job;
+ private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.PipelineModule module) {
+ this.ingestJobPipeline = ingestJobPipeline;
this.module = module;
- this.cancelled = job.currentDataSourceIngestModuleIsCancelled();
+ this.cancelled = ingestJobPipeline.currentDataSourceIngestModuleIsCancelled();
}
/**
@@ -499,8 +564,8 @@ public final class IngestJob {
* modules participating in this workaround will need to consult the
* cancelled flag in the adapters.
*/
- if (this.job.getCurrentDataSourceIngestModule() == this.module) {
- this.job.cancelCurrentDataSourceIngestModule();
+ if (this.ingestJobPipeline.getCurrentDataSourceIngestModule() == this.module) {
+ this.ingestJobPipeline.cancelCurrentDataSourceIngestModule();
}
}
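Taken together, the streaming changes split the old "schedule everything in start()" flow in two. A hedged sketch of the calling sequence (driver code is illustrative; in this patch these calls are made by IngestManager and IngestJobInputStream, all inside the ingest package, and dataSource is an org.sleuthkit.datamodel.DataSource):

    // Streaming: start with file-level modules only, feed files as they arrive,
    // then run data source level modules once the data source is complete.
    IngestJob job = new IngestJob(dataSource, IngestJob.Mode.STREAMING, settings);
    List<IngestModuleError> errors = job.start();           // waits for streamed files

    job.addStreamingIngestFiles(Arrays.asList(1L, 2L, 3L)); // IDs of committed files
    job.addStreamingIngestFiles(Arrays.asList(4L, 5L));

    job.processStreamingIngestDataSource();                 // schedule DS-level analysis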
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java
index 74da1d814b..697e512ad8 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java
@@ -28,10 +28,10 @@ import org.sleuthkit.datamodel.Content;
*/
public final class IngestJobContext {
- private final DataSourceIngestJob ingestJob;
+ private final IngestJobPipeline ingestJobPipeline;
- IngestJobContext(DataSourceIngestJob ingestJob) {
- this.ingestJob = ingestJob;
+ IngestJobContext(IngestJobPipeline ingestJobPipeline) {
+ this.ingestJobPipeline = ingestJobPipeline;
}
/**
@@ -40,7 +40,7 @@ public final class IngestJobContext {
* @return The context string.
*/
public String getExecutionContext() {
- return this.ingestJob.getExecutionContext();
+ return this.ingestJobPipeline.getExecutionContext();
}
/**
@@ -49,7 +49,7 @@ public final class IngestJobContext {
* @return The data source.
*/
public Content getDataSource() {
- return this.ingestJob.getDataSource();
+ return this.ingestJobPipeline.getDataSource();
}
/**
@@ -58,7 +58,7 @@ public final class IngestJobContext {
* @return The ingest job identifier.
*/
public long getJobId() {
- return this.ingestJob.getId();
+ return this.ingestJobPipeline.getId();
}
/**
@@ -83,7 +83,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean dataSourceIngestIsCancelled() {
- return this.ingestJob.currentDataSourceIngestModuleIsCancelled() || this.ingestJob.isCancelled();
+ return this.ingestJobPipeline.currentDataSourceIngestModuleIsCancelled() || this.ingestJobPipeline.isCancelled();
}
/**
@@ -94,7 +94,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean fileIngestIsCancelled() {
- return this.ingestJob.isCancelled();
+ return this.ingestJobPipeline.isCancelled();
}
/**
@@ -104,7 +104,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean processingUnallocatedSpace() {
- return this.ingestJob.shouldProcessUnallocatedSpace();
+ return this.ingestJobPipeline.shouldProcessUnallocatedSpace();
}
/**
@@ -127,7 +127,7 @@ public final class IngestJobContext {
* @param files The files to be added.
*/
public void addFilesToJob(List<AbstractFile> files) {
- this.ingestJob.addFiles(files);
+ this.ingestJobPipeline.addFiles(files);
}
}
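Modules are insulated from the rename: they still receive an IngestJobContext and poll it for cancellation. A typical cancellation-aware file module (the module itself is hypothetical; the context methods are the ones shown above):

    import org.sleuthkit.datamodel.AbstractFile;

    class ExampleFileModule implements FileIngestModule {

        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) {
            this.context = context;
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            if (context.fileIngestIsCancelled()) {
                return ProcessResult.OK; // stop promptly once the job is cancelled
            }
            // ... analyze the file ...
            return ProcessResult.OK;
        }

        @Override
        public void shutDown() {
        }
    }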
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java
new file mode 100644
index 0000000000..4e8e9c4019
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java
@@ -0,0 +1,85 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.ingest;
+
+import java.util.List;
+
+/**
+ * Implementation of IngestStream. Collects data from the data source
+ * processor and sends it to the ingest pipeline.
+ */
+class IngestJobInputStream implements IngestStream {
+
+ private final IngestJob ingestJob;
+ private boolean closed = false;
+ private boolean isStopped = false;
+ private final IngestJobStartResult ingestJobStartResult;
+
+ /**
+ * Create an ingest stream object, saving a reference to the associated
+ * IngestJob.
+ *
+ * @param ingestJob The IngestJob associated with this stream.
+ */
+ IngestJobInputStream(IngestJob ingestJob) {
+ this.ingestJob = ingestJob;
+ ingestJobStartResult = IngestManager.getInstance().startIngestJob(ingestJob);
+ }
+
+ /**
+ * Check the result from starting the ingest job.
+ *
+ * @return The IngestJobStartResult object returned from IngestManager.startIngestJob().
+ */
+ IngestJobStartResult getIngestJobStartResult() {
+ return ingestJobStartResult;
+ }
+
+ @Override
+ public synchronized void addFiles(List<Long> fileObjectIds) throws IngestStreamClosedException {
+ if (closed) {
+ throw new IngestStreamClosedException("Can not add files - ingest stream is closed");
+ }
+ ingestJob.addStreamingIngestFiles(fileObjectIds);
+ }
+
+ @Override
+ public synchronized void close() {
+ closed = true;
+ ingestJob.processStreamingIngestDataSource();
+ }
+
+ @Override
+ public synchronized boolean isClosed() {
+ return closed;
+ }
+
+ @Override
+ public synchronized void stop() {
+ this.closed = true;
+ this.isStopped = true;
+
+ ingestJob.cancel(IngestJob.CancellationReason.USER_CANCELLED);
+ }
+
+ @Override
+ public synchronized boolean wasStopped() {
+ return isStopped;
+ }
+}
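From the producer side the contract is: addFiles() while the stream is open, then close() exactly once when the data source is fully committed (which triggers data source level analysis), or stop() to cancel everything. A hedged sketch of a producer loop (moreBatches() and nextBatch() are illustrative stand-ins for a data source processor's own bookkeeping):

    void publishFiles(IngestStream stream) {
        while (moreBatches()) {
            List<Long> batch = nextBatch(); // object IDs already committed to the case DB
            try {
                stream.addFiles(batch);
            } catch (IngestStreamClosedException ex) {
                return; // the consumer stopped the stream; stop producing
            }
        }
        stream.close(); // no more files: kick off data source level ingest
    }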
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java
similarity index 83%
rename from Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java
rename to Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java
index 4527892323..5921071e65 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestJob.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java
@@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.ingest;
-import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
@@ -39,10 +38,9 @@ import org.openide.util.NbBundle;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestPipeline.PipelineModule;
-import org.sleuthkit.autopsy.ingest.IngestJob.CancellationReason;
import org.sleuthkit.autopsy.ingest.IngestTasksScheduler.IngestJobTasksSnapshot;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
@@ -59,11 +57,11 @@ import org.sleuthkit.autopsy.python.FactoryClassNameNormalizer;
* Encapsulates a data source and the ingest module pipelines used to process
* it.
*/
-public final class DataSourceIngestJob {
+final class IngestJobPipeline {
private static String AUTOPSY_MODULE_PREFIX = "org.sleuthkit.autopsy";
- private static final Logger logger = Logger.getLogger(DataSourceIngestJob.class.getName());
+ private static final Logger logger = Logger.getLogger(IngestJobPipeline.class.getName());
// to match something like: "org.python.proxies.GPX_Parser_Module$GPXParserFileIngestModuleFactory$14"
private static final Pattern JYTHON_REGEX = Pattern.compile("org\\.python\\.proxies\\.(.+?)\\$(.+?)(\\$[0-9]*)?$");
@@ -78,7 +76,8 @@ public final class DataSourceIngestJob {
private static final AtomicLong nextJobId = new AtomicLong(0L);
private final long id;
private final IngestJobSettings settings;
- private final Content dataSource;
+ private Content dataSource = null;
+ private final IngestJob.Mode ingestMode;
private final List<AbstractFile> files = new ArrayList<>();
/**
@@ -90,22 +89,26 @@ public final class DataSourceIngestJob {
* Setting up for processing.
*/
INITIALIZATION,
+ /**
+ * Running only file ingest modules (used only for streaming ingest).
+ */
+ FIRST_STAGE_FILES_ONLY,
/**
* Running high priority data source level ingest modules and file level
* ingest modules.
*/
- FIRST,
+ FIRST_STAGE_FILES_AND_DATASOURCE,
/**
* Running lower priority, usually long-running, data source level
* ingest modules.
*/
- SECOND,
+ SECOND_STAGE,
/**
* Cleaning up.
*/
FINALIZATION
};
- private volatile Stages stage = DataSourceIngestJob.Stages.INITIALIZATION;
+ private volatile Stages stage = IngestJobPipeline.Stages.INITIALIZATION;
private final Object stageCompletionCheckLock = new Object();
/**
@@ -189,15 +192,13 @@ public final class DataSourceIngestJob {
* Constructs an object that encapsulates a data source and the ingest
* module pipelines used to analyze it.
*
- * @param parentJob The ingest job of which this data source ingest
- * job is a part.
- * @param dataSource The data source to be ingested.
- * @param settings The settings for the ingest job.
- * @param runInteractively Whether or not this job should use NetBeans
- * progress handles.
+ * @param parentJob The ingest job of which this data source ingest job is
+ * a part.
+ * @param dataSource The data source to be ingested.
+ * @param settings The settings for the ingest job.
*/
- DataSourceIngestJob(IngestJob parentJob, Content dataSource, IngestJobSettings settings, boolean runInteractively) {
- this(parentJob, dataSource, Collections.emptyList(), settings, runInteractively);
+ IngestJobPipeline(IngestJob parentJob, Content dataSource, IngestJobSettings settings) {
+ this(parentJob, dataSource, Collections.emptyList(), settings);
}
/**
@@ -205,22 +206,22 @@ public final class DataSourceIngestJob {
* module pipelines used to analyze it. Either all of the files in the data
* source or a given subset of the files will be analyzed.
*
- * @param parentJob The ingest job of which this data source ingest
- * job is a part.
- * @param dataSource The data source to be ingested.
- * @param files A subset of the files for the data source.
- * @param settings The settings for the ingest job.
- * @param runInteractively Whether or not this job should use NetBeans
- * progress handles.
+ * @param parentJob The ingest job of which this data source ingest job is
+ * a part.
+ * @param dataSource The data source to be ingested.
+ * @param files A subset of the files for the data source.
+ * @param settings The settings for the ingest job.
*/
- DataSourceIngestJob(IngestJob parentJob, Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean runInteractively) {
+ IngestJobPipeline(IngestJob parentJob, Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
this.parentJob = parentJob;
- this.id = DataSourceIngestJob.nextJobId.getAndIncrement();
+ this.id = IngestJobPipeline.nextJobId.getAndIncrement();
this.dataSource = dataSource;
this.files.addAll(files);
+ this.ingestMode = parentJob.getIngestMode();
this.settings = settings;
- this.doUI = runInteractively;
+ this.doUI = RuntimeProperties.runningWithGUI();
this.createTime = new Date().getTime();
+ this.stage = Stages.INITIALIZATION;
this.createIngestPipelines();
}
@@ -305,7 +306,6 @@ public final class DataSourceIngestJob {
Map<String, IngestModuleTemplate> fileModuleTemplates = new LinkedHashMap<>();
// mappings for jython modules. These mappings are only used to determine modules in the pipelineconfig.xml.
-
Map<String, IngestModuleTemplate> jythonDataSourceModuleTemplates = new LinkedHashMap<>();
Map<String, IngestModuleTemplate> jythonFileModuleTemplates = new LinkedHashMap<>();
@@ -323,13 +323,13 @@ public final class DataSourceIngestJob {
* ordered lists of ingest module templates for each ingest pipeline.
*/
IngestPipelinesConfiguration pipelineConfigs = IngestPipelinesConfiguration.getInstance();
- List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = DataSourceIngestJob.getConfiguredIngestModuleTemplates(
+ List<IngestModuleTemplate> firstStageDataSourceModuleTemplates = IngestJobPipeline.getConfiguredIngestModuleTemplates(
dataSourceModuleTemplates, jythonDataSourceModuleTemplates, pipelineConfigs.getStageOneDataSourceIngestPipelineConfig());
- List<IngestModuleTemplate> fileIngestModuleTemplates = DataSourceIngestJob.getConfiguredIngestModuleTemplates(
+ List<IngestModuleTemplate> fileIngestModuleTemplates = IngestJobPipeline.getConfiguredIngestModuleTemplates(
fileModuleTemplates, jythonFileModuleTemplates, pipelineConfigs.getFileIngestPipelineConfig());
- List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = DataSourceIngestJob.getConfiguredIngestModuleTemplates(
+ List<IngestModuleTemplate> secondStageDataSourceModuleTemplates = IngestJobPipeline.getConfiguredIngestModuleTemplates(
dataSourceModuleTemplates, null, pipelineConfigs.getStageTwoDataSourceIngestPipelineConfig());
/**
@@ -508,6 +508,10 @@ public final class DataSourceIngestJob {
* @return A collection of ingest module startup errors, empty on success.
*/
List<IngestModuleError> start() {
+ if (dataSource == null) {
+ // TODO - Remove once data source is always present during initialization
+ throw new IllegalStateException("Ingest started before setting data source");
+ }
List<IngestModuleError> errors = startUpIngestPipelines();
if (errors.isEmpty()) {
try {
@@ -515,9 +519,15 @@ public final class DataSourceIngestJob {
} catch (TskCoreException | NoCurrentCaseException ex) {
logErrorMessage(Level.WARNING, "Failed to add ingest job info to case database", ex); //NON-NLS
}
+
if (this.hasFirstStageDataSourceIngestPipeline() || this.hasFileIngestPipeline()) {
- logInfoMessage("Starting first stage analysis"); //NON-NLS
- this.startFirstStage();
+ if (ingestMode == IngestJob.Mode.BATCH) {
+ logInfoMessage("Starting first stage analysis"); //NON-NLS
+ this.startFirstStage();
+ } else {
+ logInfoMessage("Preparing for first stage analysis"); //NON-NLS
+ this.startFileIngestStreaming();
+ }
} else if (this.hasSecondStageDataSourceIngestPipeline()) {
logInfoMessage("Starting second stage analysis"); //NON-NLS
this.startSecondStage();
@@ -581,7 +591,7 @@ public final class DataSourceIngestJob {
* Starts the first stage of this job.
*/
private void startFirstStage() {
- this.stage = DataSourceIngestJob.Stages.FIRST;
+ this.stage = IngestJobPipeline.Stages.FIRST_STAGE_FILES_AND_DATASOURCE;
if (this.hasFileIngestPipeline()) {
synchronized (this.fileIngestProgressLock) {
@@ -614,13 +624,13 @@ public final class DataSourceIngestJob {
*/
if (this.hasFirstStageDataSourceIngestPipeline() && this.hasFileIngestPipeline()) {
logInfoMessage("Scheduling first stage data source and file level analysis tasks"); //NON-NLS
- DataSourceIngestJob.taskScheduler.scheduleIngestTasks(this);
+ IngestJobPipeline.taskScheduler.scheduleIngestTasks(this);
} else if (this.hasFirstStageDataSourceIngestPipeline()) {
logInfoMessage("Scheduling first stage data source level analysis tasks"); //NON-NLS
- DataSourceIngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
+ IngestJobPipeline.taskScheduler.scheduleDataSourceIngestTask(this);
} else {
logInfoMessage("Scheduling file level analysis tasks, no first stage data source level analysis configured"); //NON-NLS
- DataSourceIngestJob.taskScheduler.scheduleFileIngestTasks(this, this.files);
+ IngestJobPipeline.taskScheduler.scheduleFileIngestTasks(this, this.files);
/**
* No data source ingest task has been scheduled for this stage, and
@@ -634,12 +644,75 @@ public final class DataSourceIngestJob {
}
}
+ /**
+ * Prepares for file ingest. Used for streaming ingest. Does not schedule
+ * any file tasks - those will come from calls to addStreamingIngestFiles().
+ */
+ private void startFileIngestStreaming() {
+ synchronized (this.stageCompletionCheckLock) {
+ this.stage = IngestJobPipeline.Stages.FIRST_STAGE_FILES_ONLY;
+ }
+
+ if (this.hasFileIngestPipeline()) {
+ synchronized (this.fileIngestProgressLock) {
+ this.estimatedFilesToProcess = 0; // Set to indeterminate until the data source is complete
+ }
+ }
+
+ if (this.doUI) {
+ if (this.hasFileIngestPipeline()) {
+ this.startFileIngestProgressBar();
+ }
+ }
+
+ logInfoMessage("Waiting for streaming files"); //NON-NLS
+ }
+
+ /**
+ * Start data source ingest. Used for streaming ingest when the data source
+ * is not ready when ingest starts.
+ */
+ private void startDataSourceIngestStreaming() {
+
+ // Now that the data source is complete, we can get the estimated number of
+ // files and switch to a determinate progress bar.
+ synchronized (fileIngestProgressLock) {
+ if (null != this.fileIngestProgress) {
+ estimatedFilesToProcess = dataSource.accept(new GetFilesCountVisitor());
+ fileIngestProgress.switchToDeterminate((int) estimatedFilesToProcess);
+ }
+ }
+
+ if (this.doUI) {
+ /**
+ * Start the first stage data source ingest progress bar.
+ */
+ if (this.hasFirstStageDataSourceIngestPipeline()) {
+ this.startDataSourceIngestProgressBar();
+ }
+ }
+
+ /**
+ * Make the first stage data source level ingest pipeline the current
+ * data source level pipeline.
+ */
+ synchronized (this.dataSourceIngestPipelineLock) {
+ this.currentDataSourceIngestPipeline = this.firstStageDataSourceIngestPipeline;
+ }
+
+ logInfoMessage("Scheduling first stage data source level analysis tasks"); //NON-NLS
+ synchronized (this.stageCompletionCheckLock) {
+ this.stage = IngestJobPipeline.Stages.FIRST_STAGE_FILES_AND_DATASOURCE;
+ IngestJobPipeline.taskScheduler.scheduleDataSourceIngestTask(this);
+ }
+ }
+
/**
* Starts the second stage of this ingest job.
*/
private void startSecondStage() {
logInfoMessage("Starting second stage analysis"); //NON-NLS
- this.stage = DataSourceIngestJob.Stages.SECOND;
+ this.stage = IngestJobPipeline.Stages.SECOND_STAGE;
if (this.doUI) {
this.startDataSourceIngestProgressBar();
}
@@ -647,7 +720,7 @@ public final class DataSourceIngestJob {
this.currentDataSourceIngestPipeline = this.secondStageDataSourceIngestPipeline;
}
logInfoMessage("Scheduling second stage data source level analysis tasks"); //NON-NLS
- DataSourceIngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
+ IngestJobPipeline.taskScheduler.scheduleDataSourceIngestTask(this);
}
/**
@@ -669,12 +742,12 @@ public final class DataSourceIngestJob {
// the user wants to cancel only the currently executing
// data source ingest module or the entire ingest job.
DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel();
- String dialogTitle = NbBundle.getMessage(DataSourceIngestJob.this.getClass(), "IngestJob.cancellationDialog.title");
+ String dialogTitle = NbBundle.getMessage(IngestJobPipeline.this.getClass(), "IngestJob.cancellationDialog.title");
JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE);
if (panel.cancelAllDataSourceIngestModules()) {
- DataSourceIngestJob.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
+ IngestJobPipeline.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
} else {
- DataSourceIngestJob.this.cancelCurrentDataSourceIngestModule();
+ IngestJobPipeline.this.cancelCurrentDataSourceIngestModule();
}
return true;
}
@@ -701,7 +774,7 @@ public final class DataSourceIngestJob {
// the cancel button on the progress bar and the OK button
// of a cancelation confirmation dialog supplied by
// NetBeans.
- DataSourceIngestJob.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
+ IngestJobPipeline.this.cancel(IngestJob.CancellationReason.USER_CANCELLED);
return true;
}
});
@@ -716,13 +789,48 @@ public final class DataSourceIngestJob {
* completed and does a stage transition if they are.
*/
private void checkForStageCompleted() {
+ if (ingestMode == IngestJob.Mode.BATCH) {
+ checkForStageCompletedBatch();
+ } else {
+ checkForStageCompletedStreaming();
+ }
+ }
+
+ /**
+ * Checks to see if the ingest tasks for the current stage of this job are
+ * completed and does a stage transition if they are.
+ */
+ private void checkForStageCompletedBatch() {
synchronized (this.stageCompletionCheckLock) {
- if (DataSourceIngestJob.taskScheduler.tasksForJobAreCompleted(this)) {
+ if (IngestJobPipeline.taskScheduler.currentTasksAreCompleted(this)) {
switch (this.stage) {
- case FIRST:
+ case FIRST_STAGE_FILES_AND_DATASOURCE:
this.finishFirstStage();
break;
- case SECOND:
+ case SECOND_STAGE:
+ this.finish();
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Checks to see if the ingest tasks for the current stage of this job are
+ * completed and does a stage transition if they are.
+ */
+ private void checkForStageCompletedStreaming() {
+ synchronized (this.stageCompletionCheckLock) {
+ if (IngestJobPipeline.taskScheduler.currentTasksAreCompleted(this)) {
+ switch (this.stage) {
+ case FIRST_STAGE_FILES_ONLY:
+ // Nothing to do here - need to wait for the data source
+ break;
+ case FIRST_STAGE_FILES_AND_DATASOURCE:
+ // Finish file and data source ingest, start second stage (if applicable)
+ this.finishFirstStage();
+ break;
+ case SECOND_STAGE:
this.finish();
break;
}
@@ -786,7 +894,7 @@ public final class DataSourceIngestJob {
*/
private void finish() {
logInfoMessage("Finished analysis"); //NON-NLS
- this.stage = DataSourceIngestJob.Stages.FINALIZATION;
+ this.stage = IngestJobPipeline.Stages.FINALIZATION;
if (this.doUI) {
// Finish the second stage data source ingest progress bar, if it hasn't
@@ -818,7 +926,7 @@ public final class DataSourceIngestJob {
logErrorMessage(Level.WARNING, "Failed to set job end date in case database", ex);
}
}
- this.parentJob.dataSourceJobFinished(this);
+ this.parentJob.ingestJobPipelineFinished(this);
}
/**
@@ -853,7 +961,7 @@ public final class DataSourceIngestJob {
}
} finally {
- DataSourceIngestJob.taskScheduler.notifyTaskCompleted(task);
+ IngestJobPipeline.taskScheduler.notifyTaskCompleted(task);
this.checkForStageCompleted();
}
}
@@ -874,7 +982,18 @@ public final class DataSourceIngestJob {
if (!this.isCancelled()) {
FileIngestPipeline pipeline = this.fileIngestPipelinesQueue.take();
if (!pipeline.isEmpty()) {
- AbstractFile file = task.getFile();
+ AbstractFile file;
+ try {
+ file = task.getFile();
+ } catch (TskCoreException ex) {
+ // In practice, this task would never have been enqueued since the file
+ // lookup would have failed there.
+ List<IngestModuleError> errors = new ArrayList<>();
+ errors.add(new IngestModuleError("Ingest Job Pipeline", ex));
+ logIngestModuleErrors(errors);
+ this.fileIngestPipelinesQueue.put(pipeline);
+ return;
+ }
synchronized (this.fileIngestProgressLock) {
++this.processedFiles;
@@ -918,11 +1037,40 @@ public final class DataSourceIngestJob {
this.fileIngestPipelinesQueue.put(pipeline);
}
} finally {
- DataSourceIngestJob.taskScheduler.notifyTaskCompleted(task);
+ IngestJobPipeline.taskScheduler.notifyTaskCompleted(task);
this.checkForStageCompleted();
}
}
+ /**
+ * Add a list of files (by object ID) to the ingest queue. Must call start()
+ * prior to adding files.
+ *
+ * @param fileObjIds List of newly added file IDs.
+ */
+ void addStreamingIngestFiles(List<Long> fileObjIds) {
+
+ // Return if there are no file ingest modules enabled.
+ if (!hasFileIngestPipeline()) {
+ return;
+ }
+
+ if (stage.equals(Stages.FIRST_STAGE_FILES_ONLY)) {
+ IngestJobPipeline.taskScheduler.scheduleStreamedFileIngestTasks(this, fileObjIds);
+ } else {
+ logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported");
+ }
+ }
+
+ /**
+ * Starts data source ingest. Should be called after the data source
+ * processor has finished (i.e., all files are in the database).
+ */
+ void processStreamingIngestDataSource() {
+ startDataSourceIngestStreaming();
+ checkForStageCompleted();
+ }
+
/**
* Adds more files from the data source for this job to the job, e.g., adds
* extracted or carved files. Not currently supported for the second stage
@@ -931,8 +1079,9 @@ public final class DataSourceIngestJob {
* @param files A list of the files to add.
*/
void addFiles(List<AbstractFile> files) {
- if (DataSourceIngestJob.Stages.FIRST == this.stage) {
- DataSourceIngestJob.taskScheduler.fastTrackFileIngestTasks(this, files);
+ if (stage.equals(Stages.FIRST_STAGE_FILES_ONLY)
+ || stage.equals(Stages.FIRST_STAGE_FILES_AND_DATASOURCE)) {
+ IngestJobPipeline.taskScheduler.fastTrackFileIngestTasks(this, files);
} else {
logErrorMessage(Level.SEVERE, "Adding files to job during second stage analysis not supported");
}
@@ -1108,12 +1257,12 @@ public final class DataSourceIngestJob {
void cancel(IngestJob.CancellationReason reason) {
this.cancelled = true;
this.cancellationReason = reason;
- DataSourceIngestJob.taskScheduler.cancelPendingTasksForIngestJob(this);
+ IngestJobPipeline.taskScheduler.cancelPendingTasksForIngestJob(this);
if (this.doUI) {
synchronized (this.dataSourceIngestProgressLock) {
if (null != dataSourceIngestProgress) {
- dataSourceIngestProgress.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()));
+ dataSourceIngestProgress.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", this.dataSource.getName()));
dataSourceIngestProgress.progress(NbBundle.getMessage(this.getClass(), "IngestJob.progress.cancelling"));
}
}
@@ -1165,7 +1314,7 @@ public final class DataSourceIngestJob {
* @param message The message.
*/
private void logInfoMessage(String message) {
- logger.log(Level.INFO, String.format("%s (data source = %s, objId = %d, jobId = %d)", message, dataSource.getName(), dataSource.getId(), id)); //NON-NLS
+ logger.log(Level.INFO, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id = %d)", message, this.dataSource.getName(), this.dataSource.getId(), id, ingestJob.getIngestJobId())); //NON-NLS
}
/**
@@ -1177,7 +1326,7 @@ public final class DataSourceIngestJob {
* @param throwable The throwable associated with the error.
*/
private void logErrorMessage(Level level, String message, Throwable throwable) {
- logger.log(level, String.format("%s (data source = %s, objId = %d, jobId = %d)", message, dataSource.getName(), dataSource.getId(), id), throwable); //NON-NLS
+ logger.log(level, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id = %d)", message, this.dataSource.getName(), this.dataSource.getId(), id, ingestJob.getIngestJobId()), throwable); //NON-NLS
}
/**
@@ -1188,7 +1337,7 @@ public final class DataSourceIngestJob {
* @param message The message.
*/
private void logErrorMessage(Level level, String message) {
- logger.log(level, String.format("%s (data source = %s, objId = %d, jobId = %d)", message, dataSource.getName(), dataSource.getId(), id)); //NON-NLS
+ logger.log(level, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id = %d)", message, this.dataSource.getName(), this.dataSource.getId(), id, ingestJob.getIngestJobId())); //NON-NLS
}
/**
@@ -1237,7 +1386,7 @@ public final class DataSourceIngestJob {
estimatedFilesToProcessCount = this.estimatedFilesToProcess;
snapShotTime = new Date().getTime();
}
- tasksSnapshot = DataSourceIngestJob.taskScheduler.getTasksSnapshotForJob(id);
+ tasksSnapshot = IngestJobPipeline.taskScheduler.getTasksSnapshotForJob(id);
}
@@ -1246,194 +1395,4 @@ public final class DataSourceIngestJob {
cancelled, cancellationReason, cancelledDataSourceIngestModules,
processedFilesCount, estimatedFilesToProcessCount, snapShotTime, tasksSnapshot);
}
-
- /**
- * Stores basic diagnostic statistics for a data source ingest job.
- */
- public static final class Snapshot implements Serializable {
-
- private static final long serialVersionUID = 1L;
-
- private final String dataSource;
- private final long jobId;
- private final long jobStartTime;
- private final long snapShotTime;
- transient private final PipelineModule dataSourceLevelIngestModule;
- private final boolean fileIngestRunning;
- private final Date fileIngestStartTime;
- private final long processedFiles;
- private final long estimatedFilesToProcess;
- private final IngestJobTasksSnapshot tasksSnapshot;
- transient private final boolean jobCancelled;
- transient private final CancellationReason jobCancellationReason;
- transient private final List<String> cancelledDataSourceModules;
-
- /**
- * Constructs an object to store basic diagnostic statistics for a data
- * source ingest job.
- */
- Snapshot(String dataSourceName, long jobId, long jobStartTime, PipelineModule dataSourceIngestModule,
- boolean fileIngestRunning, Date fileIngestStartTime,
- boolean jobCancelled, CancellationReason cancellationReason, List<String> cancelledModules,
- long processedFiles, long estimatedFilesToProcess,
- long snapshotTime, IngestJobTasksSnapshot tasksSnapshot) {
- this.dataSource = dataSourceName;
- this.jobId = jobId;
- this.jobStartTime = jobStartTime;
- this.dataSourceLevelIngestModule = dataSourceIngestModule;
-
- this.fileIngestRunning = fileIngestRunning;
- this.fileIngestStartTime = fileIngestStartTime;
- this.jobCancelled = jobCancelled;
- this.jobCancellationReason = cancellationReason;
- this.cancelledDataSourceModules = cancelledModules;
-
- this.processedFiles = processedFiles;
- this.estimatedFilesToProcess = estimatedFilesToProcess;
- this.snapShotTime = snapshotTime;
- this.tasksSnapshot = tasksSnapshot;
- }
-
- /**
- * Gets time these statistics were collected.
- *
- * @return The statistics collection time as number of milliseconds
- * since January 1, 1970, 00:00:00 GMT.
- */
- long getSnapshotTime() {
- return snapShotTime;
- }
-
- /**
- * Gets the name of the data source associated with the ingest job that
- * is the subject of this snapshot.
- *
- * @return A data source name string.
- */
- String getDataSource() {
- return dataSource;
- }
-
- /**
- * Gets the identifier of the ingest job that is the subject of this
- * snapshot.
- *
- * @return The ingest job id.
- */
- long getJobId() {
- return this.jobId;
- }
-
- /**
- * Gets the time the ingest job was started.
- *
- * @return The start time as number of milliseconds since January 1,
- * 1970, 00:00:00 GMT.
- */
- long getJobStartTime() {
- return jobStartTime;
- }
-
- DataSourceIngestPipeline.PipelineModule getDataSourceLevelIngestModule() {
- return this.dataSourceLevelIngestModule;
- }
-
- boolean getFileIngestIsRunning() {
- return this.fileIngestRunning;
- }
-
- Date getFileIngestStartTime() {
- return this.fileIngestStartTime;
- }
-
- /**
- * Gets files per second throughput since the ingest job that is the
- * subject of this snapshot started.
- *
- * @return Files processed per second (approximate).
- */
- double getSpeed() {
- return (double) processedFiles / ((snapShotTime - jobStartTime) / 1000);
- }
-
- /**
- * Gets the number of files processed for the job so far.
- *
- * @return The number of processed files.
- */
- long getFilesProcessed() {
- return processedFiles;
- }
-
- /**
- * Gets an estimate of the files that still need to be processed for
- * this job.
- *
- * @return The estimate.
- */
- long getFilesEstimated() {
- return estimatedFilesToProcess;
- }
-
- long getRootQueueSize() {
- if (null == this.tasksSnapshot) {
- return 0;
- }
- return this.tasksSnapshot.getRootQueueSize();
- }
-
- long getDirQueueSize() {
- if (null == this.tasksSnapshot) {
- return 0;
- }
- return this.tasksSnapshot.getDirectoryTasksQueueSize();
- }
-
- long getFileQueueSize() {
- if (null == this.tasksSnapshot) {
- return 0;
- }
- return this.tasksSnapshot.getFileQueueSize();
- }
-
- long getDsQueueSize() {
- if (null == this.tasksSnapshot) {
- return 0;
- }
- return this.tasksSnapshot.getDsQueueSize();
- }
-
- long getRunningListSize() {
- if (null == this.tasksSnapshot) {
- return 0;
- }
- return this.tasksSnapshot.getRunningListSize();
- }
-
- boolean isCancelled() {
- return this.jobCancelled;
- }
-
- /**
- * Gets the reason this job was cancelled.
- *
- * @return The cancellation reason, may be not cancelled.
- */
- IngestJob.CancellationReason getCancellationReason() {
- return this.jobCancellationReason;
- }
-
- /**
- * Gets a list of the display names of any canceled data source level
- * ingest modules
- *
- * @return A list of canceled data source level ingest module display
- * names, possibly empty.
- */
- List<String> getCancelledDataSourceIngestModules() {
- return Collections.unmodifiableList(this.cancelledDataSourceModules);
- }
-
- }
-
}
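
The stage handling above is easier to follow in isolation. Below is a minimal, self-contained sketch of the two mode-dependent paths through the pipeline stages. The Stage constants mirror the diff; the class itself, its fields, and its method bodies are illustrative only, not the actual IngestJobPipeline code.

    // Illustrative sketch only: mirrors the stage transitions above, but the
    // class, fields, and methods are hypothetical simplifications.
    public class StageFlowSketch {

        enum Mode { BATCH, STREAMING }

        enum Stage { FIRST_STAGE_FILES_ONLY, FIRST_STAGE_FILES_AND_DATASOURCE, SECOND_STAGE, FINALIZATION }

        private Stage stage;

        // start(): batch jobs schedule file and data source tasks at once;
        // streaming jobs begin with files only and wait for the data source.
        void start(Mode mode) {
            stage = (mode == Mode.BATCH)
                    ? Stage.FIRST_STAGE_FILES_AND_DATASOURCE
                    : Stage.FIRST_STAGE_FILES_ONLY;
        }

        // processStreamingIngestDataSource(): once the data source is in the
        // database, a streaming job joins the normal first-stage path.
        void dataSourceReady() {
            if (stage == Stage.FIRST_STAGE_FILES_ONLY) {
                stage = Stage.FIRST_STAGE_FILES_AND_DATASOURCE;
            }
        }

        // checkForStageCompleted(): when all current tasks are done, advance,
        // except in FIRST_STAGE_FILES_ONLY, which waits for the data source.
        void onCurrentTasksCompleted(boolean hasSecondStagePipeline) {
            switch (stage) {
                case FIRST_STAGE_FILES_AND_DATASOURCE:
                    stage = hasSecondStagePipeline ? Stage.SECOND_STAGE : Stage.FINALIZATION;
                    break;
                case SECOND_STAGE:
                    stage = Stage.FINALIZATION;
                    break;
                default:
                    break; // FIRST_STAGE_FILES_ONLY and FINALIZATION: nothing to do
            }
        }
    }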
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
index e345db3d65..95696378b7 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java
@@ -67,6 +67,8 @@ import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisStartedEvent;
import org.sleuthkit.autopsy.ingest.events.FileAnalyzedEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.DataSource;
+import org.sleuthkit.datamodel.TskCoreException;
/**
* Manages the creation and execution of ingest jobs, i.e., the processing of
@@ -285,6 +287,32 @@ public class IngestManager implements IngestProgressSnapshotProvider {
caseIsOpen = false;
clearIngestMessageBox();
}
+
+ /**
+ * Creates an ingest stream from the given ingest settings for a data source.
+ *
+ * @param dataSource The data source.
+ * @param settings The ingest job settings.
+ *
+ * @return The newly created ingest stream.
+ *
+ * @throws TskCoreException if there was an error starting the ingest job.
+ */
+ public IngestStream openIngestStream(DataSource dataSource, IngestJobSettings settings) throws TskCoreException {
+ IngestJob job = new IngestJob(dataSource, IngestJob.Mode.STREAMING, settings);
+ IngestJobInputStream stream = new IngestJobInputStream(job);
+ if (stream.getIngestJobStartResult().getJob() != null) {
+ return stream;
+ } else if (!stream.getIngestJobStartResult().getModuleErrors().isEmpty()) {
+ for (IngestModuleError error : stream.getIngestJobStartResult().getModuleErrors()) {
+ logger.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), dataSource.getName()), error.getThrowable());
+ }
+ throw new TskCoreException("Error starting ingest modules");
+ } else {
+ throw new TskCoreException("Error starting ingest modules", stream.getIngestJobStartResult().getStartupException());
+ }
+ }
+
/**
* Gets the number of file ingest threads the ingest manager is using to do
@@ -304,7 +332,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
public void queueIngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
if (caseIsOpen) {
- IngestJob job = new IngestJob(dataSources, settings, RuntimeProperties.runningWithGUI());
+ IngestJob job = new IngestJob(dataSources, settings);
if (job.hasIngestPipeline()) {
long taskId = nextIngestManagerTaskId.incrementAndGet();
Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
@@ -323,7 +351,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
public void queueIngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
if (caseIsOpen) {
- IngestJob job = new IngestJob(dataSource, files, settings, RuntimeProperties.runningWithGUI());
+ IngestJob job = new IngestJob(dataSource, files, settings);
if (job.hasIngestPipeline()) {
long taskId = nextIngestManagerTaskId.incrementAndGet();
Future task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
@@ -333,7 +361,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
}
/**
- * Immdiately starts an ingest job for one or more data sources.
+ * Immediately starts an ingest job for one or more data sources.
*
* @param dataSources The data sources to process.
* @param settings The settings for the ingest job.
@@ -343,7 +371,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
public IngestJobStartResult beginIngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
if (caseIsOpen) {
- IngestJob job = new IngestJob(dataSources, settings, RuntimeProperties.runningWithGUI());
+ IngestJob job = new IngestJob(dataSources, settings);
if (job.hasIngestPipeline()) {
return startIngestJob(job);
}
@@ -366,7 +394,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
"IngestManager.startupErr.dlgSolution=Please disable the failed modules or fix the errors before restarting ingest.",
"IngestManager.startupErr.dlgErrorList=Errors:"
})
- private IngestJobStartResult startIngestJob(IngestJob job) {
+ IngestJobStartResult startIngestJob(IngestJob job) {
List<IngestModuleError> errors = null;
Case openCase;
try {
@@ -730,7 +758,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* the task.
*/
void setIngestTaskProgress(DataSourceIngestTask task, String ingestModuleDisplayName) {
- ingestThreadActivitySnapshots.put(task.getThreadId(), new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJob().getId(), ingestModuleDisplayName, task.getDataSource()));
+ ingestThreadActivitySnapshots.put(task.getThreadId(), new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getId(), ingestModuleDisplayName, task.getDataSource()));
}
/**
@@ -746,7 +774,14 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
void setIngestTaskProgress(FileIngestTask task, String ingestModuleDisplayName) {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
- IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJob().getId(), ingestModuleDisplayName, task.getDataSource(), task.getFile());
+ IngestThreadActivitySnapshot newSnap;
+ try {
+ newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getId(), ingestModuleDisplayName, task.getDataSource(), task.getFile());
+ } catch (TskCoreException ex) {
+ // In practice, this task would never have been enqueued or processed since the file
+ // lookup would have failed.
+ newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getId(), ingestModuleDisplayName, task.getDataSource());
+ }
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);
incrementModuleRunTime(prevSnap.getActivity(), newSnap.getStartTime().getTime() - prevSnap.getStartTime().getTime());
}
@@ -828,8 +863,8 @@ public class IngestManager implements IngestProgressSnapshotProvider {
* @return A list of ingest job state snapshots.
*/
@Override
- public List<DataSourceIngestJob.Snapshot> getIngestJobSnapshots() {
- List<DataSourceIngestJob.Snapshot> snapShots = new ArrayList<>();
+ public List<Snapshot> getIngestJobSnapshots() {
+ List<Snapshot> snapShots = new ArrayList<>();
synchronized (ingestJobsById) {
ingestJobsById.values().forEach((job) -> {
snapShots.addAll(job.getDataSourceIngestJobSnapshots());
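
For context, here is a sketch of how a data source processor might drive the new streaming entry point. Only openIngestStream(), addFiles(), close(), and stop() come from this patch; the surrounding class and the file object ID values are placeholders.

    // Hypothetical caller of the streaming API added above. Assumes it is
    // compiled where IngestManager, IngestStream, and IngestJobSettings are
    // visible; the file object IDs are placeholders.
    import java.util.Arrays;
    import org.sleuthkit.datamodel.DataSource;
    import org.sleuthkit.datamodel.TskCoreException;

    class StreamingIngestExample {

        void runStreamingIngest(DataSource dataSource, IngestJobSettings settings) throws TskCoreException {
            IngestStream stream = IngestManager.getInstance().openIngestStream(dataSource, settings);
            try {
                // As files are committed to the case database, pass their
                // object IDs to the stream so file-level ingest starts early.
                stream.addFiles(Arrays.asList(1001L, 1002L, 1003L));
                // Signal that all files have been sent; data source level
                // ingest can then proceed.
                stream.close();
            } catch (IngestStreamClosedException ex) {
                stream.stop(); // abandon the stream if it was closed early
            }
        }
    }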
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotPanel.java
index 44c508c75c..cf1fefb2d4 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotPanel.java
@@ -182,8 +182,10 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
NbBundle.getMessage(this.getClass(),
"IngestJobTableModel.colName.rootQueued"),
NbBundle.getMessage(this.getClass(),
+ "IngestJobTableModel.colName.streamingQueued"),
+ NbBundle.getMessage(this.getClass(),
"IngestJobTableModel.colName.dsQueued")};
- private List<DataSourceIngestJob.Snapshot> jobSnapshots;
+ private List<Snapshot> jobSnapshots;
private IngestJobTableModel() {
refresh();
@@ -211,7 +213,7 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
@Override
public Object getValueAt(int rowIndex, int columnIndex) {
- DataSourceIngestJob.Snapshot snapShot = jobSnapshots.get(rowIndex);
+ Snapshot snapShot = jobSnapshots.get(rowIndex);
Object cellValue;
switch (columnIndex) {
case 0:
@@ -243,6 +245,9 @@ class IngestProgressSnapshotPanel extends javax.swing.JPanel {
cellValue = snapShot.getRootQueueSize();
break;
case 9:
+ cellValue = snapShot.getStreamingQueueSize();
+ break;
+ case 10:
cellValue = snapShot.getDsQueueSize();
break;
default:
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotProvider.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotProvider.java
index 511939e1f9..fc085615c7 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotProvider.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestProgressSnapshotProvider.java
@@ -38,7 +38,7 @@ public interface IngestProgressSnapshotProvider {
*
* @return A list of ingest job snapshots.
*/
- List<DataSourceIngestJob.Snapshot> getIngestJobSnapshots();
+ List<Snapshot> getIngestJobSnapshots();
/**
* Gets the cumulative run times for the ingest module.
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java
new file mode 100644
index 0000000000..62a42af208
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestStream.java
@@ -0,0 +1,64 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.ingest;
+
+import java.util.List;
+
+/**
+ * Interface for classes that handle adding files from a data source processor
+ * to the ingest pipeline.
+ */
+public interface IngestStream {
+
+ /**
+ * Adds a set of file object IDs that are ready for ingest.
+ *
+ * @param fileObjectIds List of file object IDs.
+ *
+ * @throws IngestStreamClosedException
+ */
+ void addFiles(List<Long> fileObjectIds) throws IngestStreamClosedException;
+
+ /**
+ * Closes the ingest stream. Should be called after all files from data
+ * source have been sent to the stream.
+ */
+ void close();
+
+ /**
+ * Check whether the ingest stream is closed.
+ *
+ * @return True if closed, false otherwise.
+ */
+ boolean isClosed();
+
+ /**
+ * Stops the ingest stream. The stream will no longer accept new data and
+ * should no longer be used. Will also close the ingest stream.
+ */
+ void stop();
+
+ /**
+ * Check whether the ingest stream was stopped before completion. If this
+ * returns true, data should not be written to or read from the stream.
+ *
+ * @return True if the ingest stream was stopped, false otherwise.
+ */
+ boolean wasStopped();
+}
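
A minimal in-memory implementation sketch of the interface, e.g. for tests. The production implementation referenced elsewhere in this patch is IngestJobInputStream; this toy version only records IDs and flags.

    // Toy IngestStream for illustration/testing; lives in the ingest package
    // so it can construct the package-private IngestStreamClosedException.
    package org.sleuthkit.autopsy.ingest;

    import java.util.ArrayList;
    import java.util.List;

    class InMemoryIngestStream implements IngestStream {

        private final List<Long> receivedFileIds = new ArrayList<>();
        private boolean closed = false;
        private boolean stopped = false;

        @Override
        public synchronized void addFiles(List<Long> fileObjectIds) throws IngestStreamClosedException {
            if (closed) {
                throw new IngestStreamClosedException("Can not add files - ingest stream is closed");
            }
            receivedFileIds.addAll(fileObjectIds);
        }

        @Override
        public synchronized void close() {
            closed = true;
        }

        @Override
        public synchronized boolean isClosed() {
            return closed;
        }

        @Override
        public synchronized void stop() {
            stopped = true;
            closed = true; // stopping also closes the stream
        }

        @Override
        public synchronized boolean wasStopped() {
            return stopped;
        }
    }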
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestStreamClosedException.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestStreamClosedException.java
new file mode 100644
index 0000000000..0eb82c4556
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestStreamClosedException.java
@@ -0,0 +1,48 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.ingest;
+
+/**
+ * Exception thrown when there is an attempt to add files to a closed ingest stream.
+ */
+public class IngestStreamClosedException extends Exception {
+
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * Constructs an instance of the custom exception class for the
+ * IngestStream.
+ *
+ * @param message Exception message.
+ */
+ IngestStreamClosedException(String message) {
+ super(message);
+ }
+
+ /**
+ * Constructs an instance of the custom exception class for the
+ * IngestStream.
+ *
+ * @param message Exception message.
+ * @param throwable Exception cause.
+ */
+ IngestStreamClosedException(String message, Throwable throwable) {
+ super(message, throwable);
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java
index de5e750244..ebfabf2ab2 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java
@@ -23,20 +23,20 @@ import org.sleuthkit.datamodel.Content;
abstract class IngestTask {
private final static long NOT_SET = Long.MIN_VALUE;
- private final DataSourceIngestJob job;
+ private final IngestJobPipeline ingestJobPipeline;
private long threadId;
- IngestTask(DataSourceIngestJob job) {
- this.job = job;
+ IngestTask(IngestJobPipeline ingestJobPipeline) {
+ this.ingestJobPipeline = ingestJobPipeline;
threadId = NOT_SET;
}
- DataSourceIngestJob getIngestJob() {
- return job;
+ IngestJobPipeline getIngestJobPipeline() {
+ return ingestJobPipeline;
}
Content getDataSource() {
- return getIngestJob().getDataSource();
+ return getIngestJobPipeline().getDataSource();
}
long getThreadId() {
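
The scheduler changes that follow repeatedly wrap task.getFile() in try/catch because a streamed FileIngestTask is now created from a file object ID alone and the AbstractFile is resolved lazily. A rough sketch of that pattern, with an explicit SleuthkitCase handle standing in for however the real task performs the lookup:

    // Hypothetical sketch of lazy file resolution; the real FileIngestTask's
    // lookup mechanism may differ. Only the failure mode is the point here.
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.TskCoreException;

    class LazyFileTaskSketch {

        private final SleuthkitCase caseDb;
        private final long fileId;
        private AbstractFile file = null; // resolved on first use

        LazyFileTaskSketch(SleuthkitCase caseDb, long fileId) {
            this.caseDb = caseDb;
            this.fileId = fileId;
        }

        long getFileId() {
            return fileId; // always available, even if the lookup fails
        }

        synchronized AbstractFile getFile() throws TskCoreException {
            if (file == null) {
                file = caseDb.getAbstractFileById(fileId); // may throw for a bad ID
            }
            return file;
        }
    }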
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
index 57da5330c0..2ec96915cc 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java
@@ -27,6 +27,7 @@ import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Queue;
import java.util.TreeSet;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
@@ -58,6 +59,8 @@ final class IngestTasksScheduler {
private final TreeSet<FileIngestTask> rootFileTaskQueue;
@GuardedBy("this")
private final Deque<FileIngestTask> pendingFileTaskQueue;
+ @GuardedBy("this")
+ private final Queue<FileIngestTask> streamedTasksQueue;
private final IngestTaskTrackingQueue fileIngestThreadsQueue;
/**
@@ -82,6 +85,7 @@ final class IngestTasksScheduler {
this.rootFileTaskQueue = new TreeSet<>(new RootDirectoryTaskComparator());
this.pendingFileTaskQueue = new LinkedList<>();
this.fileIngestThreadsQueue = new IngestTaskTrackingQueue();
+ this.streamedTasksQueue = new LinkedList<>();
}
/**
@@ -105,38 +109,38 @@ final class IngestTasksScheduler {
}
/**
- * Schedules a data source level ingest task and zero to many file level
- * ingest tasks for a data source ingest job.
+ * Schedules a data source level ingest task and zero to many file level
+ * ingest tasks for an ingest job pipeline.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
*/
- synchronized void scheduleIngestTasks(DataSourceIngestJob job) {
- if (!job.isCancelled()) {
+ synchronized void scheduleIngestTasks(IngestJobPipeline ingestJobPipeline) {
+ if (!ingestJobPipeline.isCancelled()) {
/*
* Scheduling of both the data source ingest task and the initial
- * file ingest tasks for a job must be an atomic operation.
+ * file ingest tasks for an ingest job pipeline must be an atomic operation.
* Otherwise, the data source task might be completed before the
* file tasks are scheduled, resulting in a potential false positive
* when another thread checks whether or not all the tasks for the
- * job are completed.
+ * ingest job pipeline are completed.
*/
- this.scheduleDataSourceIngestTask(job);
- this.scheduleFileIngestTasks(job, Collections.emptyList());
+ this.scheduleDataSourceIngestTask(ingestJobPipeline);
+ this.scheduleFileIngestTasks(ingestJobPipeline, Collections.emptyList());
}
}
/**
- * Schedules a data source level ingest task for a data source ingest job.
+ * Schedules a data source level ingest task for an ingest job pipeline.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
*/
- synchronized void scheduleDataSourceIngestTask(DataSourceIngestJob job) {
- if (!job.isCancelled()) {
- DataSourceIngestTask task = new DataSourceIngestTask(job);
+ synchronized void scheduleDataSourceIngestTask(IngestJobPipeline ingestJobPipeline) {
+ if (!ingestJobPipeline.isCancelled()) {
+ DataSourceIngestTask task = new DataSourceIngestTask(ingestJobPipeline);
try {
this.dataSourceIngestThreadQueue.putLast(task);
} catch (InterruptedException ex) {
- IngestTasksScheduler.logger.log(Level.INFO, String.format("Ingest tasks scheduler interrupted while blocked adding a task to the data source level ingest task queue (jobId={%d)", job.getId()), ex);
+ IngestTasksScheduler.logger.log(Level.INFO, String.format("Ingest tasks scheduler interrupted while blocked adding a task to the data source level ingest task queue (jobId=%d)", ingestJobPipeline.getId()), ex);
Thread.currentThread().interrupt();
}
}
@@ -144,40 +148,59 @@ final class IngestTasksScheduler {
/**
* Schedules file tasks for either all the files or a given subset of the
- * files for a data source source ingest job.
+ * files for an ingest job pipeline.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
* @param files A subset of the files for the data source; if empty, then
* file tasks for all files in the data source are scheduled.
*/
- synchronized void scheduleFileIngestTasks(DataSourceIngestJob job, Collection<AbstractFile> files) {
- if (!job.isCancelled()) {
+ synchronized void scheduleFileIngestTasks(IngestJobPipeline ingestJobPipeline, Collection<AbstractFile> files) {
+ if (!ingestJobPipeline.isCancelled()) {
Collection<AbstractFile> candidateFiles;
if (files.isEmpty()) {
- candidateFiles = getTopLevelFiles(job.getDataSource());
+ candidateFiles = getTopLevelFiles(ingestJobPipeline.getDataSource());
} else {
candidateFiles = files;
}
for (AbstractFile file : candidateFiles) {
- FileIngestTask task = new FileIngestTask(job, file);
+ FileIngestTask task = new FileIngestTask(ingestJobPipeline, file);
if (IngestTasksScheduler.shouldEnqueueFileTask(task)) {
this.rootFileTaskQueue.add(task);
}
}
- shuffleFileTaskQueues();
+ refillIngestThreadQueue();
}
}
+
+ /**
+ * Schedules file tasks for the given list of file IDs.
+ *
+ * @param ingestJobPipeline The ingest job pipeline.
+ * @param fileIds The object IDs of the files to schedule for ingest;
+ * tasks are created lazily, without loading the files up front.
+ */
+ synchronized void scheduleStreamedFileIngestTasks(IngestJobPipeline ingestJobPipeline, List<Long> fileIds) {
+ if (!ingestJobPipeline.isCancelled()) {
+ for (long id : fileIds) {
+ // Create the file ingest task. Note that we do not do the shouldEnqueueFileTask()
+ // check here in order to delay loading the AbstractFile object.
+ FileIngestTask task = new FileIngestTask(ingestJobPipeline, id);
+ this.streamedTasksQueue.add(task);
+ }
+ refillIngestThreadQueue();
+ }
+ }
/**
- * Schedules file level ingest tasks for a given set of files for a data
- * source ingest job by adding them directly to the front of the file tasks
+ * Schedules file level ingest tasks for a given set of files for an ingest
+ * job pipeline by adding them directly to the front of the file tasks
* queue for the ingest manager's file ingest threads.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
* @param files A set of files for the data source.
*/
- synchronized void fastTrackFileIngestTasks(DataSourceIngestJob job, Collection<AbstractFile> files) {
- if (!job.isCancelled()) {
+ synchronized void fastTrackFileIngestTasks(IngestJobPipeline ingestJobPipeline, Collection<AbstractFile> files) {
+ if (!ingestJobPipeline.isCancelled()) {
/*
* Put the files directly into the queue for the file ingest
* threads, if they pass the file filter for the job. The files are
@@ -187,12 +210,12 @@ final class IngestTasksScheduler {
* already in progress.
*/
for (AbstractFile file : files) {
- FileIngestTask fileTask = new FileIngestTask(job, file);
+ FileIngestTask fileTask = new FileIngestTask(ingestJobPipeline, file);
if (shouldEnqueueFileTask(fileTask)) {
try {
this.fileIngestThreadsQueue.putFirst(fileTask);
} catch (InterruptedException ex) {
- IngestTasksScheduler.logger.log(Level.INFO, String.format("Ingest tasks scheduler interrupted while scheduling file level ingest tasks (jobId={%d)", job.getId()), ex);
+ IngestTasksScheduler.logger.log(Level.INFO, String.format("Ingest tasks scheduler interrupted while scheduling file level ingest tasks (jobId=%d)", ingestJobPipeline.getId()), ex);
Thread.currentThread().interrupt();
return;
}
@@ -219,36 +242,39 @@ final class IngestTasksScheduler {
*/
synchronized void notifyTaskCompleted(FileIngestTask task) {
this.fileIngestThreadsQueue.taskCompleted(task);
- shuffleFileTaskQueues();
+ refillIngestThreadQueue();
}
/**
* Queries the task scheduler to determine whether or not all of the ingest
- * tasks for a data source ingest job have been completed.
+ * tasks for an ingest job pipeline have been completed.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
*
* @return True or false.
*/
- synchronized boolean tasksForJobAreCompleted(DataSourceIngestJob job) {
- long jobId = job.getId();
+ synchronized boolean currentTasksAreCompleted(IngestJobPipeline ingestJobPipeline) {
+ long jobId = ingestJobPipeline.getId();
+
return !(this.dataSourceIngestThreadQueue.hasTasksForJob(jobId)
|| hasTasksForJob(this.rootFileTaskQueue, jobId)
|| hasTasksForJob(this.pendingFileTaskQueue, jobId)
+ || hasTasksForJob(this.streamedTasksQueue, jobId)
|| this.fileIngestThreadsQueue.hasTasksForJob(jobId));
}
/**
- * Clears the "upstream" task scheduling queues for a data source ingest
- * job, but does nothing about tasks that have already been moved into the
+ * Clears the "upstream" task scheduling queues for an ingest pipeline,
+ * but does nothing about tasks that have already been moved into the
* queue that is consumed by the file ingest threads.
*
- * @param job The data source ingest job.
+ * @param ingestJobPipeline The ingest job pipeline.
*/
- synchronized void cancelPendingTasksForIngestJob(DataSourceIngestJob job) {
- long jobId = job.getId();
- IngestTasksScheduler.removeTasksForJob(this.rootFileTaskQueue, jobId);
- IngestTasksScheduler.removeTasksForJob(this.pendingFileTaskQueue, jobId);
+ synchronized void cancelPendingTasksForIngestJob(IngestJobPipeline ingestJobPipeline) {
+ long jobId = ingestJobPipeline.getId();
+ IngestTasksScheduler.removeTasksForJob(rootFileTaskQueue, jobId);
+ IngestTasksScheduler.removeTasksForJob(pendingFileTaskQueue, jobId);
+ IngestTasksScheduler.removeTasksForJob(streamedTasksQueue, jobId);
}
/**
@@ -291,6 +317,48 @@ final class IngestTasksScheduler {
}
return topLevelFiles;
}
+
+ /**
+ * Schedules file ingest tasks for the ingest manager's file ingest threads.
+ * Files from streaming ingest will be prioritized.
+ */
+ synchronized private void refillIngestThreadQueue() {
+ try {
+ takeFromStreamingTaskQueue();
+ takeFromBatchTasksQueues();
+ } catch (InterruptedException ex) {
+ IngestTasksScheduler.logger.log(Level.INFO, "Ingest tasks scheduler interrupted while blocked adding a task to the file level ingest task queue", ex);
+ Thread.currentThread().interrupt();
+ }
+ }
+
+ /**
+ * Move tasks from the streamedTasksQueue into the fileIngestThreadsQueue.
+ * Will attempt to move as many tasks as there are ingest threads.
+ */
+ synchronized private void takeFromStreamingTaskQueue() throws InterruptedException {
+ /*
+ * Schedule files from the streamedTasksQueue
+ */
+ while (fileIngestThreadsQueue.isEmpty()) {
+ /*
+ * We will attempt to schedule as many tasks as there are ingest
+ * threads.
+ */
+ int taskCount = 0;
+ while (taskCount < IngestManager.getInstance().getNumberOfFileIngestThreads()) {
+ final FileIngestTask streamingTask = streamedTasksQueue.poll();
+ if (streamingTask == null) {
+ return; // No streaming tasks are queued right now
+ }
+
+ if (shouldEnqueueFileTask(streamingTask)) {
+ fileIngestThreadsQueue.putLast(streamingTask);
+ taskCount++;
+ }
+ }
+ }
+ }
/**
* Schedules file ingest tasks for the ingest manager's file ingest threads
@@ -322,8 +390,9 @@ final class IngestTasksScheduler {
* during ingest. The reason for the LIFO additions is to give priority to
* files derived from prioritized files.
*/
- synchronized private void shuffleFileTaskQueues() {
- while (this.fileIngestThreadsQueue.isEmpty()) {
+ synchronized private void takeFromBatchTasksQueues() throws InterruptedException {
+
+ while (this.fileIngestThreadsQueue.isEmpty()) {
/*
* If the pending file task queue is empty, move the highest
* priority root file task, if there is one, into it.
@@ -345,17 +414,11 @@ final class IngestTasksScheduler {
return;
}
if (shouldEnqueueFileTask(pendingTask)) {
- try {
- /*
- * The task is added to the queue for the ingest threads
- * AFTER the higher priority tasks that preceded it.
- */
- this.fileIngestThreadsQueue.putLast(pendingTask);
- } catch (InterruptedException ex) {
- IngestTasksScheduler.logger.log(Level.INFO, "Ingest tasks scheduler interrupted while blocked adding a task to the file level ingest task queue", ex);
- Thread.currentThread().interrupt();
- return;
- }
+ /*
+ * The task is added to the queue for the ingest threads
+ * AFTER the higher priority tasks that preceded it.
+ */
+ this.fileIngestThreadsQueue.putLast(pendingTask);
}
/*
@@ -365,27 +428,27 @@ final class IngestTasksScheduler {
* own, or into the queue for the file ingest threads, if it passes
* the filter for the job.
*/
- final AbstractFile file = pendingTask.getFile();
+ AbstractFile file = null;
try {
+ file = pendingTask.getFile();
for (Content child : file.getChildren()) {
if (child instanceof AbstractFile) {
AbstractFile childFile = (AbstractFile) child;
- FileIngestTask childTask = new FileIngestTask(pendingTask.getIngestJob(), childFile);
+ FileIngestTask childTask = new FileIngestTask(pendingTask.getIngestJobPipeline(), childFile);
if (childFile.hasChildren()) {
this.pendingFileTaskQueue.add(childTask);
} else if (shouldEnqueueFileTask(childTask)) {
- try {
- this.fileIngestThreadsQueue.putLast(childTask);
- } catch (InterruptedException ex) {
- IngestTasksScheduler.logger.log(Level.INFO, "Ingest tasks scheduler interrupted while blocked adding a task to the file level ingest task queue", ex);
- Thread.currentThread().interrupt();
- return;
- }
+ this.fileIngestThreadsQueue.putLast(childTask);
}
}
}
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, String.format("Error getting the children of %s (objId=%d)", file.getName(), file.getId()), ex); //NON-NLS
+ if (file != null) {
+ logger.log(Level.SEVERE, String.format("Error getting the children of %s (objId=%d)", file.getName(), file.getId()), ex); //NON-NLS
+ } else {
+ // In practice, the task would have already returned false from the call to shouldEnqueueFileTask()
+ logger.log(Level.SEVERE, "Error loading file with object ID {0}", pendingTask.getFileId());
+ }
}
}
}
@@ -400,7 +463,13 @@ final class IngestTasksScheduler {
* @return True or false.
*/
private static boolean shouldEnqueueFileTask(final FileIngestTask task) {
- final AbstractFile file = task.getFile();
+ AbstractFile file;
+ try {
+ file = task.getFile();
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Error loading file with ID {0}", task.getFileId());
+ return false;
+ }
// Skip the task if the file is actually the pseudo-file for the parent
// or current directory.
@@ -483,7 +552,12 @@ final class IngestTasksScheduler {
* @return True or false.
*/
private static boolean shouldBeCarved(final FileIngestTask task) {
- return task.getIngestJob().shouldProcessUnallocatedSpace() && task.getFile().getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
+ try {
+ AbstractFile file = task.getFile();
+ return task.getIngestJobPipeline().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
+ } catch (TskCoreException ex) {
+ return false;
+ }
}
/**
@@ -495,7 +569,12 @@ final class IngestTasksScheduler {
* @return True or false.
*/
private static boolean fileAcceptedByFilter(final FileIngestTask task) {
- return !(task.getIngestJob().getFileIngestFilter().fileIsMemberOf(task.getFile()) == null);
+ try {
+ AbstractFile file = task.getFile();
+ return !(task.getIngestJobPipeline().getFileIngestFilter().fileIsMemberOf(file) == null);
+ } catch (TskCoreException ex) {
+ return false;
+ }
}
/**
@@ -509,7 +588,7 @@ final class IngestTasksScheduler {
*/
synchronized private static boolean hasTasksForJob(Collection<? extends IngestTask> tasks, long jobId) {
for (IngestTask task : tasks) {
- if (task.getIngestJob().getId() == jobId) {
+ if (task.getIngestJobPipeline().getId() == jobId) {
return true;
}
}
@@ -527,7 +606,7 @@ final class IngestTasksScheduler {
Iterator<? extends IngestTask> iterator = tasks.iterator();
while (iterator.hasNext()) {
IngestTask task = iterator.next();
- if (task.getIngestJob().getId() == jobId) {
+ if (task.getIngestJobPipeline().getId() == jobId) {
iterator.remove();
}
}
@@ -544,7 +623,7 @@ final class IngestTasksScheduler {
private static int countTasksForJob(Collection<? extends IngestTask> queue, long jobId) {
int count = 0;
for (IngestTask task : queue) {
- if (task.getIngestJob().getId() == jobId) {
+ if (task.getIngestJobPipeline().getId() == jobId) {
count++;
}
}
@@ -564,7 +643,8 @@ final class IngestTasksScheduler {
countTasksForJob(this.rootFileTaskQueue, jobId),
countTasksForJob(this.pendingFileTaskQueue, jobId),
this.fileIngestThreadsQueue.countQueuedTasksForJob(jobId),
- this.dataSourceIngestThreadQueue.countRunningTasksForJob(jobId) + this.fileIngestThreadsQueue.countRunningTasksForJob(jobId));
+ this.dataSourceIngestThreadQueue.countRunningTasksForJob(jobId) + this.fileIngestThreadsQueue.countRunningTasksForJob(jobId),
+ countTasksForJob(this.streamedTasksQueue, jobId));
}
/**
@@ -575,10 +655,36 @@ final class IngestTasksScheduler {
@Override
public int compare(FileIngestTask q1, FileIngestTask q2) {
- AbstractFilePriority.Priority p1 = AbstractFilePriority.getPriority(q1.getFile());
- AbstractFilePriority.Priority p2 = AbstractFilePriority.getPriority(q2.getFile());
+ // In practice the case where one or both calls to getFile() fails
+ // should never occur since such tasks would not be added to the queue.
+ AbstractFile file1 = null;
+ AbstractFile file2 = null;
+ try {
+ file1 = q1.getFile();
+ } catch (TskCoreException ex) {
+ // Do nothing - the exception has been logged elsewhere
+ }
+
+ try {
+ file2 = q2.getFile();
+ } catch (TskCoreException ex) {
+ // Do nothing - the exception has been logged elsewhere
+ }
+
+ if (file1 == null) {
+ if (file2 == null) {
+ return (int) (q2.getFileId() - q1.getFileId());
+ } else {
+ return 1;
+ }
+ } else if (file2 == null) {
+ return -1;
+ }
+
+ AbstractFilePriority.Priority p1 = AbstractFilePriority.getPriority(file1);
+ AbstractFilePriority.Priority p2 = AbstractFilePriority.getPriority(file2);
if (p1 == p2) {
- return (int) (q2.getFile().getId() - q1.getFile().getId());
+ return (int) (file2.getId() - file1.getId());
} else {
return p2.ordinal() - p1.ordinal();
}
@@ -843,19 +949,22 @@ final class IngestTasksScheduler {
private final long dirQueueSize;
private final long fileQueueSize;
private final long runningListSize;
+ private final long streamingQueueSize;
/**
* Constructs a snapshot of ingest tasks data for an ingest job.
*
* @param jobId The identifier associated with the job.
*/
- IngestJobTasksSnapshot(long jobId, long dsQueueSize, long rootQueueSize, long dirQueueSize, long fileQueueSize, long runningListSize) {
+ IngestJobTasksSnapshot(long jobId, long dsQueueSize, long rootQueueSize, long dirQueueSize, long fileQueueSize,
+ long runningListSize, long streamingQueueSize) {
this.jobId = jobId;
this.dsQueueSize = dsQueueSize;
this.rootQueueSize = rootQueueSize;
this.dirQueueSize = dirQueueSize;
this.fileQueueSize = fileQueueSize;
this.runningListSize = runningListSize;
+ this.streamingQueueSize = streamingQueueSize;
}
/**
@@ -891,6 +1000,10 @@ final class IngestTasksScheduler {
long getFileQueueSize() {
return fileQueueSize;
}
+
+ long getStreamingQueueSize() {
+ return streamingQueueSize;
+ }
long getDsQueueSize() {
return dsQueueSize;
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Snapshot.java b/Core/src/org/sleuthkit/autopsy/ingest/Snapshot.java
new file mode 100644
index 0000000000..19a0e41c35
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/ingest/Snapshot.java
@@ -0,0 +1,220 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014-2020 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.ingest;
+
+import java.io.Serializable;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+
+/**
+ * Stores basic diagnostic statistics for a data source ingest job.
+ */
+public final class Snapshot implements Serializable {
+
+ private static final long serialVersionUID = 1L;
+
+ private final String dataSource;
+ private final long jobId;
+ private final long jobStartTime;
+ private final long snapShotTime;
+ transient private final DataSourceIngestPipeline.PipelineModule dataSourceLevelIngestModule;
+ private final boolean fileIngestRunning;
+ private final Date fileIngestStartTime;
+ private final long processedFiles;
+ private final long estimatedFilesToProcess;
+ private final IngestTasksScheduler.IngestJobTasksSnapshot tasksSnapshot;
+ transient private final boolean jobCancelled;
+ transient private final IngestJob.CancellationReason jobCancellationReason;
+ transient private final List<String> cancelledDataSourceModules;
+
+ /**
+ * Constructs an object to store basic diagnostic statistics for a data
+ * source ingest job.
+ */
+ Snapshot(String dataSourceName, long jobId, long jobStartTime, DataSourceIngestPipeline.PipelineModule dataSourceIngestModule,
+ boolean fileIngestRunning, Date fileIngestStartTime,
+ boolean jobCancelled, IngestJob.CancellationReason cancellationReason, List<String> cancelledModules,
+ long processedFiles, long estimatedFilesToProcess,
+ long snapshotTime, IngestTasksScheduler.IngestJobTasksSnapshot tasksSnapshot) {
+ this.dataSource = dataSourceName;
+ this.jobId = jobId;
+ this.jobStartTime = jobStartTime;
+ this.dataSourceLevelIngestModule = dataSourceIngestModule;
+
+ this.fileIngestRunning = fileIngestRunning;
+ this.fileIngestStartTime = fileIngestStartTime;
+ this.jobCancelled = jobCancelled;
+ this.jobCancellationReason = cancellationReason;
+ this.cancelledDataSourceModules = cancelledModules;
+
+ this.processedFiles = processedFiles;
+ this.estimatedFilesToProcess = estimatedFilesToProcess;
+ this.snapShotTime = snapshotTime;
+ this.tasksSnapshot = tasksSnapshot;
+ }
+
+ /**
+ * Gets time these statistics were collected.
+ *
+ * @return The statistics collection time as number of milliseconds since
+ * January 1, 1970, 00:00:00 GMT.
+ */
+ long getSnapshotTime() {
+ return snapShotTime;
+ }
+
+ /**
+ * Gets the name of the data source associated with the ingest job that is
+ * the subject of this snapshot.
+ *
+ * @return A data source name string.
+ */
+ String getDataSource() {
+ return dataSource;
+ }
+
+ /**
+ * Gets the identifier of the ingest job that is the subject of this
+ * snapshot.
+ *
+ * @return The ingest job id.
+ */
+ long getJobId() {
+ return this.jobId;
+ }
+
+ /**
+ * Gets the time the ingest job was started.
+ *
+ * @return The start time as number of milliseconds since January 1, 1970,
+ * 00:00:00 GMT.
+ */
+ long getJobStartTime() {
+ return jobStartTime;
+ }
+
+ DataSourceIngestPipeline.PipelineModule getDataSourceLevelIngestModule() {
+ return this.dataSourceLevelIngestModule;
+ }
+
+ boolean getFileIngestIsRunning() {
+ return this.fileIngestRunning;
+ }
+
+ Date getFileIngestStartTime() {
+ return this.fileIngestStartTime;
+ }
+
+ /**
+ * Gets files per second throughput since the ingest job that is the subject
+ * of this snapshot started.
+ *
+ * @return Files processed per second (approximate).
+ */
+ double getSpeed() {
+ return (double) processedFiles / ((snapShotTime - jobStartTime) / 1000.0);
+ }
+
+ /**
+ * Gets the number of files processed for the job so far.
+ *
+ * @return The number of processed files.
+ */
+ long getFilesProcessed() {
+ return processedFiles;
+ }
+
+ /**
+ * Gets an estimate of the files that still need to be processed for this
+ * job.
+ *
+ * @return The estimate.
+ */
+ long getFilesEstimated() {
+ return estimatedFilesToProcess;
+ }
+
+ long getRootQueueSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getRootQueueSize();
+ }
+
+ long getDirQueueSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getDirectoryTasksQueueSize();
+ }
+
+ long getFileQueueSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getFileQueueSize();
+ }
+
+ long getDsQueueSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getDsQueueSize();
+ }
+
+ long getStreamingQueueSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getStreamingQueueSize();
+ }
+
+ long getRunningListSize() {
+ if (null == this.tasksSnapshot) {
+ return 0;
+ }
+ return this.tasksSnapshot.getRunningListSize();
+ }
+
+ boolean isCancelled() {
+ return this.jobCancelled;
+ }
+
+ /**
+ * Gets the reason this job was cancelled.
+ *
+ * @return The cancellation reason, may be not cancelled.
+ */
+ IngestJob.CancellationReason getCancellationReason() {
+ return this.jobCancellationReason;
+ }
+
+ /**
+ * Gets a list of the display names of any canceled data source level ingest
+ * modules
+ *
+ * @return A list of canceled data source level ingest module display names,
+ * possibly empty.
+ */
+ List<String> getCancelledDataSourceIngestModules() {
+ return Collections.unmodifiableList(this.cancelledDataSourceModules);
+ }
+
+}
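
Since Snapshot's getters are package-private, consumers such as IngestProgressSnapshotPanel live in the same package. A small sketch of reading the new statistics, including the streaming queue size added in this patch; the reporting class itself is hypothetical:

    // Hypothetical reporter; assumes it is compiled inside the
    // org.sleuthkit.autopsy.ingest package so the getters are accessible.
    package org.sleuthkit.autopsy.ingest;

    import java.util.List;

    class SnapshotReport {

        static void print(IngestProgressSnapshotProvider provider) {
            List<Snapshot> snapshots = provider.getIngestJobSnapshots();
            for (Snapshot snapshot : snapshots) {
                System.out.printf("job %d on %s: %d/%d files, %.1f files/sec, %d streamed queued%n",
                        snapshot.getJobId(),
                        snapshot.getDataSource(),
                        snapshot.getFilesProcessed(),
                        snapshot.getFilesEstimated(),
                        snapshot.getSpeed(),
                        snapshot.getStreamingQueueSize());
            }
        }
    }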
diff --git a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddMultipleImagesTask.java b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddMultipleImagesTask.java
index bedf15bdae..f5a2f2bd8c 100644
--- a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddMultipleImagesTask.java
+++ b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddMultipleImagesTask.java
@@ -29,6 +29,7 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.DefaultAddDataSourceCallbacks;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitJNI;
@@ -60,6 +61,7 @@ class AddMultipleImagesTask implements Runnable {
private List<String> errorMessages = new ArrayList<>();
private DataSourceProcessorResult result;
private List<Content> newDataSources = new ArrayList<>();
+ private Image currentImage = null;
/*
* The cancellation requested flag and SleuthKit add image process are
@@ -105,6 +107,8 @@ class AddMultipleImagesTask implements Runnable {
@Messages({
"AddMultipleImagesTask.cancelled=Cancellation: Add image process reverted",
+ "# {0} - image path",
+ "AddMultipleImagesTask.imageError=Error adding image {0} to the database"
})
@Override
public void run() {
@@ -118,15 +122,25 @@ class AddMultipleImagesTask implements Runnable {
List<String> corruptedImageFilePaths = new ArrayList<>();
progressMonitor.setIndeterminate(true);
for (String imageFilePath : imageFilePaths) {
+ try {
+ currentImage = SleuthkitJNI.addImageToDatabase(currentCase.getSleuthkitCase(), new String[]{imageFilePath},
+ 0, timeZone, "", "", "", deviceId);
+ } catch (TskCoreException ex) {
+ LOGGER.log(Level.SEVERE, "Error adding image " + imageFilePath + " to database", ex);
+ errorMessages.add(Bundle.AddMultipleImagesTask_imageError(imageFilePath));
+ result = DataSourceProcessorResult.CRITICAL_ERRORS;
+ }
+
synchronized (tskAddImageProcessLock) {
+
if (!tskAddImageProcessStopped) {
addImageProcess = currentCase.getSleuthkitCase().makeAddImageProcess(timeZone, false, false, "");
} else {
return;
}
}
- run(imageFilePath, corruptedImageFilePaths, errorMessages);
- commitOrRevertAddImageProcess(imageFilePath, errorMessages, newDataSources);
+ run(imageFilePath, currentImage, corruptedImageFilePaths, errorMessages);
+ finishAddImageProcess(imageFilePath, errorMessages, newDataSources);
synchronized (tskAddImageProcessLock) {
if (tskAddImageProcessStopped) {
errorMessages.add(Bundle.AddMultipleImagesTask_cancelled());
@@ -218,7 +232,8 @@ class AddMultipleImagesTask implements Runnable {
/**
* Attempts to add an input image to the case.
*
- * @param imageFilePath The image file path.
+ * @param imageFilePath Path to the image.
+ * @param image The image.
* @param corruptedImageFilePaths If the image cannot be added because
* Sleuth Kit cannot detect a filesystem,
* the image file path is added to this list
@@ -233,13 +248,13 @@ class AddMultipleImagesTask implements Runnable {
"# {0} - imageFilePath", "# {1} - deviceId", "# {2} - exceptionMessage", "AddMultipleImagesTask.criticalErrorAdding=Critical error adding {0} for device {1}: {2}",
"# {0} - imageFilePath", "# {1} - deviceId", "# {2} - exceptionMessage", "AddMultipleImagesTask.criticalErrorReverting=Critical error reverting add image process for {0} for device {1}: {2}",
"# {0} - imageFilePath", "# {1} - deviceId", "# {2} - exceptionMessage", "AddMultipleImagesTask.nonCriticalErrorAdding=Non-critical error adding {0} for device {1}: {2}",})
- private void run(String imageFilePath, List<String> corruptedImageFilePaths, List<String> errorMessages) {
+ private void run(String imageFilePath, Image image, List<String> corruptedImageFilePaths, List<String> errorMessages) {
/*
* Try to add the image to the case database as a data source.
*/
progressMonitor.setProgressText(Bundle.AddMultipleImagesTask_adding(imageFilePath));
try {
- addImageProcess.run(deviceId, new String[]{imageFilePath});
+ addImageProcess.run(deviceId, image, 0, new DefaultAddDataSourceCallbacks());
} catch (TskCoreException ex) {
if (ex.getMessage().contains(TSK_FS_TYPE_UNKNOWN_ERR_MSG)) {
/*
@@ -259,9 +274,9 @@ class AddMultipleImagesTask implements Runnable {
}
/**
- * Commits or reverts the results of the TSK add image process. If the
- * process was stopped before it completed or there was a critical error the
- * results are reverted, otherwise they are committed.
+ * Finishes the TSK add image process. The image will remain in the database
+ * regardless of whether the user canceled the process or a critical error
+ * occurred.
*
* @param imageFilePath The image file path.
* @param errorMessages Error messages, if any, are added to this list for
@@ -270,44 +285,26 @@ class AddMultipleImagesTask implements Runnable {
* added to this list for eventual return via the
* getter method.
*/
- private void commitOrRevertAddImageProcess(String imageFilePath, List<String> errorMessages, List<Content> newDataSources) {
- synchronized (tskAddImageProcessLock) {
- if (tskAddImageProcessStopped || criticalErrorOccurred) {
- try {
- addImageProcess.revert();
- } catch (TskCoreException ex) {
- errorMessages.add(Bundle.AddMultipleImagesTask_criticalErrorReverting(imageFilePath, deviceId, ex.getLocalizedMessage()));
- criticalErrorOccurred = true;
- }
+ private void finishAddImageProcess(String imageFilePath, List<String> errorMessages, List<Content> newDataSources) {
+ synchronized (tskAddImageProcessLock) {
+ /*
+ * Add the new image to the list of new data
+ * sources to be returned via the getter method.
+ */
+ newDataSources.add(currentImage);
+
+ // Do no further processing if the user canceled
+ if (tskAddImageProcessStopped) {
return;
}
-
- /*
- * Try to commit the results of the add image process, retrieve the new
- * image from the case database, and add it to the list of new data
- * sources to be returned via the getter method.
- */
- try {
- long imageId = addImageProcess.commit();
- Image dataSource = currentCase.getSleuthkitCase().getImageById(imageId);
- newDataSources.add(dataSource);
- /*
- * Verify the size of the new image. Note that it may not be what is
- * expected, but at least part of it was added to the case.
- */
- String verificationError = dataSource.verifyImageSize();
- if (!verificationError.isEmpty()) {
- errorMessages.add(Bundle.AddMultipleImagesTask_nonCriticalErrorAdding(imageFilePath, deviceId, verificationError));
- }
- } catch (TskCoreException ex) {
- /*
- * The add image process commit failed or querying the case database
- * for the newly added image failed. Either way, this is a critical
- * error.
- */
- errorMessages.add(Bundle.AddMultipleImagesTask_criticalErrorAdding(imageFilePath, deviceId, ex.getLocalizedMessage()));
- criticalErrorOccurred = true;
+ /*
+ * Verify the size of the new image. Note that it may not be what is
+ * expected, but at least part of it was added to the case.
+ */
+ String verificationError = currentImage.verifyImageSize();
+ if (!verificationError.isEmpty()) {
+ errorMessages.add(Bundle.AddMultipleImagesTask_nonCriticalErrorAdding(imageFilePath, deviceId, verificationError));
}
}
}
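
The rework above is possible because the image row is now created in the case database before the add-image process runs, so there is no longer a commit/revert decision to make. A minimal sketch of the new flow, assuming a SleuthkitCase named caseDb; the helper class and variable names are illustrative, not part of the patch:

    import org.sleuthkit.datamodel.DefaultAddDataSourceCallbacks;
    import org.sleuthkit.datamodel.Image;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.SleuthkitJNI;
    import org.sleuthkit.datamodel.TskCoreException;

    class AddImageFlowSketch {
        static Image addImage(SleuthkitCase caseDb, String imagePath, String deviceId,
                String timeZone) throws TskCoreException {
            // Create the image in the case database up front; it stays there
            // even if later processing is canceled or fails.
            Image image = SleuthkitJNI.addImageToDatabase(caseDb, new String[]{imagePath},
                    0, timeZone, "", "", "", deviceId);
            // The add-image process then populates file system data for the
            // existing image; nothing needs to be committed or reverted.
            SleuthkitCase.AddImageProcess process = caseDb.makeAddImageProcess(timeZone, false, false, "");
            process.run(deviceId, image, 0, new DefaultAddDataSourceCallbacks());
            return image;
        }
    }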
diff --git a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/Bundle.properties-MERGED
index b12ce63db5..a284e16eb4 100644
--- a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/Bundle.properties-MERGED
@@ -67,6 +67,8 @@ AddMultipleImagesTask.criticalErrorReverting=Critical error reverting add image
# {1} - exceptionMessage
AddMultipleImagesTask.errorAddingImgWithoutFileSystem=Error adding images without file systems for device {0}: {1}
AddMultipleImagesTask.fsTypeUnknownErr=Cannot determine file system type
+# {0} - image path
+AddMultipleImagesTask.imageError=Error adding image {0} to the database
# {0} - imageFilePath
# {1} - deviceId
# {2} - exceptionMessage
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
index 7c69fa02cc..65bf047fa1 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
@@ -59,8 +59,6 @@ import org.sleuthkit.datamodel.TskException;
@Messages({
"HashDbIngestModule.noKnownBadHashDbSetMsg=No notable hash set.",
"HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn=Notable file search will not be executed.",
- "HashDbIngestModule.noChangeHashDbSetMsg=No 'No Change' hash set.",
- "HashDbIngestModule.noChangeFileSearchWillNotExecuteWarn='No Change' file search will not be executed.",
"HashDbIngestModule.noKnownHashDbSetMsg=No known hash set.",
"HashDbIngestModule.knownFileSearchWillNotExecuteWarn=Known file search will not be executed.",
"# {0} - fileName", "HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}.",
@@ -146,13 +144,6 @@ public class HashDbIngestModule implements FileIngestModule {
Bundle.HashDbIngestModule_noKnownBadHashDbSetMsg(),
Bundle.HashDbIngestModule_knownBadFileSearchWillNotExecuteWarn()));
}
-
- if (noChangeHashSets.isEmpty()) {
- services.postMessage(IngestMessage.createWarningMessage(
- HashLookupModuleFactory.getModuleName(),
- Bundle.HashDbIngestModule_noChangeHashDbSetMsg(),
- Bundle.HashDbIngestModule_noChangeFileSearchWillNotExecuteWarn()));
- }
if (knownHashSets.isEmpty()) {
services.postMessage(IngestMessage.createWarningMessage(
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED
index 5d005fab85..01660bb427 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/Bundle.properties-MERGED
@@ -10,6 +10,11 @@ FilesSetDefsPanel.cancelNewSetMsg=Cancel
# {0} - file name
FilesSetDefsPanel.exportButtonActionPerformed.fileExistPrompt=File {0} exists, overwrite?
FilesSetDefsPanel.gigaBytes=Gigabytes
+# {0} - fileName
+# {1} - errorMessage
+FilesSetDefsPanel.importSetButtonActionPerformed.importError=The rules file "{0}" could not be read:\n{1}.
+FilesSetDefsPanel.importSetButtonActionPerformed.noFiles=No files sets were found in the selected files.
+FilesSetDefsPanel.importSetButtonActionPerformed.noFilesSelected=No files sets were selected.
# {0} - filter name
# {1} - profile name
FilesSetDefsPanel.ingest.fileFilterInUseError=The selected file filter, {0}, is being used by a profile, {1}, and cannot be deleted while any profile uses it.
@@ -19,7 +24,6 @@ FilesSetDefsPanel.interesting.exportButtonAction.featureName=Interesting Files S
FilesSetDefsPanel.interesting.ExportedMsg=Interesting files set exported
FilesSetDefsPanel.interesting.exportSetButton.text=Export Set
FilesSetDefsPanel.interesting.failExportMsg=Export of interesting files set failed
-FilesSetDefsPanel.interesting.failImportMsg=Interesting files set not imported
FilesSetDefsPanel.interesting.fileExtensionFilterLbl=Autopsy Interesting File Set File (xml)
FilesSetDefsPanel.interesting.importButtonAction.featureName=Interesting Files Set Import
FilesSetDefsPanel.interesting.importOwConflict=Import Interesting files set conflict
@@ -59,6 +63,34 @@ FilesSetRulePanel.NoPathError=Path cannot be empty
FilesSetRulePanel.ZeroFileSizeError=File size condition value must not be 0 (Unless = is selected).
FilesSetsManager.allFilesAndDirectories=All Files and Directories (Not Unallocated Space)
FilesSetsManager.allFilesDirectoriesAndUnallocated=All Files, Directories, and Unallocated Space
+# {0} - regex
+InterestingItemsFilesSetSettings.readDateCondition.failedCompiledRegex=Error determining ''{0}'' number
+# {0} - condition
+# {1} - rule
+InterestingItemsFilesSetSettings.readMetaTypeCondition.malformedXml=Files set is malformed for metatype condition, ''{0}'', in rule ''{1}''
+# {0} - regex
+# {1} - rule
+InterestingItemsFilesSetSettings.readNameCondition.errorCompilingRegex=Error compiling ''{0}'' regex in rule ''{1}''
+# {0} - character
+# {1} - rule
+InterestingItemsFilesSetSettings.readNameCondition.illegalChar=File name has illegal character of ''{0}'' in rule ''{1}''
+# {0} - tagName
+# {1} - ruleName
+InterestingItemsFilesSetSettings.readNameCondition.invalidTag=Name condition has invalid tag name of ''{0}'' for rule ''{1}''
+# {0} - regex
+InterestingItemsFilesSetSettings.readPathCondition.failedCompiledRegex=Error compiling ''{0}'' regex
+# {0} - ruleName
+InterestingItemsFilesSetSettings.readPathCondition.pathConditionCreationError=Error creating path condition for rule ''{0}''
+# {0} - ruleName
+InterestingItemsFilesSetSettings.readRule.missingNecessary=Invalid rule in files set, missing necessary conditions for ''{0}''
+# {0} - filePathStr
+InterestingItemsFilesSetSettings.readSerializedDefinitions.failedReadSettings=Failed to read settings from ''{0}''
+# {0} - rule
+InterestingItemsFilesSetSettings.readSizeCondition.invalidComparator=Invalid comparator or size unit in files set for rule ''{0}''
+# {0} - rule
+InterestingItemsFilesSetSettings.readSizeCondition.malformedXml=Files set is malformed, missing at least one 'fileSize' attribute for rule ''{0}''
+# {0} - rule
+InterestingItemsFilesSetSettings.readSizeCondition.notIntegerValue=Non-integer size in files set for rule ''{0}''
InterestingItemsIngestModuleFactory.defaultSettingsError=Error getting default interesting files settings from file.
OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Interesting Files Identifier ingest module. \n\n Identifies interesting files as defined by interesting files rule sets.
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetDefsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetDefsPanel.java
index 7e0b57a412..7b41f71e25 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetDefsPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetDefsPanel.java
@@ -1116,11 +1116,15 @@ public final class FilesSetDefsPanel extends IngestModuleGlobalSettingsPanel imp
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
}//GEN-LAST:event_copySetButtonActionPerformed
- @NbBundle.Messages({
+ @NbBundle.Messages({
"FilesSetDefsPanel.interesting.failImportMsg=Interesting files set not imported",
"FilesSetDefsPanel.interesting.fileExtensionFilterLbl=Autopsy Interesting File Set File (xml)",
- "FilesSetDefsPanel.interesting.importButtonAction.featureName=Interesting Files Set Import"
- })
+ "FilesSetDefsPanel.interesting.importButtonAction.featureName=Interesting Files Set Import",
+ "FilesSetDefsPanel.importSetButtonActionPerformed.noFilesSelected=No files sets were selected.",
+ "FilesSetDefsPanel.importSetButtonActionPerformed.noFiles=No files sets were found in the selected files.",
+ "# {0} - fileName",
+ "# {1} - errorMessage",
+ "FilesSetDefsPanel.importSetButtonActionPerformed.importError=The rules file \"{0}\" could not be read:\n{1}.",})
private void importSetButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importSetButtonActionPerformed
//save currently selected value as default value to select
FilesSet selectedSet = this.setsList.getSelectedValue();
@@ -1136,8 +1140,8 @@ public final class FilesSetDefsPanel extends IngestModuleGlobalSettingsPanel imp
File selFile = chooser.getSelectedFile();
if (selFile == null) {
JOptionPane.showMessageDialog(this,
- NbBundle.getMessage(this.getClass(), "FilesSetDefsPanel.interesting.failImportMsg"),
- NbBundle.getMessage(this.getClass(), "FilesSetDefsPanel.interesting.importButtonAction.featureName"),
+ Bundle.FilesSetDefsPanel_importSetButtonActionPerformed_noFilesSelected(),
+ Bundle.FilesSetDefsPanel_interesting_importButtonAction_featureName(),
JOptionPane.WARNING_MESSAGE);
logger.warning("Selected file was null, when trying to import interesting files set definitions");
return;
@@ -1146,12 +1150,17 @@ public final class FilesSetDefsPanel extends IngestModuleGlobalSettingsPanel imp
try {
importedSets = InterestingItemsFilesSetSettings.readDefinitionsXML(selFile).values(); //read the xml from that path
if (importedSets.isEmpty()) {
- throw new FilesSetsManager.FilesSetsManagerException("No Files Sets were read from the xml.");
+ JOptionPane.showMessageDialog(this,
+ Bundle.FilesSetDefsPanel_importSetButtonActionPerformed_noFiles(),
+ Bundle.FilesSetDefsPanel_interesting_importButtonAction_featureName(),
+ JOptionPane.WARNING_MESSAGE);
+ logger.log(Level.WARNING, "No Interesting files set definitions were read from the selected file");
+ return;
}
} catch (FilesSetsManager.FilesSetsManagerException ex) {
JOptionPane.showMessageDialog(this,
- NbBundle.getMessage(this.getClass(), "FilesSetDefsPanel.interesting.failImportMsg"),
- NbBundle.getMessage(this.getClass(), "FilesSetDefsPanel.interesting.importButtonAction.featureName"),
+ Bundle.FilesSetDefsPanel_importSetButtonActionPerformed_importError(selFile.getName(), ex.getMessage()),
+ Bundle.FilesSetDefsPanel_interesting_importButtonAction_featureName(),
JOptionPane.WARNING_MESSAGE);
logger.log(Level.WARNING, "No Interesting files set definitions were read from the selected file, exception", ex);
return;
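
The replacements above rely on the NetBeans localization pattern used throughout this change: an @Messages annotation is compiled into a package-private Bundle class with one static method per key, taking one argument per {n} placeholder. A minimal sketch of that mechanism, with a hypothetical class and key:

    import org.openide.util.NbBundle.Messages;

    class ImportErrorSketch {
        @Messages({
            "# {0} - fileName",
            "# {1} - errorMessage",
            "ImportErrorSketch.importError=The rules file \"{0}\" could not be read:\n{1}."})
        String describeFailure(String fileName, Exception ex) {
            // At build time the annotation processor generates
            // Bundle.ImportErrorSketch_importError(Object, Object), which
            // substitutes its arguments for {0} and {1}.
            return Bundle.ImportErrorSketch_importError(fileName, ex.getMessage());
        }
    }

The same keys are also merged into the Bundle.properties-MERGED files shown elsewhere in this patch, which is why each string edit appears twice.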
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemsFilesSetSettings.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemsFilesSetSettings.java
index 6069aad3f6..863243a4e1 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemsFilesSetSettings.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemsFilesSetSettings.java
@@ -37,6 +37,7 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang.StringUtils;
+import static org.openide.util.NbBundle.Messages;
import org.openide.util.io.NbObjectInputStream;
import org.openide.util.io.NbObjectOutputStream;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -122,6 +123,10 @@ class InterestingItemsFilesSetSettings implements Serializable {
*
* @throws FilesSetsManagerException if file could not be read
*/
+ @Messages({
+ "# {0} - filePathStr",
+ "InterestingItemsFilesSetSettings.readSerializedDefinitions.failedReadSettings=Failed to read settings from ''{0}''"
+ })
private static Map<String, FilesSet> readSerializedDefinitions(String serialFileName) throws FilesSetsManager.FilesSetsManagerException {
Path filePath = Paths.get(PlatformUtil.getUserConfigDirectory(), serialFileName);
File fileSetFile = filePath.toFile();
@@ -133,7 +138,10 @@ class InterestingItemsFilesSetSettings implements Serializable {
return filesSetsSettings.getFilesSets();
}
} catch (IOException | ClassNotFoundException ex) {
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Failed to read settings from %s", filePathStr), ex);
+
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readSerializedDefinitions_failedReadSettings(filePathStr),
+ ex);
}
} else {
return new HashMap<>();
@@ -151,6 +159,12 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - regex",
+ "InterestingItemsFilesSetSettings.readPathCondition.failedCompiledRegex=Error compiling ''{0}'' regex",
+ "# {0} - ruleName",
+ "InterestingItemsFilesSetSettings.readPathCondition.pathConditionCreationError=Error creating path condition for rule ''{0}''"
+ })
private static ParentPathCondition readPathCondition(Element ruleElement) throws FilesSetsManager.FilesSetsManagerException {
// Read in the optional path condition. Null is o.k., but if the attribute
// is there, be sure it is not malformed.
@@ -164,14 +178,17 @@ class InterestingItemsFilesSetSettings implements Serializable {
pathCondition = new ParentPathCondition(pattern);
} catch (PatternSyntaxException ex) {
logger.log(Level.SEVERE, "Error compiling " + PATH_REGEX_ATTR + " regex, ignoring malformed path condition definition", ex); // NON-NLS
- throw new FilesSetsManager.FilesSetsManagerException(String.format("error compiling %s regex", PATH_REGEX_ATTR), ex);
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readPathCondition_failedCompiledRegex(PATH_REGEX_ATTR),
+ ex);
}
} else if (!path.isEmpty() && pathRegex.isEmpty()) {
pathCondition = new ParentPathCondition(path);
}
if (pathCondition == null) {
// Malformed attribute.
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Error creating path condition for rule %s", readRuleName(ruleElement)));
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readPathCondition_pathConditionCreationError(readRuleName(ruleElement)));
}
}
return pathCondition;
@@ -188,6 +205,9 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - regex",
+ "InterestingItemsFilesSetSettings.readDateCondition.failedCompiledRegex=Error determining ''{0}'' number",})
private static DateCondition readDateCondition(Element ruleElement) throws FilesSetsManager.FilesSetsManagerException {
// Read in the optional path condition. Null is o.k., but if the attribute
// is there, be sure it is not malformed.
@@ -199,7 +219,10 @@ class InterestingItemsFilesSetSettings implements Serializable {
dateCondition = new DateCondition(Integer.parseInt(daysIncluded));
} catch (NumberFormatException ex) {
logger.log(Level.SEVERE, "Error creating condition for " + daysIncluded + ", ignoring malformed date condition definition", ex); // NON-NLS
- throw new FilesSetsManager.FilesSetsManagerException(String.format("error compiling %s regex", DAYS_INCLUDED_ATTR), ex);
+
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readDateCondition_failedCompiledRegex(DAYS_INCLUDED_ATTR),
+ ex);
}
}
}
@@ -234,6 +257,9 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - ruleName",
+ "InterestingItemsFilesSetSettings.readRule.missingNecessary=Invalid rule in files set, missing necessary conditions for ''{0}''",})
private static FilesSet.Rule readRule(Element elem) throws FilesSetsManager.FilesSetsManagerException {
String ruleName = readRuleName(elem);
FileNameCondition nameCondition = readNameCondition(elem);
@@ -244,7 +270,9 @@ class InterestingItemsFilesSetSettings implements Serializable {
DateCondition dateCondition = readDateCondition(elem); //if meta type condition or all four types of conditions the user can create are all null then don't make the rule
if (metaCondition == null || (nameCondition == null && pathCondition == null && mimeCondition == null && sizeCondition == null && dateCondition == null)) {
logger.log(Level.WARNING, "Error Reading Rule, " + ruleName + " was either missing a meta condition or contained only a meta condition. No rule was imported."); // NON-NLS
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Invalid Rule in FilesSet xml, missing necessary conditions for %s", ruleName));
+
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readRule_missingNecessary(ruleName));
}
return new FilesSet.Rule(ruleName, nameCondition, metaCondition, pathCondition, mimeCondition, sizeCondition, dateCondition);
}
@@ -260,6 +288,16 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - tagName",
+ "# {1} - ruleName",
+ "InterestingItemsFilesSetSettings.readNameCondition.invalidTag=Name condition has invalid tag name of ''{0}'' for rule ''{1}''",
+ "# {0} - regex",
+ "# {1} - rule",
+ "InterestingItemsFilesSetSettings.readNameCondition.errorCompilingRegex=Error compiling ''{0}'' regex in rule ''{1}''",
+ "# {0} - character",
+ "# {1} - rule",
+ "InterestingItemsFilesSetSettings.readNameCondition.illegalChar=File name has illegal character of ''{0}'' in rule ''{1}''",})
private static FileNameCondition readNameCondition(Element elem) throws FilesSetsManager.FilesSetsManagerException {
FileNameCondition nameCondition = null;
String content = elem.getTextContent();
@@ -273,17 +311,21 @@ class InterestingItemsFilesSetSettings implements Serializable {
} else if (elem.getTagName().equals(EXTENSION_RULE_TAG)) {
nameCondition = new FilesSet.Rule.ExtensionCondition(pattern);
} else {
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Name condition has invalid tag name of %s for rule %s", elem.getTagName(), readRuleName(elem)));
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readNameCondition_invalidTag(elem.getTagName(), readRuleName(elem)));
}
} else {
- logger.log(Level.SEVERE, "Error compiling " + elem.getTagName() + " regex, ignoring malformed '{0}' rule definition", readRuleName(elem)); // NON-NLS
- throw new FilesSetsManager.FilesSetsManagerException(String.format("error compiling %s regex in rule %s", REGEX_ATTR, readRuleName(elem)));
+ logger.log(Level.SEVERE, "Error compiling " + elem.getTagName() + " regex, ignoring malformed ''{0}'' rule definition", readRuleName(elem)); // NON-NLS
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readNameCondition_errorCompilingRegex(REGEX_ATTR, readRuleName(elem)));
}
} else {
for (String illegalChar : illegalFileNameChars) {
if (content.contains(illegalChar)) {
- logger.log(Level.SEVERE, elem.getTagName() + " content has illegal chars, ignoring malformed '{0}' rule definition", new Object[]{elem.getTagName(), readRuleName(elem)}); // NON-NLS
- throw new FilesSetsManager.FilesSetsManagerException(String.format("File name has illegal character of %s in rule %s", illegalChar, readRuleName(elem)));
+ logger.log(Level.SEVERE, elem.getTagName() + " content has illegal chars, ignoring malformed ''{0}'' rule definition", new Object[]{elem.getTagName(), readRuleName(elem)}); // NON-NLS
+
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readNameCondition_illegalChar(illegalChar, readRuleName(elem)));
}
}
if (elem.getTagName().equals(NAME_RULE_TAG)) {
@@ -326,6 +368,13 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - rule",
+ "InterestingItemsFilesSetSettings.readSizeCondition.notIntegerValue=Non integer size in files set for rule ''{0}''",
+ "# {0} - rule",
+ "InterestingItemsFilesSetSettings.readSizeCondition.invalidComparator=Invalid comparator or size unit in files set for rule ''{0}''",
+ "# {0} - rule",
+ "InterestingItemsFilesSetSettings.readSizeCondition.malformedXml=Files set is malformed missing at least one 'fileSize' attribute for rule ''{0}''",})
private static FileSizeCondition readSizeCondition(Element elem) throws FilesSetsManager.FilesSetsManagerException {
FileSizeCondition sizeCondition = null;
if (!elem.getAttribute(FS_COMPARATOR_ATTR).isEmpty() && !elem.getAttribute(FS_SIZE_ATTR).isEmpty() && !elem.getAttribute(FS_UNITS_ATTR).isEmpty()) {
@@ -336,15 +385,20 @@ class InterestingItemsFilesSetSettings implements Serializable {
sizeCondition = new FileSizeCondition(comparator, sizeUnit, size);
} catch (NumberFormatException nfEx) {
logger.log(Level.SEVERE, "Value in file size attribute was not an integer, unable to create FileSizeCondition for rule: " + readRuleName(elem), nfEx);
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Non integer size in FilesSet XML for rule %s", readRuleName(elem)), nfEx);
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readSizeCondition_notIntegerValue(readRuleName(elem)),
+ nfEx);
} catch (IllegalArgumentException iaEx) {
logger.log(Level.SEVERE, "Invalid Comparator symbol or Size Unit set in FilesSet xml, unable to create FileSizeCondition for rule: " + readRuleName(elem), iaEx);
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Invalid Comparator or Size unit in FilesSet XML for rule %s", readRuleName(elem)), iaEx);
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readSizeCondition_invalidComparator(readRuleName(elem)),
+ iaEx);
}
} //if all of them aren't populated but some of them are this is a malformed xml
else if (!elem.getAttribute(FS_COMPARATOR_ATTR).isEmpty() || !elem.getAttribute(FS_SIZE_ATTR).isEmpty() || !elem.getAttribute(FS_UNITS_ATTR).isEmpty()) {
logger.log(Level.SEVERE, "Invalid Comparator symbol or Size Unit set in FilesSet xml, unable to create FileSizeCondition for rule: " + readRuleName(elem));
- throw new FilesSetsManager.FilesSetsManagerException(String.format("XML malformed missing at least one fileSize attribute for rule %s", readRuleName(elem)));
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readSizeCondition_malformedXml(readRuleName(elem)));
}
return sizeCondition;
}
@@ -695,6 +749,11 @@ class InterestingItemsFilesSetSettings implements Serializable {
* @throws
* org.sleuthkit.autopsy.modules.interestingitems.FilesSetsManager.FilesSetsManagerException
*/
+ @Messages({
+ "# {0} - condition",
+ "# {1} - rule",
+ "InterestingItemsFilesSetSettings.readMetaTypeCondition.malformedXml=Files set is malformed for metatype condition, ''{0}'', in rule ''{1}''"
+ })
private static MetaTypeCondition readMetaTypeCondition(Element ruleElement) throws FilesSetsManager.FilesSetsManagerException {
MetaTypeCondition metaCondition = null;
// The rule must have a meta-type condition, unless a TSK Framework
@@ -716,7 +775,10 @@ class InterestingItemsFilesSetSettings implements Serializable {
default:
logger.log(Level.SEVERE, "Found {0} " + TYPE_FILTER_ATTR + " attribute with unrecognized value ''{0}'', ignoring malformed rule definition", conditionAttribute); // NON-NLS
// Malformed attribute.
- throw new FilesSetsManager.FilesSetsManagerException(String.format("Malformed XML for Metatype condition, %s, in rule %s", conditionAttribute, readRuleName(ruleElement)));
+
+ throw new FilesSetsManager.FilesSetsManagerException(
+ Bundle.InterestingItemsFilesSetSettings_readMetaTypeCondition_malformedXml(
+ conditionAttribute, readRuleName(ruleElement)));
}
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/StandardInterestingFilesSetsLoader.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/StandardInterestingFilesSetsLoader.java
index e20628e4e4..c9492db43d 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/StandardInterestingFilesSetsLoader.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/StandardInterestingFilesSetsLoader.java
@@ -28,7 +28,9 @@ import java.util.stream.Collectors;
import org.openide.modules.InstalledFileLocator;
import org.openide.modules.OnStart;
import org.openide.util.NbBundle.Messages;
+import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
/**
* When the interesting items module loads, this runnable loads standard
@@ -49,8 +51,19 @@ public class StandardInterestingFilesSetsLoader implements Runnable {
};
@Override
+ @Messages({
+ "StandardInterestingFilesSetsLoader_cannotLoadStandard=Unable to properly read standard interesting files sets.",
+ "StandardInterestingFilesSetsLoader_cannotLoadUserConfigured=Unable to properly read user-configured interesting files sets.",
+ "StandardInterestingFilesSetsLoader_cannotUpdateInterestingFilesSets=Unable to write updated configuration for interesting files sets to config directory."
+ })
public void run() {
- Map<String, FilesSet> standardInterestingFileSets = readStandardFileXML();
+ Map<String, FilesSet> standardInterestingFileSets = null;
+ try {
+ standardInterestingFileSets = readStandardFileXML();
+ } catch (FilesSetsManager.FilesSetsManagerException ex) {
+ handleError(Bundle.StandardInterestingFilesSetsLoader_cannotLoadStandard(), ex);
+ return;
+ }
// Call FilesSetManager.getInterestingFilesSets() to get a Map of the existing rule sets.
Map<String, FilesSet> userConfiguredSettings = null;
@@ -58,10 +71,8 @@ public class StandardInterestingFilesSetsLoader implements Runnable {
userConfiguredSettings = FilesSetsManager.getInstance().getInterestingFilesSets();
} catch (FilesSetsManager.FilesSetsManagerException ex) {
LOGGER.log(Level.SEVERE, "Unable to properly read user-configured interesting files sets.", ex);
- }
-
- if (userConfiguredSettings == null) {
- userConfiguredSettings = new HashMap<>();
+ handleError(Bundle.StandardInterestingFilesSetsLoader_cannotLoadUserConfigured(), ex);
+ return;
}
// Add each FilesSet read from the standard rules set XML files that is missing from the Map to the Map.
@@ -71,7 +82,20 @@ public class StandardInterestingFilesSetsLoader implements Runnable {
// Call FilesSetManager.setInterestingFilesSets with the updated Map.
FilesSetsManager.getInstance().setInterestingFilesSets(userConfiguredSettings);
} catch (FilesSetsManager.FilesSetsManagerException ex) {
- LOGGER.log(Level.SEVERE, "Unable to write updated configuration for interesting files sets to config directory.", ex);
+ handleError(Bundle.StandardInterestingFilesSetsLoader_cannotUpdateInterestingFilesSets(), ex);
+ }
+ }
+
+ /**
+ * Handles top-level exceptions by logging them and, when running with a GUI, displaying the message to the user.
+ *
+ * @param message The message to display and log.
+ * @param ex The exception (if any) to log.
+ */
+ private static void handleError(String message, Exception ex) {
+ LOGGER.log(Level.SEVERE, message, ex);
+ if (RuntimeProperties.runningWithGUI()) {
+ MessageNotifyUtil.Message.error(message);
}
}
@@ -82,12 +106,17 @@ public class StandardInterestingFilesSetsLoader implements Runnable {
*
* @return The mapping of files set keys to the file sets.
*/
- private static Map<String, FilesSet> readStandardFileXML() {
+ private static Map<String, FilesSet> readStandardFileXML() throws FilesSetsManager.FilesSetsManagerException {
Map<String, FilesSet> standardInterestingFileSets = new HashMap<>();
- File[] standardFileSets = InstalledFileLocator.getDefault()
- .locate(CONFIG_DIR, StandardInterestingFilesSetsLoader.class.getPackage().getName(), false)
- .listFiles(DEFAULT_XML_FILTER);
+ File configFolder = InstalledFileLocator.getDefault().locate(
+ CONFIG_DIR, StandardInterestingFilesSetsLoader.class.getPackage().getName(), false);
+
+ if (configFolder == null || !configFolder.exists() || !configFolder.isDirectory()) {
+ throw new FilesSetsManager.FilesSetsManagerException("No standard interesting files set folder exists.");
+ }
+
+ File[] standardFileSets = configFolder.listFiles(DEFAULT_XML_FILTER);
for (File standardFileSetsFile : standardFileSets) { //NON-NLS
try {
diff --git a/Core/src/org/sleuthkit/autopsy/report/infrastructure/ReportWizardIterator.java b/Core/src/org/sleuthkit/autopsy/report/infrastructure/ReportWizardIterator.java
index 7a716e246f..52d15149a7 100644
--- a/Core/src/org/sleuthkit/autopsy/report/infrastructure/ReportWizardIterator.java
+++ b/Core/src/org/sleuthkit/autopsy/report/infrastructure/ReportWizardIterator.java
@@ -81,9 +81,15 @@ final class ReportWizardIterator implements WizardDescriptor.Iterator<WizardDescriptor> {
panels = new ArrayList<>(panels);
panels.add(1, dataSourceSelectionPanel);
}
@@ -138,7 +145,7 @@ final class ReportWizardIterator implements WizardDescriptor.Iterator<WizardDescriptor> {
panels = new ArrayList<>(panels);
panels.add(1, dataSourceSelectionPanel);
}
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
index 9bc02f491c..0c80cb5315 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
@@ -73,6 +73,7 @@ import org.sleuthkit.autopsy.coreutils.History;
import org.sleuthkit.autopsy.coreutils.LoggedTask;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
+import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
@@ -375,17 +376,26 @@ public class TimeLineController {
}
}
+
/**
- * "Shut down" Timeline. Remove all the case and ingest listers. Close the
- * timeline window.
+ * Shuts down the task executor in charge of handling case events.
+ */
+ void shutDownTimeLineListeners() {
+ ThreadUtils.shutDownTaskExecutor(executor);
+ }
+
+ /**
+ * "Shut down" Timeline. Close the timeline window.
*/
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
- public void shutDownTimeLine() {
+ public void shutDownTimeLineGui() {
if (topComponent != null) {
topComponent.close();
topComponent = null;
}
}
+
+
/**
* Add the case and ingest listeners, prompt for rebuilding the database if
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java
index a2261b07b0..eedf2bc52c 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java
+++ b/Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java
@@ -95,7 +95,8 @@ public class TimeLineModule {
*/
synchronized (controllerLock) {
if (controller != null) {
- SwingUtilities.invokeLater(controller::shutDownTimeLine);
+ controller.shutDownTimeLineListeners();
+ SwingUtilities.invokeLater(controller::shutDownTimeLineGui);
}
controller = null;
}
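
The two-step shutdown above is deliberate: listener threads are stopped synchronously before any GUI teardown is queued, so no case event can arrive while the window is closing. A minimal sketch of that ordering, assuming same-package access to the controller; the enclosing class here is illustrative:

    import javax.swing.SwingUtilities;

    class TimelineShutdownSketch {
        void onCaseClosing(TimeLineController controller) {
            // Stop the executor that handles case events first, on the
            // current thread, so no further events are processed.
            controller.shutDownTimeLineListeners();
            // GUI teardown is confined to the AWT Event Dispatch Thread.
            SwingUtilities.invokeLater(controller::shutDownTimeLineGui);
        }
    }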
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java
index 16d43ab7fe..22beaa32d8 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/centralrepository/datamodel/CentralRepoPersonasTest.java
@@ -304,7 +304,7 @@ public class CentralRepoPersonasTest extends TestCase {
Assert.assertTrue(Instant.now().toEpochMilli() - dogPersona.getCreatedDate() < 600 * 1000);
// Step 3. Add Persona Aliases
- PersonaAlias alias1 = dogPersona.addAlias("Good Boy", "Coz he's is the best dog ever", Persona.Confidence.MEDIUM);
+ PersonaAlias alias1 = dogPersona.addAlias("Good Boy", "Coz he's is the best dog ever", Persona.Confidence.MODERATE);
PersonaAlias alias2 = dogPersona.addAlias("WoofWoof", "How many dumb comments can I come up with?", Persona.Confidence.LOW);
Assert.assertNotNull(alias1);
@@ -320,7 +320,7 @@ public class CentralRepoPersonasTest extends TestCase {
//Step 4: Add Persona metadata
- PersonaMetadata metadata1 = dogPersona.addMetadata("Color", "Black", "He's got thick black hair.", Persona.Confidence.MEDIUM);
+ PersonaMetadata metadata1 = dogPersona.addMetadata("Color", "Black", "He's got thick black hair.", Persona.Confidence.MODERATE);
PersonaMetadata metadata2 = dogPersona.addMetadata("Gender", "Male", "Because...", Persona.Confidence.LOW);
Assert.assertNotNull(metadata1);
@@ -341,7 +341,7 @@ public class CentralRepoPersonasTest extends TestCase {
.getOrCreateAccount(facebookAccountType, FACEBOOK_ID_CATDOG);
// Add an account to persona
- dogPersona.addAccount(catdogFBAccount, "Looks like dog, barks like a dog...", Persona.Confidence.MEDIUM);
+ dogPersona.addAccount(catdogFBAccount, "Looks like dog, barks like a dog...", Persona.Confidence.MODERATE);
// Get all acounts for the persona...
Collection<PersonaAccount> personaAccounts = dogPersona.getPersonaAccounts();
@@ -409,14 +409,14 @@ public class CentralRepoPersonasTest extends TestCase {
// Step 3. Add Persona Aliases
- PersonaAlias alias1 = dogPersona.addAlias("Good Boy", "Coz he's is the best dog ever", Persona.Confidence.MEDIUM);
+ PersonaAlias alias1 = dogPersona.addAlias("Good Boy", "Coz he's is the best dog ever", Persona.Confidence.MODERATE);
PersonaAlias alias2 = dogPersona.addAlias("WoofWoof", "How many dumb comments can I come up with?", Persona.Confidence.LOW);
Assert.assertNotNull(alias1);
Assert.assertNotNull(alias2);
//Step 4: Add Persona metadata
- PersonaMetadata metadata1 = dogPersona.addMetadata("Color", "Black", "He's got thick black hair.", Persona.Confidence.MEDIUM);
+ PersonaMetadata metadata1 = dogPersona.addMetadata("Color", "Black", "He's got thick black hair.", Persona.Confidence.MODERATE);
PersonaMetadata metadata2 = dogPersona.addMetadata("Gender", "Male", "Because...", Persona.Confidence.LOW);
Assert.assertNotNull(metadata1);
@@ -444,7 +444,7 @@ public class CentralRepoPersonasTest extends TestCase {
.getOrCreateAccount(facebookAccountType, FACEBOOK_ID_CATDOG);
// Add an account to persona
- dogPersona.addAccount(catdogFBAccount, "Looks like dog, barks like a dog...", Persona.Confidence.MEDIUM);
+ dogPersona.addAccount(catdogFBAccount, "Looks like dog, barks like a dog...", Persona.Confidence.MODERATE);
// Step 6: Create a Second Persona
@@ -456,12 +456,12 @@ public class CentralRepoPersonasTest extends TestCase {
// Add Persona Aliases
- PersonaAlias catAlias1 = catPersona.addAlias("CutieKitty", "Because", Persona.Confidence.MEDIUM);
+ PersonaAlias catAlias1 = catPersona.addAlias("CutieKitty", "Because", Persona.Confidence.MODERATE);
Assert.assertNotNull(catAlias1);
//Step 4: Add Persona metadata
- PersonaMetadata catMetadata1 = catPersona.addMetadata("Color", "White", "White as snow.", Persona.Confidence.MEDIUM);
+ PersonaMetadata catMetadata1 = catPersona.addMetadata("Color", "White", "White as snow.", Persona.Confidence.MODERATE);
PersonaMetadata catMetadata2 = catPersona.addMetadata("Breed", "Persian", "Just Because...", Persona.Confidence.LOW);
PersonaMetadata catMetadata3 = catPersona.addMetadata("Legs", "Four", "I counted", Persona.Confidence.HIGH);
@@ -618,7 +618,7 @@ public class CentralRepoPersonasTest extends TestCase {
CentralRepository.getInstance().addArtifactInstance(dogEmailAcctInstance);
- PersonaAccount pa3 = dogPersona.addAccount(dogEmailAccount, "Thats definitely a dog email account", Persona.Confidence.MEDIUM);
+ PersonaAccount pa3 = dogPersona.addAccount(dogEmailAccount, "Thats definitely a dog email account", Persona.Confidence.MODERATE);
Assert.assertNotNull(pa3);
Assert.assertTrue(pa3.getPersona().getName().equalsIgnoreCase(DOG_PERSONA_NAME));
@@ -651,7 +651,7 @@ public class CentralRepoPersonasTest extends TestCase {
CentralRepository.getInstance().addArtifactInstance(catWhatsAppAccountInstance2);
- PersonaAccount pa4 = catPersona.addAccount(catWhatsAppAccount, "The cat has a WhatsApp account", Persona.Confidence.MEDIUM);
+ PersonaAccount pa4 = catPersona.addAccount(catWhatsAppAccount, "The cat has a WhatsApp account", Persona.Confidence.MODERATE);
Assert.assertNotNull(pa4);
Assert.assertTrue(pa4.getPersona().getName().equalsIgnoreCase(CAT_PERSONA_NAME));
@@ -1059,9 +1059,16 @@ public class CentralRepoPersonasTest extends TestCase {
}
// Get account with exact match
- Collection<CentralRepoAccount> accountsWithKnownIdentifier = CentralRepoAccount.getAccountsWithIdentifier(CAT_WHATSAPP_ID);
+ Collection<CentralRepoAccount> accountsWithKnownIdentifier = CentralRepoAccount.getAccountsWithIdentifier("joeexotic555@yahoo.com");
Assert.assertEquals(1, accountsWithKnownIdentifier.size());
for (CentralRepoAccount acc: accountsWithKnownIdentifier) {
+ Assert.assertTrue(acc.getIdentifier().contains("joeexotic555@yahoo.com"));
+ }
+
+ // Get account with exact match
+ Collection<CentralRepoAccount> accountsWithKnownIdentifier2 = CentralRepoAccount.getAccountsWithIdentifier(CAT_WHATSAPP_ID);
+ Assert.assertEquals(1, accountsWithKnownIdentifier2.size());
+ for (CentralRepoAccount acc: accountsWithKnownIdentifier2) {
Assert.assertTrue(acc.getIdentifier().contains(CAT_WHATSAPP_ID));
}
diff --git a/CoreLibs/ivy.xml b/CoreLibs/ivy.xml
index 4853d1f90e..813f35fa4d 100644
--- a/CoreLibs/ivy.xml
+++ b/CoreLibs/ivy.xml
@@ -14,7 +14,7 @@
- <dependency conf="autopsy_core->default" org="net.java.dev.jna" name="jna-platform" rev="5.5.0"/>
+ <dependency conf="autopsy_core->default" org="net.java.dev.jna" name="jna-platform" rev="5.6.0"/>
diff --git a/CoreLibs/nbproject/project.properties b/CoreLibs/nbproject/project.properties
index 42f8292d5b..8214f4cd2a 100644
--- a/CoreLibs/nbproject/project.properties
+++ b/CoreLibs/nbproject/project.properties
@@ -43,8 +43,8 @@ file.reference.jfxtras-common-8.0-r4.jar=release/modules/ext/jfxtras-common-8.0-
file.reference.jfxtras-controls-8.0-r4.jar=release/modules/ext/jfxtras-controls-8.0-r4.jar
file.reference.jfxtras-fxml-8.0-r4.jar=release/modules/ext/jfxtras-fxml-8.0-r4.jar
file.reference.jna-3.4.0.jar=release/modules/ext/jna-3.4.0.jar
-file.reference.jna-5.5.0.jar=release\\modules\\ext\\jna-5.5.0.jar
-file.reference.jna-platform-5.5.0.jar=release\\modules\\ext\\jna-platform-5.5.0.jar
+file.reference.jna-5.6.0.jar=release/modules/ext/jna-5.6.0.jar
+file.reference.jna-platform-5.6.0.jar=release/modules/ext/jna-platform-5.6.0.jar
file.reference.joda-time-2.4.jar=release/modules/ext/joda-time-2.4.jar
file.reference.jsr305-1.3.9.jar=release/modules/ext/jsr305-1.3.9.jar
file.reference.LGoodDatePicker-10.3.1.jar=release/modules/ext/LGoodDatePicker-10.3.1.jar
diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml
index d5169a8965..bbe16d23dc 100644
--- a/CoreLibs/nbproject/project.xml
+++ b/CoreLibs/nbproject/project.xml
@@ -898,10 +898,6 @@
<runtime-relative-path>ext/commons-csv-1.4.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-csv-1.4.jar</binary-origin>
</class-path-extension>
- <class-path-extension>
- <runtime-relative-path>ext/jna-5.5.0.jar</runtime-relative-path>
- <binary-origin>release/modules/ext/jna-5.5.0.jar</binary-origin>
- </class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/imageio-sgi-3.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imageio-sgi-3.2.jar</binary-origin>
@@ -922,6 +918,10 @@
<runtime-relative-path>ext/commons-compress-1.18.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-compress-1.18.jar</binary-origin>
</class-path-extension>
+ <class-path-extension>
+ <runtime-relative-path>ext/jna-platform-5.6.0.jar</runtime-relative-path>
+ <binary-origin>release\modules\ext\jna-platform-5.6.0.jar</binary-origin>
+ </class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
<binary-origin>release/modules/ext/opencv-248.jar</binary-origin>
@@ -946,6 +946,10 @@
<runtime-relative-path>ext/imageio-bmp-3.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imageio-bmp-3.2.jar</binary-origin>
</class-path-extension>
+ <class-path-extension>
+ <runtime-relative-path>ext/jna-5.6.0.jar</runtime-relative-path>
+ <binary-origin>release\modules\ext\jna-5.6.0.jar</binary-origin>
+ </class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/commons-lang-2.6.jar</runtime-relative-path>
<binary-origin>release/modules/ext/commons-lang-2.6.jar</binary-origin>
@@ -1014,10 +1018,6 @@
<runtime-relative-path>ext/dom4j-1.6.1.jar</runtime-relative-path>
<binary-origin>release/modules/ext/dom4j-1.6.1.jar</binary-origin>
</class-path-extension>
- <class-path-extension>
- <runtime-relative-path>ext/jna-platform-5.5.0.jar</runtime-relative-path>
- <binary-origin>release/modules/ext/jna-platform-5.5.0.jar</binary-origin>
- </class-path-extension>
<class-path-extension>
<runtime-relative-path>ext/imageio-metadata-3.2.jar</runtime-relative-path>
<binary-origin>release/modules/ext/imageio-metadata-3.2.jar</binary-origin>
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java
index 7a9a446ae6..68487d0fcb 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJob.java
@@ -33,7 +33,7 @@ import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestJob.Snapshot;
+import org.sleuthkit.autopsy.ingest.Snapshot;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestThreadActivitySnapshot;
import org.sleuthkit.autopsy.ingest.IngestProgressSnapshotProvider;
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java
index c37b834348..288eff8d92 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java
@@ -37,7 +37,7 @@ import org.sleuthkit.autopsy.datamodel.NodeProperty;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.Stage;
import org.sleuthkit.autopsy.guiutils.DurationCellRenderer;
import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestJob;
+import org.sleuthkit.autopsy.ingest.Snapshot;
/**
* A node which represents all AutoIngestJobs of a given AutoIngestJobStatus.
@@ -96,7 +96,7 @@ final class AutoIngestJobsNode extends AbstractNode {
* they can be changed by events in other threads which
*/
private final Stage jobStage;
- private final List<DataSourceIngestJob.Snapshot> jobSnapshot;
+ private final List<Snapshot> jobSnapshot;
private final Integer jobPriority;
AutoIngestJobWrapper(AutoIngestJob job) {
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java
index 9969fbb486..ba34a98620 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java
@@ -556,7 +556,9 @@ final class ChromeCacheExtractor {
List<AbstractFile> effFiles = fileManager.findFiles(dataSource, "f_%", cachePath); //NON-NLS
for (AbstractFile abstractFile : effFiles ) {
- this.externalFilesTable.put(cachePath + abstractFile.getName(), abstractFile);
+ if (cachePath.equals(abstractFile.getParentPath()) && abstractFile.isFile()) {
+ this.externalFilesTable.put(cachePath + abstractFile.getName(), abstractFile);
+ }
}
}
/**
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java
index 1e6fe08d02..2d1a967efc 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java
@@ -66,7 +66,7 @@ final class ExtractPrefetch extends Extract {
private static final String MODULE_NAME = "extractPREFETCH"; //NON-NLS
private static final String PREFETCH_TSK_COMMENT = "Prefetch File";
- private static final String PREFETCH_FILE_LOCATION = "/Windows/Prefetch";
+ private static final String PREFETCH_FILE_LOCATION = "/windows/prefetch";
private static final String PREFETCH_TOOL_FOLDER = "markmckinnon"; //NON-NLS
private static final String PREFETCH_TOOL_NAME_WINDOWS_64 = "parse_prefetch_x64.exe"; //NON-NLS
private static final String PREFETCH_TOOL_NAME_WINDOWS_32 = "parse_prefetch_x32.exe"; //NON-NLS
@@ -112,9 +112,9 @@ final class ExtractPrefetch extends Extract {
return;
}
- String modOutFile = modOutPath + File.separator + PREFETCH_PARSER_DB_FILE;
+ String modOutFile = modOutPath + File.separator + dataSource.getName() + "-" + PREFETCH_PARSER_DB_FILE;
try {
- String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), PREFETCH_DIR_NAME );
+ String tempDirPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME );
parsePrefetchFiles(prefetchDumper, tempDirPath, modOutFile, modOutPath);
createAppExecArtifacts(modOutFile, dataSource);
} catch (IOException ex) {
@@ -148,8 +148,8 @@ final class ExtractPrefetch extends Extract {
return;
}
- String prefetchFile = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), PREFETCH_DIR_NAME) + File.separator + pFile.getName();
- if (pFile.getParentPath().contains(PREFETCH_FILE_LOCATION)) {
+ String prefetchFile = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), dataSource.getName() + "-" + PREFETCH_DIR_NAME) + File.separator + pFile.getName();
+ if (pFile.getParentPath().toLowerCase().contains(PREFETCH_FILE_LOCATION.toLowerCase())) {
try {
ContentUtils.writeToFile(pFile, new File(prefetchFile));
} catch (IOException ex) {
@@ -293,7 +293,7 @@ final class ExtractPrefetch extends Extract {
}
}
} else {
- logger.log(Level.SEVERE, "File has a null value " + prefetchFileName);//NON-NLS
+ logger.log(Level.WARNING, "File has a null value " + prefetchFileName);//NON-NLS
}
}
@@ -371,17 +371,21 @@ final class ExtractPrefetch extends Extract {
FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
try {
- files = fileManager.findFiles(dataSource, fileName, filePath); //NON-NLS
+ files = fileManager.findFiles(dataSource, fileName); //NON-NLS
+
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to find prefetch files.", ex); //NON-NLS
return null; // No need to continue
}
- if (!files.isEmpty()) {
- return files.get(0);
- } else {
- return null;
+ for (AbstractFile pFile : files) {
+
+ if (pFile.getParentPath().toLowerCase().contains(filePath.toLowerCase())) {
+ return pFile;
+ }
}
+
+ return null;
}
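
The lookup change above searches by file name only and then filters on the parent path case-insensitively, since Windows paths may be recorded with varying case. A minimal sketch of that filtering step; the method and class names are illustrative:

    import java.util.List;
    import org.sleuthkit.autopsy.casemodule.services.FileManager;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.TskCoreException;

    class PrefetchLookupSketch {
        AbstractFile findByNameThenPath(FileManager fileManager, Content dataSource,
                String fileName, String filePath) throws TskCoreException {
            // Search by name alone, then match the path ourselves so that
            // "/Windows/Prefetch" and "/windows/prefetch" both qualify.
            List<AbstractFile> files = fileManager.findFiles(dataSource, fileName);
            for (AbstractFile file : files) {
                if (file.getParentPath().toLowerCase().contains(filePath.toLowerCase())) {
                    return file;
                }
            }
            return null;
        }
    }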
diff --git a/appveyor.yml b/appveyor.yml
index b3cbb4032c..56582bbe43 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -42,6 +42,9 @@ build_script:
- ps: ant -version
- cmd: ant dist
- ps: popd
+ - ps: pushd case-uco/java
+ - cmd: ant -q
+ - ps: popd
- cd %APPVEYOR_BUILD_FOLDER%
- cmd: ant -q build
diff --git a/build.xml b/build.xml
index 1f00a24f79..740f8b82ad 100644
--- a/build.xml
+++ b/build.xml
@@ -214,7 +214,7 @@
-
+
@@ -321,4 +321,13 @@
+
+
+
+
+
+
+
+
+
diff --git a/docs/doxygen-user/data_sources.dox b/docs/doxygen-user/data_sources.dox
index 6aeed0bbb1..9a5dcd3692 100644
--- a/docs/doxygen-user/data_sources.dox
+++ b/docs/doxygen-user/data_sources.dox
@@ -47,10 +47,11 @@ Data sources can be removed from cases created with Autopsy 4.14.0 and later. Se
\section ds_img Adding a Disk Image
Autopsy supports disk images in the following formats:
-- Raw Single (For example: *.img, *.dd, *.raw, *.bin)
-- Raw Split (For example: *.001, *.002, *.aa, *.ab, etc)
-- EnCase (For example: *.e01, *.e02, etc)
-- Virtual Machines (For example: *.vmdk, *.vhd)
+- Raw Single (*.img, *.dd, *.raw, *.bin)
+- Raw Split (*.001, *.aa)
+- EnCase (*.e01)
+- Virtual Machine Disk (*.vmdk)
+- Virtual Hard Disk (*.vhd)
\image html data_source_disk_image.png
diff --git a/docs/doxygen-user/file_discovery.dox b/docs/doxygen-user/file_discovery.dox
index f711cb0d01..92f2e5a4e9 100644
--- a/docs/doxygen-user/file_discovery.dox
+++ b/docs/doxygen-user/file_discovery.dox
@@ -1,12 +1,12 @@
-/*! \page file_discovery_page File Discovery
+/*! \page discovery_page Discovery
\section file_disc_overview Overview
-The file discovery tool shows images, videos, or documents that match a set of filters configured by the user. You can choose how to group and order your results in order to see the most relevant data first.
+The discovery tool shows images, videos, or documents that match a set of filters configured by the user. You can choose how to group and order your results so that the most relevant data appears first.
\section file_disc_prereq Prerequisites
-We suggest running all \ref ingest_page "ingest modules" before launching file discovery, but if time is a factor the following are the modules that are the most important. You will see a warning if you open file discovery without running the \ref file_type_identification_page, the \ref hash_db_page, and the \ref EXIF_parser_page.
+We suggest running all \ref ingest_page "ingest modules" before launching discovery, but if time is a factor, the following modules are the most important. You will see a warning if you open discovery without running the \ref file_type_identification_page, the \ref hash_db_page, and the \ref EXIF_parser_page.
Required ingest modules:
@@ -24,22 +24,24 @@ Optional ingest modules:
\ref embedded_file_extractor_page - Allows display of an image contained in a document
-\section file_disc_run Running File Discovery
+\section file_disc_run Running Discovery
-To launch file discovery, either click the "File Discovery" icon near the top of the Autopsy UI or go to "Tools", "File Discovery". There are three steps when setting up file discovery, which flow from the top of the panel to the bottom:
+To launch discovery, either click the "Discovery" icon near the top of the Autopsy UI or go to "Tools", "Discovery". There are three steps when setting up discovery, which flow from the top of the panel to the bottom:
\ref file_disc_type "Choose the file type"
\ref file_disc_filtering "Set up filters"
\ref file_disc_grouping "Choose how to group and sort the results"
-Once everything is set up, use the "Show" button at the bottom of the left panel to display your results. If you want to cancel a search in progress you can use the "Cancel" button.
+\image html FileDiscovery/fd_setup.png
+
+Once everything is set up, use the "Search" button at the bottom right to display your results.
\image html FileDiscovery/fd_main.png
\subsection file_disc_type File Type
-The first step is choosing whether you want to display images, videos, or documents. The file type is determined by the MIME type of the file, which is why the \ref file_type_identification_page must be run to see any results. Switching between the file types will clear any results being displayed and reset the filters.
+The first step is choosing whether you want to display images, videos, or documents. The file type is determined by the MIME type of the file, which is why the \ref file_type_identification_page must be run to see any results. Switching between the file types will reset the filters.
\image html FileDiscovery/fd_fileType.png
@@ -79,13 +81,13 @@ This means the file must have a "User Content Suspected" result associated with
\subsubsection file_disc_hash_filter Hash Set Filter
-The hash set filter restricts the results to files found in the selected hash sets. Only notable hash sets that have hits in the current case are listed (though those hits may not be images or videos). See the \ref hash_db_page page for more information on creating and using hash sets.
+The hash set filter restricts the results to files found in the selected hash sets. Only notable hash sets that have hits in the current case are listed. See the \ref hash_db_page page for more information on creating and using hash sets.
\image html FileDiscovery/fd_hashSetFilter.png
\subsubsection file_disc_int_filter Interesting Item Filter
-The interesting item filter restricts the results to files found in the selected interesting item rule sets. Only interesting file rule sets that have results in the current case are listed (though those matches may not be images or videos). See the \ref interesting_files_identifier_page page for more information on creating and using interesting item rule sets.
+The interesting item filter restricts the results to files found in the selected interesting item rule sets. Only interesting file rule sets that have results in the current case are listed. See the \ref interesting_files_identifier_page page for more information on creating and using interesting item rule sets.
\image html FileDiscovery/fd_interestingItemsFilter.png
@@ -125,7 +127,7 @@ The final options are for how you want to group and sort your results.
\image html FileDiscovery/fd_grouping.png
-The first option lets you choose the top level grouping for your results and the second option lets you choose how to sort them. The groups appear in the middle column of the file discovery panel. Note that some of the grouping options may not always appear - for example, grouping by past occurrences will only be present if the \ref central_repo_page is enabled, and grouping by hash set will only be present if there are hash set hits in your current case. The example below shows the groups created using the default options (group by file size, order groups by group name):
+The first option lets you choose the top-level grouping for your results and the second lets you choose how to sort the groups. The groups appear in the left column of the results window. Note that some of the grouping options may not always appear - for example, grouping by past occurrences will only be present if the \ref central_repo_page is enabled, and grouping by hash set will only be present if there are hash set hits in your current case. The example below shows the groups created using the default options (group by file size, order groups by group name):
\image html FileDiscovery/fd_groupingSize.png
@@ -135,13 +137,15 @@ In the case of file size and past occurrences, ordering by group name is based o
The interesting items filter was not enabled so most images ended up in the "None" group, meaning they have no interesting file result associated with them. The final group in the list contains a file that matched both interesting item rule sets.
-The last grouping and sorting option is choosing how to sort the results within a group. This is the order of the results in the top right panel after selecting a group from the middle column. Note that due to the merging of results with the same hash in that panel, ordering by file name, path, or data source can vary. See the \ref file_disc_dedupe section below for more information.
+The last grouping and sorting option is choosing how to sort the results within a group. This is the order of the results on the right side of the results window after selecting a group from the left column. Note that due to the merging of results with the same hash in that panel, ordering by file name, path, or data source can vary. See the \ref file_disc_dedupe section below for more information.
\section file_disc_results Viewing Results
\subsection file_disc_results_overview Overview
-Once you select your options and click "Show", you'll see a list of groups in the middle panel. Selecting one of these groups will display the results from that group in the right panel. If your results are images, you'll see thumbnails for each image in the top area of the right panel.
+Once you select your options and click "Search", you'll see a new window with the list of groups on the left side. Selecting one of these groups will display the results from that group on the right side. Selecting a result will raise a panel showing more details about each instance of that result. You can manually raise and lower this panel using the large arrows on the right side of the divider.
+
+If your results are images, you'll see thumbnails for each image in the top area of the right panel.
\image html FileDiscovery/fd_resultGroups.png
diff --git a/docs/doxygen-user/geolocation.dox b/docs/doxygen-user/geolocation.dox
index 70ca212a41..421d5afb58 100644
--- a/docs/doxygen-user/geolocation.dox
+++ b/docs/doxygen-user/geolocation.dox
@@ -8,15 +8,17 @@ The Geolocation window shows artifacts that have longitude and latitude attribut
\section geo_usage Usage
-To open the Geolocation window, go to "Tools" and then select "Geolocation".
+To open the Geolocation window, click on the "Geolocation" button near the top of the main window, or go to "Tools" and then select "Geolocation".
\subsection geo_navigation General Usage
-You can move the map by clicking and dragging, and zoom using either the mouse wheel or the slider in the bottom left of the map. If a map tile is not available the tile will appear grey but the waypoints will still be displayed. This is more likely to happen when changing the default \ref geo_map_options.
+You can move the map by clicking and dragging, and zoom using either the mouse wheel or the slider in the bottom left of the map. If a map tile is not available, the tile will appear grey but the waypoints will still be displayed. This is more likely to happen when changing the default \ref geo_map_options. Different types of waypoints will be displayed in different colors. You can use the key in the lower left to easily identify the type of each waypoint. Some types will also use different icons on the map. For example, individual track points will be displayed as smaller circles. The entire track will be highlighted when you select an individual track point.
-You can left click on a waypoint to highlight that waypoint and show a details pop-up in the upper right corner of the map. The details pop-up will be updated as you click on different waypoints. The data displayed will vary depending on the type of waypoint. For example, this is the endpoint of a GPS Route:
+\image html geo_track_points.png
-\image html geo_details_route.png
+You can left click on a waypoint to highlight that waypoint and show a details pop-up in the upper right corner of the map. The details pop-up will be updated as you click on different waypoints. The data displayed will vary depending on the type of waypoint. For example, this is a GPS bookmark:
+
+\image html geo_details_bookmark.png
While this is an image with GPS coordinates found by the \ref EXIF_parser_page :
@@ -28,7 +30,7 @@ You can also right click on a waypoint to bring up a similar menu to what you'd
\subsection geo_filter Filtering
-The filters are displayed on the left side of the screen. The top filter lets you filter the waypoints based on timestamp. If enabled, you will only see waypoints with a timestamp within N days of the most recent waypoint (not the current date). When using this filter you can also choose whether you want to see waypoints with no timestamp.
+The filters are displayed on the left side of the screen. The top filter lets you filter the waypoints based on timestamp. If enabled, you will only see waypoints with a timestamp within N days of the most recent waypoint (not the current date). When using this filter, you can also choose whether you want to see waypoints with no timestamp.
\image html geo_filter_time.png
@@ -63,7 +65,7 @@ on the Geolocation panel in the Options dialog. There are four options for geolo
OpenStreetMap server
-You can specify the address of a OSM tile server. A list of online tile servers can be found here: https://wiki.openstreetmap.org/wiki/Tile_servers.
+ You can specify the address of an OSM tile server. A list of online tile servers can be found here: https://wiki.openstreetmap.org/wiki/Tile_servers.
Tile servers may have restrictions on them that prevent Autopsy from accessing their tiles. If the tiles URL is something of the form "http://tiles.example.org/${z}/${x}/${y}.png",
then you'll need to enter "http://tiles.example.org" in the options panel.
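For illustration, the ${z}, ${x}, and ${y} placeholders are the zoom level and the tile column and row substituted in for each tile request. A minimal sketch of the expansion, using the example server name from the text above and arbitrary tile coordinates:

\verbatim
# Minimal sketch of slippy-map tile URL expansion; the server name is the
# example from the text above and the z/x/y values are arbitrary.
base = "http://tiles.example.org"
z, x, y = 10, 511, 340
print(f"{base}/{z}/{x}/{y}.png")  # -> http://tiles.example.org/10/511/340.png
\endverbatim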
@@ -71,7 +73,7 @@ then you'll need to enter "http://tiles.example.org" in the options panel.
OpenStreetMap zip file
-Allows offline use of zip file of OSM tile images
+ Allows offline use of a zip file of OSM tile images
Details on how to generate tile zip files are \ref geo_generate_zip "below".
MBTiles file
@@ -86,7 +88,7 @@ not the "Vector Tiles".
\subsection geo_generate_zip Using Maperative to Generate Tile Image Zip Files
-Maperative is a tool for drawing maps, however it can also be used to create tile images. Maperative download and documentations can be found at http://maperitive.net/ .
+Maperative is a tool for drawing maps, but it can also be used to create tile images. Maperative downloads and documentation can be found at http://maperitive.net/ .
By default Maperative uses an online tile server to generate the map. For offline use, users can supply an OpenStreetMap raw data extract.
@@ -94,8 +96,8 @@ By default Maperative uses an online tile server to generate the map. For offli
\subsubsection geo_generate_tile_image Generating tile image zip files using any map data source:
Download and run Maperative.
- Center and zoom in on area of interest. The larger the area, the more tiles that will be generated. Tiles will be generated for the area visible in the map panel.
- Choose whether you want to use the default zoom levels or custom ones. Zoom levels in Mapertive start at 1. As the zoom level increases so will the quantity of tiles generated as well as the detail of each tile. Generating tiles, especially for heavily populated areas, may take time, please be patient with either method.
+ Center and zoom in on an area of interest. The larger the area, the more tiles that will be generated. Tiles will be generated for the area visible in the map panel.
+ Choose whether you want to use the default zoom levels or custom ones. Zoom levels in Maperative start at 1. As the zoom level increases, so do the quantity of tiles generated and the detail of each tile. Generating tiles, especially for heavily populated areas, may take time. Please be patient with either method.
To generate tiles using the default zoom levels, select Tools->Generate Tiles
@@ -103,13 +105,13 @@ By default Maperative uses an online tile server to generate the map. For offli
Maperative will generate tiles for zoom levels depending on the area of interest and the zoom level. For example, if you start all the way zoomed out, you will likely see levels 1 through 10 generated. If you start zoomed in, you might see levels 10 through 14.
- Maperative provides a command interface which allows you to generate tiles for specific zoom levels. Commands can be run in the Command prompt text field at the bottom of the Maperative window. For a full list of commands see the Maperative documentation or http://maperitive.net/docs/ . The generate-tiles command can be used to generate tiles for the area visible in the map panel area. For full details on generate-tiles see the documentation included with Maperative or http://maperitive.net/docs/Commands/GenerateTiles.html . The following is a sample command to generate tiles for zoom level 2 to 3 into the folder Tiles:
+ Maperative provides a command interface which allows you to generate tiles for specific zoom levels. Commands can be run in the Command prompt text field at the bottom of the Maperative window. For a full list of commands, see the Maperative documentation or http://maperitive.net/docs/ . The generate-tiles command can be used to generate tiles for the area visible in the map panel. For full details on generate-tiles, see the documentation included with Maperative or http://maperitive.net/docs/Commands/GenerateTiles.html . The following is a sample command to generate tiles for zoom levels 2 to 3 into the folder Tiles:
\verbatim generate-tiles minzoom=2 maxzoom=3 tilesdir=C:\Tiles \endverbatim
\image html geo_command_line.png
- For use in autopsy, the generated tile images need to be in a zip file. To create a zip of tiles for use in Autopsy, zip up all of the folders in the tile file output directory. Do not include the parent directory, just the numbered folders contained within. If you use the menu bar option or did not specify a folder in your command the generated tiles will be located in <Maperative Install Location>\\Tiles.
+ For use in Autopsy, the generated tile images need to be in a zip file. To create a zip of tiles for use in Autopsy, zip up all of the folders in the tile file output directory. Do not include the parent directory, just the numbered folders contained within. If you used the menu bar option or did not specify a folder in your command, the generated tiles will be located in <Maperative Install Location>\\Tiles. A scripted sketch of this zip step is shown below the screenshot.
\image html geo_tile_folder.png
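As a sketch of the zip step, assuming the tiles were generated into C:\Tiles (the output folder used in the sample command above; adjust to your own location), the following Python packages the numbered zoom-level folders without their parent directory:

\verbatim
import os
import zipfile

# Sketch of packaging Maperative tile output for Autopsy, assuming the
# tiles were generated into C:\Tiles (hypothetical location). Paths are
# stored relative to the tile directory so the numbered zoom-level
# folders sit at the top of the archive rather than the parent directory.
tiles_dir = r'C:\Tiles'
with zipfile.ZipFile('tiles.zip', 'w', zipfile.ZIP_DEFLATED) as zf:
    for root, dirs, files in os.walk(tiles_dir):
        for name in files:
            full = os.path.join(root, name)
            zf.write(full, os.path.relpath(full, tiles_dir))
\endverbatim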
@@ -135,7 +137,7 @@ To add a data source to Maperative:
For ease of use, users may want to merge OSM raw data extracts. OSMConvert is a tool that can be used to do this.
-To merge two OSM raw data extracts country1.osm.pbf and country2.osm.pbf use the following commands. Note that this assumes that osmcovert and the files are in the same directory; if they are not be sure to use full paths.
+To merge two OSM raw data extracts country1.osm.pbf and country2.osm.pbf, use the following commands. Note that this assumes that osmconvert and the files are in the same directory; if they are not, be sure to use full paths.
\verbatim
osmconvert country1.osm.pbf -o=country1.o5m
osmconvert country2.osm.pbf -o=country2.o5m
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_documents.png b/docs/doxygen-user/images/FileDiscovery/fd_documents.png
index 7170798ce7..3f60954eae 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_documents.png and b/docs/doxygen-user/images/FileDiscovery/fd_documents.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_dupeEx.png b/docs/doxygen-user/images/FileDiscovery/fd_dupeEx.png
index 4d15a718e2..e94e3d1339 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_dupeEx.png and b/docs/doxygen-user/images/FileDiscovery/fd_dupeEx.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_grouping.png b/docs/doxygen-user/images/FileDiscovery/fd_grouping.png
index 7d1441bee1..1a671c8999 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_grouping.png and b/docs/doxygen-user/images/FileDiscovery/fd_grouping.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_main.png b/docs/doxygen-user/images/FileDiscovery/fd_main.png
index 6e44376a10..69e7a109a9 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_main.png and b/docs/doxygen-user/images/FileDiscovery/fd_main.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_resultGroups.png b/docs/doxygen-user/images/FileDiscovery/fd_resultGroups.png
index b8362e610f..5872da0ad1 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_resultGroups.png and b/docs/doxygen-user/images/FileDiscovery/fd_resultGroups.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_setup.png b/docs/doxygen-user/images/FileDiscovery/fd_setup.png
new file mode 100644
index 0000000000..32018808a3
Binary files /dev/null and b/docs/doxygen-user/images/FileDiscovery/fd_setup.png differ
diff --git a/docs/doxygen-user/images/FileDiscovery/fd_videos.png b/docs/doxygen-user/images/FileDiscovery/fd_videos.png
index 53dfbd8339..4cf58e29a6 100644
Binary files a/docs/doxygen-user/images/FileDiscovery/fd_videos.png and b/docs/doxygen-user/images/FileDiscovery/fd_videos.png differ
diff --git a/docs/doxygen-user/images/geo_context_menu.png b/docs/doxygen-user/images/geo_context_menu.png
index e6da8484f9..fc1d216228 100644
Binary files a/docs/doxygen-user/images/geo_context_menu.png and b/docs/doxygen-user/images/geo_context_menu.png differ
diff --git a/docs/doxygen-user/images/geo_details.png b/docs/doxygen-user/images/geo_details.png
index 8932479524..92e1b4ccf7 100644
Binary files a/docs/doxygen-user/images/geo_details.png and b/docs/doxygen-user/images/geo_details.png differ
diff --git a/docs/doxygen-user/images/geo_details_bookmark.png b/docs/doxygen-user/images/geo_details_bookmark.png
new file mode 100644
index 0000000000..7dec786a51
Binary files /dev/null and b/docs/doxygen-user/images/geo_details_bookmark.png differ
diff --git a/docs/doxygen-user/images/geo_details_route.png b/docs/doxygen-user/images/geo_details_route.png
deleted file mode 100644
index 0f47ba0654..0000000000
Binary files a/docs/doxygen-user/images/geo_details_route.png and /dev/null differ
diff --git a/docs/doxygen-user/images/geo_filter_type.png b/docs/doxygen-user/images/geo_filter_type.png
index 5e99ad253c..b0244b9d2e 100644
Binary files a/docs/doxygen-user/images/geo_filter_type.png and b/docs/doxygen-user/images/geo_filter_type.png differ
diff --git a/docs/doxygen-user/images/geo_main.png b/docs/doxygen-user/images/geo_main.png
index 43addca633..20a34fea4b 100644
Binary files a/docs/doxygen-user/images/geo_main.png and b/docs/doxygen-user/images/geo_main.png differ
diff --git a/docs/doxygen-user/images/geo_track_points.png b/docs/doxygen-user/images/geo_track_points.png
new file mode 100644
index 0000000000..1aecf18bdd
Binary files /dev/null and b/docs/doxygen-user/images/geo_track_points.png differ
diff --git a/docs/doxygen-user/main.dox b/docs/doxygen-user/main.dox
index 63114401bd..0d808519cd 100644
--- a/docs/doxygen-user/main.dox
+++ b/docs/doxygen-user/main.dox
@@ -70,7 +70,7 @@ The following topics are available here:
- \subpage timeline_page
- \subpage communications_page
- \subpage geolocation_page
- - \subpage file_discovery_page
+ - \subpage discovery_page
- Reporting
- \subpage tagging_page
diff --git a/test/script/regression.py b/test/script/regression.py
index a36929491c..9999a5c563 100644
--- a/test/script/regression.py
+++ b/test/script/regression.py
@@ -1074,7 +1074,7 @@ class TestResultsDiffer(object):
# Ensure gold is passed before output
(subprocess.check_output(["diff", '-r', '-N', '-x', '*.png', '-x', '*.ico', '--ignore-matching-lines',
'HTML Report Generated on \|Autopsy Report for case \|Case:\|Case Number:'
- '\|Examiner:', gold_report_path, output_report_path]))
+                                      r'\|Examiner:\|Unalloc_', gold_report_path, output_report_path]))
print_report("", "REPORT COMPARISON", "The test html reports matched the gold reports")
return True
except subprocess.CalledProcessError as e:
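The comparison above relies on GNU diff's --ignore-matching-lines (-I) option so that volatile strings, now including the run-specific Unalloc_ file names, do not produce spurious failures. A minimal standalone sketch of the same pattern, assuming GNU diff is on the PATH and that gold/ and output/ are hypothetical report directories:

\verbatim
import subprocess

# Minimal sketch of regex-filtered report comparison, assuming GNU diff is
# installed; 'gold' and 'output' are hypothetical report directories. diff
# exits non-zero when the trees differ, raising CalledProcessError.
try:
    subprocess.check_output(["diff", "-r", "-N", "-x", "*.png",
                             "--ignore-matching-lines", r"Examiner:\|Unalloc_",
                             "gold", "output"])
    print("reports matched")
except subprocess.CalledProcessError as e:
    print("reports differ:")
    print(e.output.decode("utf-8", errors="replace"))
\endverbatim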
diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py
index baab8573df..f152cd923a 100644
--- a/test/script/tskdbdiff.py
+++ b/test/script/tskdbdiff.py
@@ -208,10 +208,12 @@ class TskDbDiff(object):
while (row != None):
# File Name and artifact type
+ # Remove parent object ID from Unalloc file name
+ normalizedName = re.sub('^Unalloc_[0-9]+_', 'Unalloc_', row["name"])
if(row["parent_path"] != None):
- database_log.write(row["parent_path"] + row["name"] + ' ')
+ database_log.write(row["parent_path"] + normalizedName + ' ')
else:
- database_log.write(row["name"] + ' ')
+ database_log.write(normalizedName + ' ')
if isMultiUser:
attribute_cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
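The re.sub call above strips the run-specific parent object ID out of unallocated-space file names before they are logged, so that two runs over the same image produce comparable output. For example (the file name below is hypothetical):

\verbatim
import re

# Hypothetical file name; the leading number after "Unalloc_" is the
# parent object ID, which varies from run to run.
name = "Unalloc_4021_1048576_2097152"
print(re.sub('^Unalloc_[0-9]+_', 'Unalloc_', name))  # -> Unalloc_1048576_2097152
\endverbatim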
@@ -427,6 +429,7 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
files_index = line.find('INSERT INTO "tsk_files"') > -1 or line.find('INSERT INTO tsk_files ') > -1
path_index = line.find('INSERT INTO "tsk_files_path"') > -1 or line.find('INSERT INTO tsk_files_path ') > -1
object_index = line.find('INSERT INTO "tsk_objects"') > -1 or line.find('INSERT INTO tsk_objects ') > -1
+ vs_parts_index = line.find('INSERT INTO "tsk_vs_parts"') > -1 or line.find('INSERT INTO tsk_vs_parts ') > -1
report_index = line.find('INSERT INTO "reports"') > -1 or line.find('INSERT INTO reports ') > -1
layout_index = line.find('INSERT INTO "tsk_file_layout"') > -1 or line.find('INSERT INTO tsk_file_layout ') > -1
data_source_info_index = line.find('INSERT INTO "data_source_info"') > -1 or line.find('INSERT INTO data_source_info ') > -1
@@ -462,6 +465,12 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
# remove object ID
if files_index:
newLine = ('INSERT INTO "tsk_files" VALUES(' + ', '.join(fields_list[1:]) + ');')
+ # Remove object ID from Unalloc file name
+ newLine = re.sub('Unalloc_[0-9]+_', 'Unalloc_', newLine)
+ return newLine
+ # remove object ID
+ elif vs_parts_index:
+ newLine = ('INSERT INTO "tsk_vs_parts" VALUES(' + ', '.join(fields_list[1:]) + ');')
return newLine
# remove group ID
elif ig_groups_index:
@@ -496,7 +505,9 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
elif layout_index:
obj_id = fields_list[0]
path= files_table[int(obj_id)]
- newLine = ('INSERT INTO "tsk_file_layout" VALUES(' + path + ', ' + ', '.join(fields_list[1:]) + ');')
+ newLine = ('INSERT INTO "tsk_file_layout" VALUES(' + path + ', ' + ', '.join(fields_list[1:]) + ');')
+ # Remove object ID from Unalloc file name
+ newLine = re.sub('Unalloc_[0-9]+_', 'Unalloc_', newLine)
return newLine
# remove object ID
elif object_index:
@@ -557,6 +568,11 @@ def normalize_db_entry(line, files_table, vs_parts_table, vs_info_table, fs_info
parent_path = parent_path[parent_path.find('ModuleOutput'):]
if path and parent_path:
+ # Remove object ID from Unalloc file names and regripper output
+ path = re.sub('Unalloc_[0-9]+_', 'Unalloc_', path)
+            path = re.sub(r'regripper-[0-9]+-full', 'regripper-full', path)
+ parent_path = re.sub('Unalloc_[0-9]+_', 'Unalloc_', parent_path)
+            parent_path = re.sub(r'regripper-[0-9]+-full', 'regripper-full', parent_path)
return newLine + path + ', ' + parent_path + ', ' + ', '.join(fields_list[2:]) + ');'
else:
return line
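The same normalization is applied to module output paths, where both unallocated file names and RegRipper output folders embed run-specific object IDs. A short illustration on a hypothetical path:

\verbatim
import re

# Hypothetical module output path embedding a run-specific object ID.
p = "ModuleOutput/RegRipper/regripper-4021-full"
p = re.sub(r'regripper-[0-9]+-full', 'regripper-full', p)
print(p)  # -> ModuleOutput/RegRipper/regripper-full
\endverbatim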
diff --git a/thirdparty/markmckinnon/Export_Srudb_Linux b/thirdparty/markmckinnon/Export_Srudb_Linux
old mode 100644
new mode 100755
diff --git a/thirdparty/markmckinnon/Export_srudb_macos b/thirdparty/markmckinnon/Export_srudb_macos
old mode 100644
new mode 100755
diff --git a/thirdparty/markmckinnon/parse_prefetch_linux b/thirdparty/markmckinnon/parse_prefetch_linux
old mode 100644
new mode 100755
diff --git a/thirdparty/markmckinnon/parse_prefetch_macos b/thirdparty/markmckinnon/parse_prefetch_macos
old mode 100644
new mode 100755
diff --git a/unix_setup.sh b/unix_setup.sh
index edd6c36632..06fc655e32 100644
--- a/unix_setup.sh
+++ b/unix_setup.sh
@@ -72,6 +72,10 @@ else
echo "done"
fi
+# make sure thirdparty files are executable
+chmod u+x autopsy/markmckinnon/Export*
+chmod u+x autopsy/markmckinnon/parse*
+
# make sure it is executable
chmod u+x bin/autopsy