From 935827122de95e24584d7617a5d7e283ed5c4c74 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Tue, 24 Oct 2017 16:59:16 -0400 Subject: [PATCH 01/45] Updated Java doc --- .../AutoIngestDataSourceProcessor.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java index 11c0559795..163d164376 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/AutoIngestDataSourceProcessor.java @@ -43,9 +43,7 @@ public interface AutoIngestDataSourceProcessor extends DataSourceProcessor { * or less means the data source is not supported by the * DataSourceProcessor. Value of 100 indicates high certainty in * being able to process the data source. - * - * @throws - * org.sleuthkit.autopsy.corecomponentinterfaces.AutomatedIngestDataSourceProcessor.AutomatedIngestDataSourceProcessorException + * @throws org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException */ int canProcess(Path dataSourcePath) throws AutoIngestDataSourceProcessorException; @@ -65,9 +63,7 @@ public interface AutoIngestDataSourceProcessor extends DataSourceProcessor { * background task to report progress. * @param callBack Callback that will be used by the background task * to return results. 
- * - * @throws - * org.sleuthkit.autopsy.corecomponentinterfaces.AutomatedIngestDataSourceProcessor.AutomatedIngestDataSourceProcessorException + * @throws org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException */ void process(String deviceId, Path dataSourcePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) throws AutoIngestDataSourceProcessorException; From 112de482a4e3bfa7c09b5d450101ee22a094d7f7 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Tue, 24 Oct 2017 16:59:49 -0400 Subject: [PATCH 02/45] First cut at implementing the functionality of ArchiveDSP --- .../autoingest/AddArchiveTask.java | 70 +++++++++++++++++-- .../autoingest/AddDataSourceCallback.java | 7 ++ .../ArchiveExtractorDSProcessor.java | 8 +-- .../experimental/autoingest/ArchiveUtil.java | 7 +- .../autoingest/AutoIngestManager.java | 5 +- .../DataSourceProcessorUtility.java | 23 +++++- 6 files changed, 104 insertions(+), 16 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 3bc76e61fd..42c208ad58 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -22,12 +22,17 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; +import java.util.Map; +import java.util.UUID; import java.util.logging.Level; import org.apache.commons.io.FilenameUtils; import org.sleuthkit.autopsy.casemodule.Case; +import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; +import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import 
org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; import org.sleuthkit.datamodel.Content; /* @@ -42,6 +47,7 @@ class AddArchiveTask implements Runnable { private final DataSourceProcessorProgressMonitor progressMonitor; private final DataSourceProcessorCallback callback; private boolean criticalErrorOccurred; + private final Object archiveDspLock; private static final String ARCHIVE_EXTRACTOR_MODULE_OUTPUT_DIR = "Archive Extractor"; @@ -62,6 +68,7 @@ class AddArchiveTask implements Runnable { this.archivePath = archivePath; this.callback = callback; this.progressMonitor = progressMonitor; + this.archiveDspLock = new Object(); } /** @@ -75,13 +82,18 @@ class AddArchiveTask implements Runnable { if (!ArchiveUtil.isArchive(Paths.get(archivePath))) { criticalErrorOccurred = true; logger.log(Level.SEVERE, String.format("Input data source is not a valid datasource: %s", archivePath)); //NON-NLS - errorMessages.add("Input data source is not a valid datasource: " + archivePath); + errorMessages.add("Input data source is not a valid datasource: " + archivePath); result = DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; callback.done(result, errorMessages, newDataSources); } // extract the archive and pass the extracted folder as input Path destinationFolder = Paths.get(""); + UUID taskId = UUID.randomUUID(); + if (callback instanceof AddDataSourceCallback) { + // if running as part of automated ingest - re-use the task ID + taskId = ((AddDataSourceCallback) callback).getTaskId(); + } try { Case currentCase = Case.getCurrentCase(); @@ -93,10 +105,58 @@ class AddArchiveTask implements Runnable { destinationFolder.toFile().mkdirs(); // extract contents of ZIP archive into destination folder - //ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); - + List extractedFiles = 
ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); + // do processing - + Map validDataSourceProcessorsMap; + for (String file : extractedFiles) { + Path filePath = Paths.get(file); + // identify DSP for this file + // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source + validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(filePath); + if (validDataSourceProcessorsMap.isEmpty()) { + continue; + } + + // Get an ordered list of data source processors to try + List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); + + // Try each DSP in decreasing order of confidence + for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { + + // skip local files and local disk DSPs, only looking for "valid" data sources + if (selectedProcessor instanceof LocalDiskDSProcessor) { + continue; + } + if (selectedProcessor instanceof LocalFilesDSProcessor) { + continue; + } + + //jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); + //SYS_LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); + try { + DataSource internalDataSource = new DataSource(deviceId, filePath); + DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); + selectedProcessor.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); + archiveDspLock.wait(); + + // at this point we got the content object(s) from the current DSP + newDataSources.addAll(internalDataSource.getContent()); + + return; + } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { + // Log that the current DSP failed and set the error flag. 
We consider it an error + // if a DSP fails even if a later one succeeds since we expected to be able to process + // the data source which each DSP on the list. + //AutoIngestAlertFile.create(caseDirectoryPath); + //currentJob.setErrorsOccurred(true); + //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); + criticalErrorOccurred = true; + errorMessages.add(ex.getMessage()); + logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{file, selectedProcessor.getDataSourceType()}); + } + } + } } catch (Exception ex) { criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); @@ -117,6 +177,6 @@ class AddArchiveTask implements Runnable { * Attempts to cancel adding the archive to the case database. */ public void cancelTask() { - + // do a cancelation via future instead } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java index db19fc2fbc..181ad4d4d2 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java @@ -54,6 +54,13 @@ class AddDataSourceCallback extends DataSourceProcessorCallback { this.taskId = taskId; this.lock = lock; } + + /** + * @return the taskId + */ + public UUID getTaskId() { + return taskId; + } /** * Called by the data source processor when it finishes running in its own diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index a649b0f37a..0123592593 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -35,10 +35,10 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; * add data source wizard. It also provides a run method overload to allow it to * be used independently of the wizard. */ -//@ServiceProviders(value={ -// @ServiceProvider(service=DataSourceProcessor.class), -// @ServiceProvider(service=AutoIngestDataSourceProcessor.class)} -//) +@ServiceProviders(value={ + @ServiceProvider(service=DataSourceProcessor.class), + @ServiceProvider(service=AutoIngestDataSourceProcessor.class)} +) @NbBundle.Messages({ "ArchiveDSP.dsType.text=Archive file"}) public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIngestDataSourceProcessor { diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java index b7a2926092..823390d56b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java @@ -214,10 +214,11 @@ final class ArchiveUtil { * @param destinationFolder Path to directory where results will be * extracted to. 
* + * @return List of file names contained within archive * @throws * ArchiveExtractionException */ - static void unpackArchiveFile(String archiveFilePath, String destinationFolder) throws ArchiveExtractionException { + static List unpackArchiveFile(String archiveFilePath, String destinationFolder) throws ArchiveExtractionException { if (!SevenZip.isInitializedSuccessfully() && (SevenZip.getLastInitializationException() == null)) { try { SevenZip.initSevenZipFromPlatformJAR(); @@ -225,6 +226,7 @@ final class ArchiveUtil { throw new ArchiveExtractionException("Unable to initialize 7Zip libraries", ex); } } + List files = new ArrayList<>(); ISevenZipInArchive inArchive = null; try { RandomAccessFile randomAccessFile = new RandomAccessFile(new File(archiveFilePath), "r"); @@ -251,6 +253,8 @@ final class ArchiveUtil { } } } + // keep track of extracted files + files.add(entryPathInArchive); } } catch (Exception ex) { throw new ArchiveExtractionException("Exception while unpacking archive contents", ex); @@ -263,6 +267,7 @@ final class ArchiveUtil { throw new ArchiveExtractionException("Exception while closing the archive", ex); } } + return files; } /** diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 2f2f22fb0d..5ece1a3dcf 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -2334,10 +2334,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang } // Get an ordered list of data source processors to try - List validDataSourceProcessors = validDataSourceProcessorsMap.entrySet().stream() - .sorted(Map.Entry.comparingByValue().reversed()) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + List validDataSourceProcessors = 
DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); synchronized (ingestLock) { // Try each DSP in decreasing order of confidence diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java index 6f88d70a28..a8aafe4236 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java @@ -21,7 +21,9 @@ package org.sleuthkit.autopsy.experimental.autoingest; import java.nio.file.Path; import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import org.openide.util.Lookup; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException; @@ -30,8 +32,8 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor. * A utility class to find Data Source Processors */ class DataSourceProcessorUtility { - - private DataSourceProcessorUtility() { + + private DataSourceProcessorUtility() { } /** @@ -60,4 +62,21 @@ class DataSourceProcessorUtility { return validDataSourceProcessorsMap; } + + /** + * A utility method to get an ordered list of data source processors. DSPs + * are ordered in descending order from highest confidence to lowest. 
+ * + * @param validDataSourceProcessorsMap Hash map of all DSPs that can process + * the data source along with their confidence score + * @return Ordered list of data source processors + */ + static List orderDataSourceProcessorsByConfidence(Map validDataSourceProcessorsMap) { + List validDataSourceProcessors = validDataSourceProcessorsMap.entrySet().stream() + .sorted(Map.Entry.comparingByValue().reversed()) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); + + return validDataSourceProcessors; + } } From 90823a7f9d6ab20c0cc7b3321bd93e365905d30a Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Wed, 25 Oct 2017 14:23:57 -0400 Subject: [PATCH 03/45] Bug fixes, using progress updater --- .../experimental/autoingest/AddArchiveTask.java | 14 +++++++++++--- .../experimental/autoingest/ArchiveUtil.java | 2 +- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 42c208ad58..0a5b5ad8a3 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -76,6 +76,7 @@ class AddArchiveTask implements Runnable { */ @Override public void run() { + progressMonitor.setIndeterminate(true); List errorMessages = new ArrayList<>(); List newDataSources = new ArrayList<>(); DataSourceProcessorCallback.DataSourceProcessorResult result; @@ -88,7 +89,6 @@ class AddArchiveTask implements Runnable { } // extract the archive and pass the extracted folder as input - Path destinationFolder = Paths.get(""); UUID taskId = UUID.randomUUID(); if (callback instanceof AddDataSourceCallback) { // if running as part of automated ingest - re-use the task ID @@ -101,8 +101,14 @@ class AddArchiveTask implements Runnable { String dataSourceFileNameNoExt = FilenameUtils.getBaseName(archivePath); 
// create folder to extract archive to - destinationFolder = Paths.get(currentCase.getModuleDirectory(), ARCHIVE_EXTRACTOR_MODULE_OUTPUT_DIR, dataSourceFileNameNoExt + "_" + TimeStampUtils.createTimeStamp()); - destinationFolder.toFile().mkdirs(); + Path destinationFolder = Paths.get(currentCase.getModuleDirectory(), ARCHIVE_EXTRACTOR_MODULE_OUTPUT_DIR, dataSourceFileNameNoExt + "_" + TimeStampUtils.createTimeStamp()); + if (destinationFolder.toFile().mkdirs() == false) { + // unable to create directory + criticalErrorOccurred = true; + errorMessages.add("Unable to create directory for archive extraction " + destinationFolder.toString()); + logger.log(Level.SEVERE, "Unable to create directory for archive extraction {0}", destinationFolder.toString()); + return; + } // extract contents of ZIP archive into destination folder List extractedFiles = ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); @@ -110,6 +116,7 @@ class AddArchiveTask implements Runnable { // do processing Map validDataSourceProcessorsMap; for (String file : extractedFiles) { + progressMonitor.setProgressText(String.format("Adding: %s", file)); Path filePath = Paths.get(file); // identify DSP for this file // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source @@ -162,6 +169,7 @@ class AddArchiveTask implements Runnable { errorMessages.add(ex.getMessage()); logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS } finally { + progressMonitor.setProgress(100); if (criticalErrorOccurred) { result = DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; } else if (!errorMessages.isEmpty()) { diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java index 823390d56b..db2b954bca 100755 --- 
a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveUtil.java @@ -254,7 +254,7 @@ final class ArchiveUtil { } } // keep track of extracted files - files.add(entryPathInArchive); + files.add(fullPath.toString()); } } catch (Exception ex) { throw new ArchiveExtractionException("Exception while unpacking archive contents", ex); From ed697df912b704c632279a39e0d5dbdb5c6c50c0 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Wed, 25 Oct 2017 14:38:56 -0400 Subject: [PATCH 04/45] Bug fixes --- .../autoingest/AddArchiveTask.java | 44 ++++++++++--------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 0a5b5ad8a3..8176db8810 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -89,7 +89,7 @@ class AddArchiveTask implements Runnable { } // extract the archive and pass the extracted folder as input - UUID taskId = UUID.randomUUID(); + UUID taskId = UUID.randomUUID(); // ELTODO: do we want to come with a way to re-use task id? 
if (callback instanceof AddDataSourceCallback) { // if running as part of automated ingest - re-use the task ID taskId = ((AddDataSourceCallback) callback).getTaskId(); @@ -141,26 +141,28 @@ class AddArchiveTask implements Runnable { //jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); //SYS_LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); - try { - DataSource internalDataSource = new DataSource(deviceId, filePath); - DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); - selectedProcessor.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); - archiveDspLock.wait(); - - // at this point we got the content object(s) from the current DSP - newDataSources.addAll(internalDataSource.getContent()); - - return; - } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { - // Log that the current DSP failed and set the error flag. We consider it an error - // if a DSP fails even if a later one succeeds since we expected to be able to process - // the data source which each DSP on the list. 
- //AutoIngestAlertFile.create(caseDirectoryPath); - //currentJob.setErrorsOccurred(true); - //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); - criticalErrorOccurred = true; - errorMessages.add(ex.getMessage()); - logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{file, selectedProcessor.getDataSourceType()}); + synchronized (archiveDspLock) { + try { + DataSource internalDataSource = new DataSource(deviceId, filePath); + DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); + selectedProcessor.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); + archiveDspLock.wait(); + + // at this point we got the content object(s) from the current DSP + newDataSources.addAll(internalDataSource.getContent()); + + break; // skip all other DSPs for this file + } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { + // Log that the current DSP failed and set the error flag. We consider it an error + // if a DSP fails even if a later one succeeds since we expected to be able to process + // the data source which each DSP on the list. 
+ //AutoIngestAlertFile.create(caseDirectoryPath); + //currentJob.setErrorsOccurred(true); + //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); + criticalErrorOccurred = true; + errorMessages.add(ex.getMessage()); + logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{file, selectedProcessor.getDataSourceType()}); + } } } } From 950e534a9268e71f3e9947c32caef38c90a64ba8 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Wed, 25 Oct 2017 16:07:35 -0400 Subject: [PATCH 05/45] Adding archive file itself as a local file --- .../autoingest/AddArchiveTask.java | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 8176db8810..80c4bb958e 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -166,6 +166,32 @@ class AddArchiveTask implements Runnable { } } } + + // after all archive contents have been ingested - all the archive itself as a logical file + progressMonitor.setProgressText(String.format("Adding: %s", archivePath)); + LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); + synchronized (archiveDspLock) { + try { + Path filePath = Paths.get(archivePath); + DataSource internalDataSource = new DataSource(deviceId, filePath); + DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); + localFilesDSP.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); + archiveDspLock.wait(); + + // at this point we got the content object(s) from the current DSP + newDataSources.addAll(internalDataSource.getContent()); + } catch 
(AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { + // Log that the current DSP failed and set the error flag. We consider it an error + // if a DSP fails even if a later one succeeds since we expected to be able to process + // the data source which each DSP on the list. + //AutoIngestAlertFile.create(caseDirectoryPath); + //currentJob.setErrorsOccurred(true); + //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); + criticalErrorOccurred = true; + errorMessages.add(ex.getMessage()); + logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{archivePath, localFilesDSP.getDataSourceType()}); + } + } } catch (Exception ex) { criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); From 6e6db52632294926498e50753b7e6b2aaf7240e3 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Fri, 27 Oct 2017 16:41:29 -0400 Subject: [PATCH 06/45] Chnages according to latest design --- .../autoingest/AddArchiveTask.java | 98 ++++++++++++------- 1 file changed, 63 insertions(+), 35 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 80c4bb958e..c46067576a 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; +import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; @@ -25,7 +26,10 @@ import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Level; +import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; +import org.openide.filesystems.FileObject; +import org.openide.filesystems.FileUtil; import 
org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; @@ -89,7 +93,7 @@ class AddArchiveTask implements Runnable { } // extract the archive and pass the extracted folder as input - UUID taskId = UUID.randomUUID(); // ELTODO: do we want to come with a way to re-use task id? + UUID taskId = UUID.randomUUID(); if (callback instanceof AddDataSourceCallback) { // if running as part of automated ingest - re-use the task ID taskId = ((AddDataSourceCallback) callback).getTaskId(); @@ -97,12 +101,9 @@ class AddArchiveTask implements Runnable { try { Case currentCase = Case.getCurrentCase(); - // get file name without full path or extension - String dataSourceFileNameNoExt = FilenameUtils.getBaseName(archivePath); - // create folder to extract archive to - Path destinationFolder = Paths.get(currentCase.getModuleDirectory(), ARCHIVE_EXTRACTOR_MODULE_OUTPUT_DIR, dataSourceFileNameNoExt + "_" + TimeStampUtils.createTimeStamp()); - if (destinationFolder.toFile().mkdirs() == false) { + Path destinationFolder = createDirectoryForFile(archivePath, currentCase.getModuleDirectory()); + if (destinationFolder.toString().isEmpty()) { // unable to create directory criticalErrorOccurred = true; errorMessages.add("Unable to create directory for archive extraction " + destinationFolder.toString()); @@ -117,10 +118,9 @@ class AddArchiveTask implements Runnable { Map validDataSourceProcessorsMap; for (String file : extractedFiles) { progressMonitor.setProgressText(String.format("Adding: %s", file)); - Path filePath = Paths.get(file); // identify DSP for this file // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source - validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(filePath); + validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(Paths.get(file)); if 
(validDataSourceProcessorsMap.isEmpty()) { continue; } @@ -138,14 +138,32 @@ class AddArchiveTask implements Runnable { if (selectedProcessor instanceof LocalFilesDSProcessor) { continue; } + // also skip nested archive files, those will be ingested as logical files and extracted during ingest + if (selectedProcessor instanceof ArchiveExtractorDSProcessor) { + continue; + } + + // identified a "valid" data source within the archive. + // Move it to a different folder + Path newFolder = createDirectoryForFile(file, currentCase.getModuleDirectory()); + if (newFolder.toString().isEmpty()) { + // unable to create directory + criticalErrorOccurred = true; + errorMessages.add("Unable to create directory for archive extraction " + newFolder.toString()); + logger.log(Level.SEVERE, "Unable to create directory for archive extraction {0}", newFolder.toString()); + return; + } + + FileUtils.moveFileToDirectory(new File(file), newFolder.toFile(), false); + Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); //jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); //SYS_LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); synchronized (archiveDspLock) { try { - DataSource internalDataSource = new DataSource(deviceId, filePath); + DataSource internalDataSource = new DataSource(deviceId, newFilePath); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); - selectedProcessor.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); + selectedProcessor.process(deviceId, newFilePath, progressMonitor, internalArchiveDspCallBack); archiveDspLock.wait(); // at this point we got the content object(s) from the current DSP @@ -161,37 +179,33 @@ class AddArchiveTask implements Runnable { 
//jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); - logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{file, selectedProcessor.getDataSourceType()}); + logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{newFilePath.toString(), selectedProcessor.getDataSourceType()}); } } } } - // after all archive contents have been ingested - all the archive itself as a logical file - progressMonitor.setProgressText(String.format("Adding: %s", archivePath)); - LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); + // after all archive contents have been examined, add remaining extracted contents as one logical file set + progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); synchronized (archiveDspLock) { - try { - Path filePath = Paths.get(archivePath); - DataSource internalDataSource = new DataSource(deviceId, filePath); - DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); - localFilesDSP.process(deviceId, filePath, progressMonitor, internalArchiveDspCallBack); - archiveDspLock.wait(); - - // at this point we got the content object(s) from the current DSP - newDataSources.addAll(internalDataSource.getContent()); - } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { - // Log that the current DSP failed and set the error flag. We consider it an error - // if a DSP fails even if a later one succeeds since we expected to be able to process - // the data source which each DSP on the list. 
- //AutoIngestAlertFile.create(caseDirectoryPath); - //currentJob.setErrorsOccurred(true); - //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); - criticalErrorOccurred = true; - errorMessages.add(ex.getMessage()); - logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{archivePath, localFilesDSP.getDataSourceType()}); - } - } + DataSource internalDataSource = new DataSource(deviceId, destinationFolder); + DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); + + // folder where archive was extracted to + List pathsList = new ArrayList<>(); + pathsList.add(destinationFolder.toString()); + + // use archive file name as the name of the logical file set + String archiveFileName = FilenameUtils.getName(archivePath); + + LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); + localFilesDSP.run(deviceId, archiveFileName, pathsList, progressMonitor, internalArchiveDspCallBack); + + archiveDspLock.wait(); + + // at this point we got the content object(s) from the current DSP + newDataSources.addAll(internalDataSource.getContent()); + } } catch (Exception ex) { criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); @@ -208,6 +222,20 @@ class AddArchiveTask implements Runnable { callback.done(result, errorMessages, newDataSources); } } + + + private Path createDirectoryForFile(String fileName, String baseDirectory) { + // get file name without full path or extension + String fileNameNoExt = FilenameUtils.getBaseName(fileName); + + // create folder to extract archive to + Path newFolder = Paths.get(baseDirectory, ARCHIVE_EXTRACTOR_MODULE_OUTPUT_DIR, fileNameNoExt + "_" + TimeStampUtils.createTimeStamp()); + if (newFolder.toFile().mkdirs() == false) { + // unable to create directory + return Paths.get(""); + } + return newFolder; + } /* * Attempts to cancel adding the archive to the case 
database. From 2b59bf85648baef4751afb5fd7083b781a0c13c5 Mon Sep 17 00:00:00 2001 From: esaunders Date: Mon, 30 Oct 2017 16:35:21 -0400 Subject: [PATCH 07/45] Update comment to hopefully clear up why all keyword hits are processed in loadPageInfoFromHits() --- .../sleuthkit/autopsy/keywordsearch/HighlightedText.java | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java index b8bb93b240..57e8aa91c4 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/HighlightedText.java @@ -205,7 +205,13 @@ class HighlightedText implements IndexedText { */ synchronized private void loadPageInfoFromHits() { isLiteral = hits.getQuery().isLiteral(); - //organize the hits by page, filter as needed + + /** + * Organize the hits by page, filter as needed. + * We process *every* keyword here because in the case of a regular + * expression search there may be multiple different keyword + * hits located in different chunks for the same file/artifact. + */ for (Keyword k : hits.getKeywords()) { for (KeywordHit hit : hits.getResults(k)) { int chunkID = hit.getChunkId(); From 4a17cf3f75f7aa0d25fc23599d5750d886c48791 Mon Sep 17 00:00:00 2001 From: esaunders Date: Mon, 30 Oct 2017 16:39:55 -0400 Subject: [PATCH 08/45] Don't store document id string in the hit. 
--- .../src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java index e51c1c2620..196e0e7c39 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java @@ -38,7 +38,6 @@ class KeywordHit implements Comparable { private static final String GET_CONTENT_ID_FROM_ARTIFACT_ID = "SELECT obj_id FROM blackboard_artifacts WHERE artifact_id = "; - private final String solrDocumentId; private final long solrObjectId; private final int chunkId; private final String snippet; @@ -63,7 +62,6 @@ class KeywordHit implements Comparable { KeywordHit(String solrDocumentId, String snippet, String hit) throws TskCoreException { this.snippet = StringUtils.stripToEmpty(snippet); this.hit = hit; - this.solrDocumentId = solrDocumentId; /* * Parse the Solr document id to get the Solr object id and chunk id. 
@@ -112,7 +110,7 @@ class KeywordHit implements Comparable { } String getSolrDocumentId() { - return this.solrDocumentId; + return Long.toString(solrObjectId) + Server.CHUNK_ID_SEPARATOR + Long.toString(chunkId); } long getSolrObjectId() { From 5d2a632011c95d6da9e487c30a1491523948363a Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 30 Oct 2017 16:39:58 -0400 Subject: [PATCH 09/45] Improvements --- .../autoingest/AddArchiveTask.java | 96 +++++++++++-------- 1 file changed, 56 insertions(+), 40 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index c46067576a..0b121c902c 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -28,8 +28,6 @@ import java.util.UUID; import java.util.logging.Level; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; -import org.openide.filesystems.FileObject; -import org.openide.filesystems.FileUtil; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; @@ -40,8 +38,8 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; import org.sleuthkit.datamodel.Content; /* - * A runnable that adds an archive data source as well as data sources - * contained in the archive to the case database. + * A runnable that adds an archive data source as well as data sources contained + * in the archive to the case database. */ class AddArchiveTask implements Runnable { @@ -59,13 +57,13 @@ class AddArchiveTask implements Runnable { * Constructs a runnable task that adds an archive as well as data sources * contained in the archive to the case database. 
* - * @param deviceId An ASCII-printable identifier for the device associated - * with the data source that is intended to be unique across multiple cases - * (e.g., a UUID). - * @param archivePath Path to the archive file. + * @param deviceId An ASCII-printable identifier for the device + * associated with the data source that is intended + * to be unique across multiple cases (e.g., a UUID). + * @param archivePath Path to the archive file. * @param progressMonitor Progress monitor to report progress during - * processing. - * @param callback Callback to call when processing is done. + * processing. + * @param callback Callback to call when processing is done. */ AddArchiveTask(String deviceId, String archivePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) { this.deviceId = deviceId; @@ -106,8 +104,8 @@ class AddArchiveTask implements Runnable { if (destinationFolder.toString().isEmpty()) { // unable to create directory criticalErrorOccurred = true; - errorMessages.add("Unable to create directory for archive extraction " + destinationFolder.toString()); - logger.log(Level.SEVERE, "Unable to create directory for archive extraction {0}", destinationFolder.toString()); + errorMessages.add(String.format("Unable to create directory {0} to extract archive {1} ", new Object[]{destinationFolder.toString(), archivePath})); + logger.log(Level.SEVERE, String.format("Unable to create directory {0} to extract archive {1} ", new Object[]{destinationFolder.toString(), archivePath})); return; } @@ -117,11 +115,19 @@ class AddArchiveTask implements Runnable { // do processing Map validDataSourceProcessorsMap; for (String file : extractedFiles) { - progressMonitor.setProgressText(String.format("Adding: %s", file)); + // identify DSP for this file - // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source - validDataSourceProcessorsMap = 
DataSourceProcessorUtility.getDataSourceProcessor(Paths.get(file)); - if (validDataSourceProcessorsMap.isEmpty()) { + try { + // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source + validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(Paths.get(file)); + if (validDataSourceProcessorsMap.isEmpty()) { + continue; + } + } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { + criticalErrorOccurred = true; + errorMessages.add(ex.getMessage()); + logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS + // continue to next extracted file continue; } @@ -130,7 +136,7 @@ class AddArchiveTask implements Runnable { // Try each DSP in decreasing order of confidence for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { - + // skip local files and local disk DSPs, only looking for "valid" data sources if (selectedProcessor instanceof LocalDiskDSProcessor) { continue; @@ -142,23 +148,35 @@ class AddArchiveTask implements Runnable { if (selectedProcessor instanceof ArchiveExtractorDSProcessor) { continue; } - - // identified a "valid" data source within the archive. - // Move it to a different folder + + // identified a "valid" data source within the archive + progressMonitor.setProgressText(String.format("Adding: %s", file)); + + /* + * NOTE: we have to move the valid data sources to a + * separate folder and then add the data source from that + * folder. This is necessary because after all valid data + * sources have been identified, we are going to add the + * remaining extracted contents of the archive as a single + * logacl file set. Hence, if we do not move the data + * sources out of the extracted contents folder, those data + * source files will get added twice and can potentially + * result in duplicate keyword hits. 
+ */ Path newFolder = createDirectoryForFile(file, currentCase.getModuleDirectory()); if (newFolder.toString().isEmpty()) { // unable to create directory criticalErrorOccurred = true; - errorMessages.add("Unable to create directory for archive extraction " + newFolder.toString()); - logger.log(Level.SEVERE, "Unable to create directory for archive extraction {0}", newFolder.toString()); + errorMessages.add(String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); + logger.log(Level.SEVERE, String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); return; } - + + // Move it to a different folder FileUtils.moveFileToDirectory(new File(file), newFolder.toFile(), false); Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); - - //jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); - //SYS_LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); + + // ELTBD - do we want to log this in case log and/or system admin log? synchronized (archiveDspLock) { try { DataSource internalDataSource = new DataSource(deviceId, newFilePath); @@ -169,14 +187,12 @@ class AddArchiveTask implements Runnable { // at this point we got the content object(s) from the current DSP newDataSources.addAll(internalDataSource.getContent()); - break; // skip all other DSPs for this file + // skip all other DSPs for this data source + break; } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { // Log that the current DSP failed and set the error flag. We consider it an error // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. 
- //AutoIngestAlertFile.create(caseDirectoryPath); - //currentJob.setErrorsOccurred(true); - //jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); logger.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{newFilePath.toString(), selectedProcessor.getDataSourceType()}); @@ -184,25 +200,26 @@ class AddArchiveTask implements Runnable { } } } - - // after all archive contents have been examined, add remaining extracted contents as one logical file set + + // after all archive contents have been examined (and moved to separate folders if necessary), + // add remaining extracted contents as one logical file set progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); synchronized (archiveDspLock) { - DataSource internalDataSource = new DataSource(deviceId, destinationFolder); + DataSource internalDataSource = new DataSource(deviceId, destinationFolder); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); - + // folder where archive was extracted to List pathsList = new ArrayList<>(); pathsList.add(destinationFolder.toString()); - + // use archive file name as the name of the logical file set String archiveFileName = FilenameUtils.getName(archivePath); - + LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); localFilesDSP.run(deviceId, archiveFileName, pathsList, progressMonitor, internalArchiveDspCallBack); - + archiveDspLock.wait(); - + // at this point we got the content object(s) from the current DSP newDataSources.addAll(internalDataSource.getContent()); } @@ -222,8 +239,7 @@ class AddArchiveTask implements Runnable { callback.done(result, errorMessages, newDataSources); } } - - + private Path createDirectoryForFile(String fileName, String baseDirectory) { // get file name without full path or 
extension String fileNameNoExt = FilenameUtils.getBaseName(fileName); From 988fc1b40bcda1264f2a7b714b6db41e6ef26f57 Mon Sep 17 00:00:00 2001 From: esaunders Date: Mon, 30 Oct 2017 17:00:35 -0400 Subject: [PATCH 10/45] Removed contentID and isArtifactHit from KeywordHit. --- .../autopsy/keywordsearch/KeywordHit.java | 66 ++++++++++--------- .../autopsy/keywordsearch/RegexQuery.java | 4 +- .../keywordsearch/TermsComponentQuery.java | 2 +- 3 files changed, 36 insertions(+), 36 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java index 196e0e7c39..f9284e81d9 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordHit.java @@ -41,8 +41,6 @@ class KeywordHit implements Comparable { private final long solrObjectId; private final int chunkId; private final String snippet; - private final long contentID; - private final boolean hitOnArtifact; private final String hit; /** @@ -55,11 +53,8 @@ class KeywordHit implements Comparable { * For some searches (ie substring, regex) this will be * different than the search term. * - * @throws TskCoreException If there is a problem getting the underlying - * content associated with a hit on the text of an - * artifact. */ - KeywordHit(String solrDocumentId, String snippet, String hit) throws TskCoreException { + KeywordHit(String solrDocumentId, String snippet, String hit) { this.snippet = StringUtils.stripToEmpty(snippet); this.hit = hit; @@ -81,28 +76,6 @@ class KeywordHit implements Comparable { this.solrObjectId = Long.parseLong(split[0]); this.chunkId = Integer.parseInt(split[1]); } - - //artifacts have negative obj ids - hitOnArtifact = this.solrObjectId < 0; - - if (hitOnArtifact) { - // If the hit was in an artifact, look up the source content for the artifact. 
- SleuthkitCase caseDb = Case.getCurrentCase().getSleuthkitCase(); - try (SleuthkitCase.CaseDbQuery executeQuery = - caseDb.executeQuery(GET_CONTENT_ID_FROM_ARTIFACT_ID + this.solrObjectId); - ResultSet resultSet = executeQuery.getResultSet();) { - if (resultSet.next()) { - contentID = resultSet.getLong("obj_id"); - } else { - throw new TskCoreException("Failed to get obj_id for artifact with artifact_id =" + this.solrObjectId + ". No matching artifact was found."); - } - } catch (SQLException ex) { - throw new TskCoreException("Error getting obj_id for artifact with artifact_id =" + this.solrObjectId, ex); - } - } else { - //else the object id is for content. - contentID = this.solrObjectId; - } } String getHit() { @@ -129,8 +102,36 @@ class KeywordHit implements Comparable { return this.snippet; } - long getContentID() { - return this.contentID; + /** + * Get the content id associated with the content underlying hit. + * For hits on files this will be the same as the object id associated + * with the file. For hits on artifacts we look up the id of the object + * that produced the artifact. + * + * @return The id of the underlying content associated with the hit. + * @throws TskCoreException If there is a problem getting the underlying + * content associated with a hit on the text of an + * artifact. + */ + long getContentID() throws TskCoreException { + if (isArtifactHit()) { + // If the hit was in an artifact, look up the source content for the artifact. + SleuthkitCase caseDb = Case.getCurrentCase().getSleuthkitCase(); + try (SleuthkitCase.CaseDbQuery executeQuery = + caseDb.executeQuery(GET_CONTENT_ID_FROM_ARTIFACT_ID + this.solrObjectId); + ResultSet resultSet = executeQuery.getResultSet();) { + if (resultSet.next()) { + return resultSet.getLong("obj_id"); + } else { + throw new TskCoreException("Failed to get obj_id for artifact with artifact_id =" + this.solrObjectId + ". 
No matching artifact was found."); + } + } catch (SQLException ex) { + throw new TskCoreException("Error getting obj_id for artifact with artifact_id =" + this.solrObjectId, ex); + } + } else { + //else the object id is for content. + return this.solrObjectId; + } } /** @@ -139,7 +140,8 @@ class KeywordHit implements Comparable { * @return */ boolean isArtifactHit() { - return hitOnArtifact; + // artifacts have negative obj ids + return this.solrObjectId < 0; } /** @@ -148,7 +150,7 @@ class KeywordHit implements Comparable { * @return The artifact whose indexed text this hit is in. */ Optional getArtifactID() { - if (hitOnArtifact) { + if (isArtifactHit()) { return Optional.of(solrObjectId); } else { return Optional.empty(); diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java index 392a5e8bc8..6f1a574451 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java @@ -316,8 +316,6 @@ final class RegexQuery implements KeywordSearchQuery { } } - } catch (TskCoreException ex) { - throw ex; } catch (Throwable error) { /* * NOTE: Matcher.find() is known to throw StackOverflowError in rare @@ -447,7 +445,7 @@ final class RegexQuery implements KeywordSearchQuery { if (hit.isArtifactHit()) { LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getArtifactID().get())); //NON-NLS } else { - LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getContentID())); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: 
term = %s, snippet = '%s', object id = %d", foundKeyword.getSearchTerm(), hit.getSnippet(), hit.getSolrObjectId())); //NON-NLS } return null; } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermsComponentQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermsComponentQuery.java index 58087c4590..5953f6f35f 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermsComponentQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/TermsComponentQuery.java @@ -366,7 +366,7 @@ final class TermsComponentQuery implements KeywordSearchQuery { if (hit.isArtifactHit()) { LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for artifact keyword hit: term = %s, snippet = '%s', artifact id = %d", searchTerm, hit.getSnippet(), hit.getArtifactID().get())); //NON-NLS } else { - LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", searchTerm, hit.getSnippet(), hit.getContentID())); //NON-NLS + LOGGER.log(Level.SEVERE, String.format("Failed to parse credit card account number for content keyword hit: term = %s, snippet = '%s', object id = %d", searchTerm, hit.getSnippet(), hit.getSolrObjectId())); //NON-NLS } return null; } From 33a72835a4ae713e10864877db232e4316a9f52f Mon Sep 17 00:00:00 2001 From: esaunders Date: Mon, 30 Oct 2017 17:17:45 -0400 Subject: [PATCH 11/45] Reduce number of string instances created by re-using existing hit string for both the Keyword and KeywordHit instances. 
--- .../autopsy/keywordsearch/RegexQuery.java | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java index 6f1a574451..acefc554a4 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java @@ -100,6 +100,16 @@ final class RegexQuery implements KeywordSearchQuery { private String escapedQuery; private String field = Server.Schema.CONTENT_STR.toString(); + /** + * The following map is an optimization to ensure that we are referencing + * the same keyword hit String object in both the KeywordHit instance and + * it's associated Keyword instance. Even though we benefit from G1GC + * String deduplication, the overhead associated with both Keyword and + * KeywordHit maintaining their own reference can be significant when the + * number of hits gets large. + */ + private final HashMap keywordsFoundAcrossAllDocuments; + /** * Constructor with query to process. * @@ -113,6 +123,7 @@ final class RegexQuery implements KeywordSearchQuery { this.queryStringContainsWildcardPrefix = this.keywordString.startsWith(".*"); this.queryStringContainsWildcardSuffix = this.keywordString.endsWith(".*"); + this.keywordsFoundAcrossAllDocuments = new HashMap<>(); } @Override @@ -273,6 +284,14 @@ final class RegexQuery implements KeywordSearchQuery { hit = hit.replaceAll("[^0-9]$", ""); } + // Optimization to reduce the number of String objects created. + if (keywordsFoundAcrossAllDocuments.containsKey(hit)) { + // Use an existing String reference if it exists. 
+ hit = keywordsFoundAcrossAllDocuments.get(hit); + } else { + keywordsFoundAcrossAllDocuments.put(hit, hit); + } + if (artifactAttributeType == null) { hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit)); } else { @@ -303,7 +322,7 @@ final class RegexQuery implements KeywordSearchQuery { final String group = ccnMatcher.group("ccn"); if (CreditCardValidator.isValidCCN(group)) { hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit)); - }; + } } } From b4456ff85355a3f08933c25afea40fbbeb0a4940 Mon Sep 17 00:00:00 2001 From: esaunders Date: Mon, 30 Oct 2017 17:21:58 -0400 Subject: [PATCH 12/45] Provided a more accurate comment for the keywordsFoundAcrossAllDocuments member. --- .../sleuthkit/autopsy/keywordsearch/RegexQuery.java | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java index acefc554a4..9c92cdea5d 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java @@ -101,12 +101,11 @@ final class RegexQuery implements KeywordSearchQuery { private String field = Server.Schema.CONTENT_STR.toString(); /** - * The following map is an optimization to ensure that we are referencing - * the same keyword hit String object in both the KeywordHit instance and - * it's associated Keyword instance. Even though we benefit from G1GC - * String deduplication, the overhead associated with both Keyword and - * KeywordHit maintaining their own reference can be significant when the - * number of hits gets large. + * The following map is an optimization to ensure that we reuse + * the same keyword hit String object across all hits. 
Even though we + * benefit from G1GC String deduplication, the overhead associated with + * creating a new String object for every KeywordHit can be significant + * when the number of hits gets large. */ private final HashMap keywordsFoundAcrossAllDocuments; From 761884534a36da52060c164e084b25f20451ff71 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\zhaohui" Date: Thu, 28 Sep 2017 11:58:33 -0400 Subject: [PATCH 13/45] 3095: Expanding Data Sources Node in nightly test --- .../autopsy/testing/AutopsyTestCases.java | 24 +++++++++++++++++++ .../autopsy/testing/RegressionTest.java | 5 ++++ 2 files changed, 29 insertions(+) diff --git a/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java b/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java index 672ca5b22b..c716081672 100755 --- a/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java +++ b/Testing/src/org/sleuthkit/autopsy/testing/AutopsyTestCases.java @@ -36,6 +36,7 @@ import java.util.logging.Logger; import javax.imageio.ImageIO; import javax.swing.JDialog; import javax.swing.text.JTextComponent; +import javax.swing.tree.TreePath; import org.netbeans.jellytools.MainWindowOperator; import org.netbeans.jellytools.NbDialogOperator; import org.netbeans.jellytools.WizardOperator; @@ -53,6 +54,8 @@ import org.netbeans.jemmy.operators.JTabbedPaneOperator; import org.netbeans.jemmy.operators.JTableOperator; import org.netbeans.jemmy.operators.JTextFieldOperator; import org.netbeans.jemmy.operators.JToggleButtonOperator; +import org.netbeans.jemmy.operators.JTreeOperator; +import org.netbeans.jemmy.operators.JTreeOperator.NoSuchPathException; import org.sleuthkit.autopsy.core.UserPreferences; import org.sleuthkit.autopsy.core.UserPreferencesException; import org.sleuthkit.autopsy.events.MessageServiceConnectionInfo; @@ -290,6 +293,16 @@ public class AutopsyTestCases { } + public void testExpandDataSourcesTree() { + logger.info("Data Sources Node"); + MainWindowOperator mwo = 
MainWindowOperator.getDefault(); + JTreeOperator jto = new JTreeOperator(mwo, "Data Sources"); + String [] nodeNames = {"Data Sources"}; + TreePath tp = jto.findPath(nodeNames); + expandNodes(jto, tp); + screenshot("Data Sources Tree"); + } + public void testGenerateReportToolbar() { logger.info("Generate Report Toolbars"); MainWindowOperator mwo = MainWindowOperator.getDefault(); @@ -380,4 +393,15 @@ public class AutopsyTestCases { logger.log(Level.SEVERE, "Error saving messaging service connection info", ex); //NON-NLS } } + + private void expandNodes (JTreeOperator jto, TreePath tp) { + try { + jto.expandPath(tp); + for (TreePath t : jto.getChildPaths(tp)) { + expandNodes(jto, t); + } + } catch (NoSuchPathException ne) { + logger.log(Level.SEVERE, "Error expanding tree path", ne); + } + } } diff --git a/Testing/test/qa-functional/src/org/sleuthkit/autopsy/testing/RegressionTest.java b/Testing/test/qa-functional/src/org/sleuthkit/autopsy/testing/RegressionTest.java index 674395e0f1..0518da865b 100755 --- a/Testing/test/qa-functional/src/org/sleuthkit/autopsy/testing/RegressionTest.java +++ b/Testing/test/qa-functional/src/org/sleuthkit/autopsy/testing/RegressionTest.java @@ -69,6 +69,7 @@ public class RegressionTest extends TestCase { "testConfigureSearch", "testAddSourceWizard1", "testIngest", + "testExpandDataSourcesTree", //After do ingest, before generate report, we expand Data Sources node "testGenerateReportToolbar", "testGenerateReportButton"); } @@ -83,6 +84,7 @@ public class RegressionTest extends TestCase { "testConfigureSearch", "testAddSourceWizard1", "testIngest", + "testExpandDataSourcesTree", "testGenerateReportToolbar", "testGenerateReportButton"); } @@ -147,6 +149,9 @@ public class RegressionTest extends TestCase { autopsyTests.testIngest(); } + public void testExpandDataSourcesTree() { + autopsyTests.testExpandDataSourcesTree(); + } public void testGenerateReportToolbar() { autopsyTests.testGenerateReportToolbar(); } From 
bd32abf036b6a36d8efc19d2054ec211458a1ff0 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 2 Nov 2017 15:37:49 -0400 Subject: [PATCH 14/45] Added cancelation via Future --- .../autoingest/AddArchiveTask.java | 12 +++++------- .../ArchiveExtractorDSProcessor.java | 19 ++++++++++++++++--- 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 0b121c902c..add8617ae9 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -89,6 +89,8 @@ class AddArchiveTask implements Runnable { result = DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; callback.done(result, errorMessages, newDataSources); } + + logger.log(Level.INFO, "Using Archive Extractor DSP to process archive {0} ", archivePath); // extract the archive and pass the extracted folder as input UUID taskId = UUID.randomUUID(); @@ -151,6 +153,7 @@ class AddArchiveTask implements Runnable { // identified a "valid" data source within the archive progressMonitor.setProgressText(String.format("Adding: %s", file)); + logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new Object[]{selectedProcessor.getDataSourceType(), file}); /* * NOTE: we have to move the valid data sources to a @@ -204,6 +207,7 @@ class AddArchiveTask implements Runnable { // after all archive contents have been examined (and moved to separate folders if necessary), // add remaining extracted contents as one logical file set progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); + logger.log(Level.INFO, "Adding directory {0} as logical file set", destinationFolder.toString()); synchronized (archiveDspLock) { DataSource internalDataSource = new DataSource(deviceId, 
destinationFolder); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); @@ -228,6 +232,7 @@ class AddArchiveTask implements Runnable { errorMessages.add(ex.getMessage()); logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS } finally { + logger.log(Level.INFO, "Finished processing of archive {0}", archivePath); progressMonitor.setProgress(100); if (criticalErrorOccurred) { result = DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; @@ -252,11 +257,4 @@ class AddArchiveTask implements Runnable { } return newFolder; } - - /* - * Attempts to cancel adding the archive to the case database. - */ - public void cancelTask() { - // do a cancelation via future instead - } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index 0123592593..0e75e4c200 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -18,9 +18,15 @@ */ package org.sleuthkit.autopsy.experimental.autoingest; +import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.nio.file.Path; import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; import javax.swing.JPanel; +import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import org.openide.util.lookup.ServiceProviders; @@ -50,6 +56,9 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng private String archivePath; private boolean 
setDataSourceOptionsCalled; + private final ExecutorService jobProcessingExecutor; + private Future jobProcessingTaskFuture; + private static final String ARCHIVE_DSP_THREAD_NAME = "Archive-DSP-%d"; private AddArchiveTask addArchiveTask; /** @@ -60,6 +69,7 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng */ public ArchiveExtractorDSProcessor() { configPanel = ArchiveFilePanel.createInstance(ArchiveExtractorDSProcessor.class.getName(), ArchiveUtil.getArchiveFilters()); + jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(ARCHIVE_DSP_THREAD_NAME).build()); } @Override @@ -151,7 +161,7 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng */ public void run(String deviceId, String archivePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) { addArchiveTask = new AddArchiveTask(deviceId, archivePath, progressMonitor, callback); - new Thread(addArchiveTask).start(); + jobProcessingTaskFuture = jobProcessingExecutor.submit(addArchiveTask); } /** @@ -163,8 +173,11 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng */ @Override public void cancel() { - if (null != addArchiveTask) { - addArchiveTask.cancelTask(); + if (null != jobProcessingTaskFuture) { + jobProcessingTaskFuture.cancel(true); + jobProcessingExecutor.shutdownNow(); + // ELTBD - do we want to wait for the cancellation to complete? 
I think not, + // given that the cancelation is of "best effort" variety } } From cb3ebedf9cd2840a4ea52724eafe522bd897bf20 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 2 Nov 2017 15:38:12 -0400 Subject: [PATCH 15/45] Progress monitor and logging updates --- .../autopsy/experimental/autoingest/AddArchiveTask.java | 3 ++- .../experimental/autoingest/ArchiveExtractorDSProcessor.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index add8617ae9..98f36d6273 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -111,7 +111,8 @@ class AddArchiveTask implements Runnable { return; } - // extract contents of ZIP archive into destination folder + // extract contents of ZIP archive into destination folder + progressMonitor.setProgressText(String.format("Extracting archive contents to: %s", destinationFolder.toString())); List extractedFiles = ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); // do processing diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index 0e75e4c200..dee9879dec 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -175,7 +175,7 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng public void cancel() { if (null != jobProcessingTaskFuture) { jobProcessingTaskFuture.cancel(true); - jobProcessingExecutor.shutdownNow(); + jobProcessingExecutor.shutdown(); // 
ELTBD - do we want to wait for the cancellation to complete? I think not, // given that the cancelation is of "best effort" variety } From 17f8112f6642d87c8893dd8e41cb08ce70ff9c95 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 2 Nov 2017 17:35:01 -0400 Subject: [PATCH 16/45] Lots of optimizations and bug fixes --- .../autoingest/AddArchiveTask.java | 151 +++++++++++------- 1 file changed, 95 insertions(+), 56 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 98f36d6273..b14255caff 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -22,16 +22,19 @@ import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Level; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; +import org.openide.util.Exceptions; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; +import static org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; @@ -116,70 +119,45 @@ class AddArchiveTask implements Runnable { List extractedFiles = ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); // do processing - Map 
validDataSourceProcessorsMap; - for (String file : extractedFiles) { - - // identify DSP for this file - try { - // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process the current data source - validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(Paths.get(file)); - if (validDataSourceProcessorsMap.isEmpty()) { - continue; - } - } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { - criticalErrorOccurred = true; - errorMessages.add(ex.getMessage()); - logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS - // continue to next extracted file + for (String file : extractedFiles) { + // identify all "valid" DSPs that can process this file + List validDataSourceProcessors = getValidDataSourceProcessors(Paths.get(file), errorMessages); + if (validDataSourceProcessors.isEmpty()) { continue; } - // Get an ordered list of data source processors to try - List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); + // identified a "valid" data source within the archive + progressMonitor.setProgressText(String.format("Adding: %s", file)); + + /* + * NOTE: we have to move the valid data sources to a separate + * folder and then add the data source from that folder. This is + * necessary because after all valid data sources have been + * identified, we are going to add the remaining extracted + * contents of the archive as a single logacl file set. Hence, + * if we do not move the data sources out of the extracted + * contents folder, those data source files will get added twice + * and can potentially result in duplicate keyword hits. 
+ */ + Path newFolder = createDirectoryForFile(file, currentCase.getModuleDirectory()); + if (newFolder.toString().isEmpty()) { + // unable to create directory + criticalErrorOccurred = true; + errorMessages.add(String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); + logger.log(Level.SEVERE, String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); + return; + } + + // Copy it to a different folder + FileUtils.copyFileToDirectory(new File(file), newFolder.toFile()); + Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); // Try each DSP in decreasing order of confidence + boolean success = false; for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { - // skip local files and local disk DSPs, only looking for "valid" data sources - if (selectedProcessor instanceof LocalDiskDSProcessor) { - continue; - } - if (selectedProcessor instanceof LocalFilesDSProcessor) { - continue; - } - // also skip nested archive files, those will be ingested as logical files and extracted during ingest - if (selectedProcessor instanceof ArchiveExtractorDSProcessor) { - continue; - } - - // identified a "valid" data source within the archive - progressMonitor.setProgressText(String.format("Adding: %s", file)); logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new Object[]{selectedProcessor.getDataSourceType(), file}); - /* - * NOTE: we have to move the valid data sources to a - * separate folder and then add the data source from that - * folder. This is necessary because after all valid data - * sources have been identified, we are going to add the - * remaining extracted contents of the archive as a single - * logacl file set. 
Hence, if we do not move the data - * sources out of the extracted contents folder, those data - * source files will get added twice and can potentially - * result in duplicate keyword hits. - */ - Path newFolder = createDirectoryForFile(file, currentCase.getModuleDirectory()); - if (newFolder.toString().isEmpty()) { - // unable to create directory - criticalErrorOccurred = true; - errorMessages.add(String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); - logger.log(Level.SEVERE, String.format("Unable to create directory {0} to extract content of archive {1} ", new Object[]{newFolder.toString(), archivePath})); - return; - } - - // Move it to a different folder - FileUtils.moveFileToDirectory(new File(file), newFolder.toFile(), false); - Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); - // ELTBD - do we want to log this in case log and/or system admin log? synchronized (archiveDspLock) { try { @@ -188,7 +166,16 @@ class AddArchiveTask implements Runnable { selectedProcessor.process(deviceId, newFilePath, progressMonitor, internalArchiveDspCallBack); archiveDspLock.wait(); - // at this point we got the content object(s) from the current DSP + // at this point we got the content object(s) from the current DSP. + // check whether the data source was processed successfully + if ((internalDataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS) || + internalDataSource.getContent().isEmpty()) { + // move onto the the next DSP that can process this data source + continue; + } + + // if we are here it means the data source was addedd successfully + success = true; newDataSources.addAll(internalDataSource.getContent()); // skip all other DSPs for this data source @@ -203,6 +190,18 @@ class AddArchiveTask implements Runnable { } } } + + if (success) { + // one of the DSPs successfully processed the data source. 
delete the + // copy of the data source in the original extracted archive folder. + // otherwise the data source is going to be added again as a logical file. + FileUtils.deleteQuietly(Paths.get(file).toFile()); + } else { + // none of the DSPs were able to process the data source. delete the + // copy of the data source in the temporary folder. the data source is + // going to be added as a logical file with the rest of the extracted contents. + FileUtils.deleteQuietly(newFolder.toFile()); + } } // after all archive contents have been examined (and moved to separate folders if necessary), @@ -245,6 +244,46 @@ class AddArchiveTask implements Runnable { callback.done(result, errorMessages, newDataSources); } } + + + private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages) { + Map validDataSourceProcessorsMap; + try { + validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(dataSourcePath); + } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { + criticalErrorOccurred = true; + errorMessages.add(ex.getMessage()); + logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS + return Collections.emptyList(); + } + if (validDataSourceProcessorsMap.isEmpty()) { + return Collections.emptyList(); + } + + // Get an ordered list of data source processors to try + List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); + + for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { + + // skip local files and local disk DSPs, only looking for "valid" data sources + if (selectedProcessor instanceof LocalDiskDSProcessor) { + validDataSourceProcessors.remove(selectedProcessor); + continue; + } + if (selectedProcessor instanceof LocalFilesDSProcessor) { + validDataSourceProcessors.remove(selectedProcessor); + continue; + } + // also skip 
nested archive files, those will be ingested as logical files and extracted during ingest + if (selectedProcessor instanceof ArchiveExtractorDSProcessor) { + validDataSourceProcessors.remove(selectedProcessor); + continue; + } + } + + return validDataSourceProcessors; + } + private Path createDirectoryForFile(String fileName, String baseDirectory) { // get file name without full path or extension From c95fa28c4942da407a0596310df80a6c929eab5c Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 6 Nov 2017 13:03:28 -0500 Subject: [PATCH 17/45] Tracking auto ingest alerts via case nodes. --- .../autoingest/AutoIngestAlertFile.java | 108 ---------- .../autoingest/AutoIngestManager.java | 202 ++++++++++-------- 2 files changed, 111 insertions(+), 199 deletions(-) delete mode 100755 Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAlertFile.java diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAlertFile.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAlertFile.java deleted file mode 100755 index 40fff351eb..0000000000 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestAlertFile.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2015 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.experimental.autoingest; - -import java.io.IOException; -import java.nio.file.FileAlreadyExistsException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.logging.Level; - -/** - * Utility for creating and checking for the existence of an automated ingest - * alert file. The purpose of the file is to put a marker in the case directory - * when an error or warning occurs in connection with an automated ingest job. - */ -final class AutoIngestAlertFile { - - private static final String ERROR_FILE_NAME = "autoingest.alert"; - - /** - * Checks whether an automated ingest alert file exists in a case directory. - * - * @param caseDirectoryPath The case directory path. - * - * @return True or false. - */ - static boolean exists(Path caseDirectoryPath) { - return caseDirectoryPath.resolve(ERROR_FILE_NAME).toFile().exists(); - } - - /** - * Creates an automated ingest alert file in a case directory if such a file - * does not already exist. - * - * @param caseDirectoryPath The case directory path. - * - * @return True or false. - */ - static void create(Path caseDirectoryPath) throws AutoIngestAlertFileException { - try { - Files.createFile(caseDirectoryPath.resolve(ERROR_FILE_NAME)); - } catch (FileAlreadyExistsException ignored) { - /* - * The file already exists, the exception is not exceptional. - */ - } catch (IOException ex) { - /* - * FileAlreadyExistsException implementation is optional, so check - * for that case. - */ - if (!exists(caseDirectoryPath)) { - throw new AutoIngestAlertFileException(String.format("Error creating automated ingest alert file in %s", caseDirectoryPath), ex); - } - } - } - - /** - * Exception thrown when there is a problem creating an alert file. - */ - final static class AutoIngestAlertFileException extends Exception { - - private static final long serialVersionUID = 1L; - - /** - * Constructs an exception to throw when there is a problem creating an - * alert file. 
- * - * @param message The exception message. - */ - private AutoIngestAlertFileException(String message) { - super(message); - } - - /** - * Constructs an exception to throw when there is a problem creating an - * alert file. - * - * @param message The exception message. - * @param cause The cause of the exception, if it was an exception. - */ - private AutoIngestAlertFileException(String message, Throwable cause) { - super(message, cause); - } - } - - /** - * Prevents instantiation of this utility class. - */ - private AutoIngestAlertFile() { - } - -} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index b621cb514f..a0168cd9ee 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -64,6 +64,7 @@ import org.sleuthkit.autopsy.casemodule.Case.CaseType; import org.sleuthkit.autopsy.casemodule.CaseActionException; import org.sleuthkit.autopsy.casemodule.CaseDetails; import org.sleuthkit.autopsy.casemodule.CaseMetadata; +import org.sleuthkit.autopsy.casemodule.CaseNodeData; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.Lock; @@ -78,7 +79,6 @@ import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.events.AutopsyEventException; import org.sleuthkit.autopsy.events.AutopsyEventPublisher; -import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestAlertFile.AutoIngestAlertFileException; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobLogger.AutoIngestJobLoggerException; import 
org.sleuthkit.autopsy.experimental.autoingest.FileExporter.FileExportException; import org.sleuthkit.autopsy.experimental.autoingest.ManifestFileParser.ManifestFileParserException; @@ -98,6 +98,7 @@ import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobStartResult; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestModuleError; +import org.sleuthkit.datamodel.Content; /** * An auto ingest manager is responsible for processing auto ingest jobs defined @@ -554,7 +555,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang ++maxPriority; for (AutoIngestJob job : prioritizedJobs) { try { - this.updateCoordinationServiceNode(job); + this.updateCoordinationServiceManifestNode(job); job.setPriority(maxPriority); } catch (CoordinationServiceException | InterruptedException ex) { throw new AutoIngestManagerException("Error updating case priority", ex); @@ -602,13 +603,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /* - * Bump the priority by one and update the coordination service node - * data for the job. + * Bump the priority by one and update the coordination service + * manifest node data for the job. */ if (null != prioritizedJob) { ++maxPriority; try { - this.updateCoordinationServiceNode(prioritizedJob); + this.updateCoordinationServiceManifestNode(prioritizedJob); } catch (CoordinationServiceException | InterruptedException ex) { throw new AutoIngestManagerException("Error updating job priority", ex); } @@ -649,7 +650,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang /* * Add the job to the pending jobs queue and update the coordination - * service node data for the job. + * service manifest node data for the job. 
*/ if (null != completedJob && !completedJob.getCaseDirectoryPath().toString().isEmpty()) { try { @@ -661,7 +662,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang completedJob.setCompletedDate(new Date(0)); completedJob.setProcessingStatus(PENDING); completedJob.setProcessingStage(AutoIngestJob.Stage.PENDING, Date.from(Instant.now())); - updateCoordinationServiceNode(completedJob); + updateCoordinationServiceManifestNode(completedJob); pendingJobs.add(completedJob); } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Coordination service error while reprocessing %s", manifestPath), ex); @@ -755,7 +756,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString())); AutoIngestJob deletedJob = new AutoIngestJob(nodeData); deletedJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.DELETED); - this.updateCoordinationServiceNode(deletedJob); + this.updateCoordinationServiceManifestNode(deletedJob); } catch (AutoIngestJobNodeData.InvalidDataException | AutoIngestJobException ex) { SYS_LOGGER.log(Level.WARNING, String.format("Invalid auto ingest job node data for %s", manifestPath), ex); return CaseDeletionResult.PARTIALLY_DELETED; @@ -865,20 +866,31 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /** - * Sets the coordination service node data for an auto ingest job. + * Sets the coordination service manifest node. * - * Note that a new auto ingest node data object will be created from the job - * passed in. Thus, if the data version of the node has changed, the node - * will be "upgraded" as well as updated. + * Note that a new auto ingest job node data object will be created from + * the job passed in. 
Thus, if the data version of the node has changed, + * the node will be "upgraded" as well as updated. * * @param job The auto ingest job. */ - void updateCoordinationServiceNode(AutoIngestJob job) throws CoordinationServiceException, InterruptedException { + void updateCoordinationServiceManifestNode(AutoIngestJob job) throws CoordinationServiceException, InterruptedException { AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(job); String manifestNodePath = job.getManifest().getFilePath().toString(); byte[] rawData = nodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData); } + + /** + * Sets the coordination service case node. + * + * @param caseNodeData The case node data. + * @param caseDirectoryPath The case directory. + */ + void updateCoordinationServiceCaseNode(CaseNodeData caseNodeData, Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException { + byte[] rawData = caseNodeData.toArray(); + coordinationService.setNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString(), rawData); + } /** * A task that submits an input directory scan task to the input directory @@ -1147,8 +1159,8 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /* - * Try to upgrade/update the coordination service node data for - * the job. + * Try to upgrade/update the coordination service manifest node + * data for the job. 
* * An exclusive lock is obtained before doing so because another * host may have already found the job, obtained an exclusive @@ -1161,7 +1173,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang */ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { - updateCoordinationServiceNode(job); + updateCoordinationServiceManifestNode(job); } } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex); @@ -1186,9 +1198,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang */ private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException { /* - * Create the coordination service node data for the job. Note that - * getting the lock will create the node for the job (with no data) - * if it does not already exist. + * Create the coordination service manifest node data for the job. + * Note that getting the lock will create the node for the job + * (with no data) if it does not already exist. * * An exclusive lock is obtained before creating the node data * because another host may have already found the job, obtained an @@ -1202,7 +1214,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { AutoIngestJob job = new AutoIngestJob(manifest); - updateCoordinationServiceNode(job); + updateCoordinationServiceManifestNode(job); newPendingJobsList.add(job); } } catch (CoordinationServiceException ex) { @@ -1219,14 +1231,14 @@ public final class AutoIngestManager extends Observable implements PropertyChang * status was not updated. 
* * @param manifest The manifest for upgrading the node. - * @param nodeData The node data. + * @param jobNodeData The auto ingest job node data. * * @throws InterruptedException if the thread running the input * directory scan task is interrupted while * blocked, i.e., if auto ingest is * shutting down. */ - private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData nodeData) throws InterruptedException, AutoIngestJobException { + private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData jobNodeData) throws InterruptedException, AutoIngestJobException { /* * Try to get an exclusive lock on the coordination service node for * the job. If the lock cannot be obtained, another host in the auto @@ -1240,7 +1252,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang /* * Create the recovery job. */ - AutoIngestJob job = new AutoIngestJob(nodeData); + AutoIngestJob job = new AutoIngestJob(jobNodeData); int numberOfCrashes = job.getNumberOfCrashes(); ++numberOfCrashes; job.setNumberOfCrashes(numberOfCrashes); @@ -1254,15 +1266,16 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /* - * Update the coordination service node for the job. If - * this fails, leave the recovery to another host. + * Update the coordination service manifest node for + * the job. If this fails, leave the recovery to + * another host. 
*/ try { - updateCoordinationServiceNode(job); + updateCoordinationServiceManifestNode(job); if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { newPendingJobsList.add(job); } else { - newCompletedJobsList.add(new AutoIngestJob(nodeData)); + newCompletedJobsList.add(new AutoIngestJob(jobNodeData)); } } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); @@ -1270,13 +1283,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang } /* - * Write the alert file and do the logging. + * Update the case node data and do the logging. */ if (null != caseDirectoryPath) { try { - AutoIngestAlertFile.create(nodeData.getCaseDirectoryPath()); - } catch (AutoIngestAlertFileException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error creating alert file for crashed job for %s", manifestPath), ex); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + } catch (CaseNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); } } if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { @@ -1292,7 +1307,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); if (null != caseDirectoryPath) { try { - new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), nodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry(); + new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), jobNodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry(); } catch 
(AutoIngestJobLoggerException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); } @@ -1352,15 +1367,16 @@ public final class AutoIngestManager extends Observable implements PropertyChang job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); /* - * Try to upgrade/update the coordination service node data - * for the job. It is possible that two hosts will both try - * to obtain the lock to do the upgrade operation at the - * same time. If this happens, the host that is holding the - * lock will complete the upgrade operation. + * Try to upgrade/update the coordination service manifest + * node data for the job. It is possible that two hosts + * will both try to obtain the lock to do the upgrade + * operation at the same time. If this happens, the host + * that is holding the lock will complete the upgrade + * operation. */ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { - updateCoordinationServiceNode(job); + updateCoordinationServiceManifestNode(job); } } catch (CoordinationServiceException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifest.getFilePath()), ex); @@ -1507,8 +1523,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang errorState = ErrorState.ANALYSIS_STARTUP_ERROR; } else if (ex instanceof FileExportException) { errorState = ErrorState.FILE_EXPORT_ERROR; - } else if (ex instanceof AutoIngestAlertFileException) { - errorState = ErrorState.ALERT_FILE_ERROR; } else if (ex instanceof AutoIngestJobLoggerException) { errorState = ErrorState.JOB_LOGGER_ERROR; } else if (ex instanceof AutoIngestDataSourceProcessorException) { @@ -1691,9 +1705,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * @throws FileExportException 
if there is an * error exporting * files. - * @throws AutoIngestAlertFileException if there is an - * error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an * error writing to * the auto ingest @@ -1710,7 +1721,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * auto ingest node * data objects. */ - private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException { + private void processJobs() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException, CaseNodeData.InvalidDataException { SYS_LOGGER.log(Level.INFO, "Started processing pending jobs queue"); Lock manifestLock = JobProcessingTask.this.dequeueAndLockNextJob(); while (null != manifestLock) { @@ -1890,9 +1901,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * @throws FileExportException if there is an * error exporting * files. - * @throws AutoIngestAlertFileException if there is an - * error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an * error writing to * the auto ingest @@ -1909,13 +1917,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang * auto ingest node * data objects. 
*/ - private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, AutoIngestJobNodeData.InvalidDataException { + private void processJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException { Path manifestPath = currentJob.getManifest().getFilePath(); SYS_LOGGER.log(Level.INFO, "Started processing of {0}", manifestPath); currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PROCESSING); currentJob.setProcessingStage(AutoIngestJob.Stage.STARTING, Date.from(Instant.now())); currentJob.setProcessingHostName(AutoIngestManager.LOCAL_HOST_NAME); - updateCoordinationServiceNode(currentJob); + updateCoordinationServiceManifestNode(currentJob); setChanged(); notifyObservers(Event.JOB_STARTED); eventPublisher.publishRemotely(new AutoIngestJobStartedEvent(currentJob)); @@ -1939,14 +1947,16 @@ public final class AutoIngestManager extends Observable implements PropertyChang currentJob.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING); } currentJob.setProcessingHostName(""); - updateCoordinationServiceNode(currentJob); + updateCoordinationServiceManifestNode(currentJob); boolean retry = (!currentJob.isCanceled() && !currentJob.isCompleted()); SYS_LOGGER.log(Level.INFO, "Completed processing of {0}, retry = {1}", new Object[]{manifestPath, retry}); if (currentJob.isCanceled()) { Path 
caseDirectoryPath = currentJob.getCaseDirectoryPath(); if (null != caseDirectoryPath) { - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath); jobLogger.logJobCancelled(); } @@ -1994,7 +2004,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * i.e., if auto ingest is * shutting down. */ - private void attemptJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { + private void attemptJob() throws CoordinationServiceException, SharedConfigurationException, ServicesMonitorException, DatabaseServerDownException, KeywordSearchServerDownException, CaseManagementException, AnalysisStartupException, FileExportException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException { updateConfiguration(); if (currentJob.isCanceled() || jobProcessingTaskFuture.isCancelled()) { return; @@ -2159,8 +2169,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * level ingest modules. * @throws FileExportException if there is an error exporting * files. - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. 
* @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2169,7 +2177,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. */ - private void runIngestForJob(Case caseForJob) throws CoordinationServiceException, AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { + private void runIngestForJob(Case caseForJob) throws CoordinationServiceException, AnalysisStartupException, FileExportException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException { try { if (currentJob.isCanceled() || jobProcessingTaskFuture.isCancelled()) { return; @@ -2197,8 +2205,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * level ingest modules. * @throws FileExportException if there is an error exporting * files. - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2207,7 +2213,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void ingestDataSource(Case caseForJob) throws AnalysisStartupException, FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { + private void ingestDataSource(Case caseForJob) throws AnalysisStartupException, FileExportException, AutoIngestJobLoggerException, InterruptedException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException, CoordinationServiceException { if (currentJob.isCanceled() || jobProcessingTaskFuture.isCancelled()) { return; } @@ -2255,8 +2261,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * * @return A data source object. * - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2265,7 +2269,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * interrupted while blocked, i.e., * if auto ingest is shutting down. 
*/ - private DataSource identifyDataSource() throws AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { + private DataSource identifyDataSource() throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Identifying data source for {0} ", manifestPath); @@ -2277,22 +2281,22 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (!dataSource.exists()) { SYS_LOGGER.log(Level.SEVERE, "Missing data source for {0}", manifestPath); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logMissingDataSource(); return null; } String deviceId = manifest.getDeviceId(); return new DataSource(deviceId, dataSourcePath); } - + /** * Passes the data source for the current job through a data source * processor that adds it to the case database. * * @param dataSource The data source. * - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2301,7 +2305,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void runDataSourceProcessor(Case caseForJob, DataSource dataSource) throws InterruptedException, AutoIngestAlertFileException, AutoIngestJobLoggerException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException { + private void runDataSourceProcessor(Case caseForJob, DataSource dataSource) throws InterruptedException, AutoIngestJobLoggerException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Adding data source for {0} ", manifestPath); @@ -2327,7 +2331,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang // did we find a data source processor that can process the data source if (validDataSourceProcessorsMap.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. - AutoIngestAlertFile.create(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logFailedToIdentifyDataSource(); SYS_LOGGER.log(Level.WARNING, "Unsupported data source {0} for {1}", new Object[]{dataSource.getPath(), manifestPath}); // NON-NLS @@ -2353,7 +2359,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang // Log that the current DSP failed and set the error flag. We consider it an error // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. 
- AutoIngestAlertFile.create(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()}); @@ -2377,8 +2385,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * * @param dataSource The data source. * - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2387,7 +2393,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void logDataSourceProcessorResult(DataSource dataSource) throws AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { + private void logDataSourceProcessorResult(DataSource dataSource) throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); @@ -2399,7 +2405,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logNoDataSourceContent(); } break; @@ -2411,7 +2419,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logNoDataSourceContent(); } break; @@ -2421,7 +2431,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang SYS_LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{manifestPath, errorMessage}); } 
currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logFailedToAddDataSource(); break; } @@ -2435,7 +2447,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang */ SYS_LOGGER.log(Level.WARNING, "Cancellation while waiting for data source processor for {0}", manifestPath); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logDataSourceProcessorCancelled(); } } @@ -2449,8 +2463,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * * @throws AnalysisStartupException if there is an error analyzing * the data source. - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2459,7 +2471,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void analyze(DataSource dataSource) throws AnalysisStartupException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { + private void analyze(DataSource dataSource) throws AnalysisStartupException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", manifestPath); @@ -2491,7 +2503,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang if (!cancelledModules.isEmpty()) { SYS_LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); for (String module : snapshot.getCancelledDataSourceIngestModules()) { SYS_LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); jobLogger.logIngestModuleCancelled(module); @@ -2501,7 +2515,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang } else { currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, 
caseDirectoryPath); jobLogger.logAnalysisCancelled(); CancellationReason cancellationReason = snapshot.getCancellationReason(); if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { @@ -2514,13 +2530,17 @@ public final class AutoIngestManager extends Observable implements PropertyChang SYS_LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logIngestModuleStartupErrors(); throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); } else { SYS_LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logAnalysisStartupError(); throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } @@ -2529,7 +2549,9 @@ public final class AutoIngestManager extends Observable implements PropertyChang SYS_LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); } 
currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logIngestJobSettingsErrors(); throw new AnalysisStartupException("Error(s) in ingest job settings"); } @@ -2548,8 +2570,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang * * @throws FileExportException if there is an error exporting * the files. - * @throws AutoIngestAlertFileException if there is an error creating an - * alert file. * @throws AutoIngestJobLoggerException if there is an error writing to * the auto ingest log for the * case. @@ -2558,7 +2578,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void exportFiles(DataSource dataSource) throws FileExportException, AutoIngestAlertFileException, AutoIngestJobLoggerException, InterruptedException { + private void exportFiles(DataSource dataSource) throws FileExportException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Exporting files for {0}", manifestPath); @@ -2574,11 +2594,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang } catch (FileExportException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error doing file export for %s", manifestPath), ex); currentJob.setErrorsOccurred(true); - AutoIngestAlertFile.create(caseDirectoryPath); // Do this first, it is more important than the case log + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logFileExportError(); } } - + /** * A data source processor progress monitor does nothing. 
There is * currently no mechanism for showing or recording data source processor @@ -2740,7 +2762,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang currentJob.getProcessingStageDetails(); setChanged(); notifyObservers(Event.JOB_STATUS_UPDATED); - updateCoordinationServiceNode(currentJob); + updateCoordinationServiceManifestNode(currentJob); eventPublisher.publishRemotely(new AutoIngestJobStatusEvent(currentJob)); } @@ -2843,7 +2865,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang CASE_MANAGEMENT_ERROR("Case management error"), ANALYSIS_STARTUP_ERROR("Analysis startup error"), FILE_EXPORT_ERROR("File export error"), - ALERT_FILE_ERROR("Alert file error"), JOB_LOGGER_ERROR("Job logger error"), DATA_SOURCE_PROCESSOR_ERROR("Data source processor error"), UNEXPECTED_EXCEPTION("Unknown error"); @@ -2918,7 +2939,6 @@ public final class AutoIngestManager extends Observable implements PropertyChang PARTIALLY_DELETED, FULLY_DELETED } - static final class AutoIngestManagerException extends Exception { private static final long serialVersionUID = 1L; From a17f2126675a3b815258e6c4ae0e8f1f5a9aca0d Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 6 Nov 2017 13:10:02 -0500 Subject: [PATCH 18/45] Cleanup. --- .../autopsy/casemodule/CaseNodeData.java | 141 ++++++++++++++++++ .../autoingest/AutoIngestManager.java | 3 +- 2 files changed, 142 insertions(+), 2 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java new file mode 100755 index 0000000000..a97f48c10d --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java @@ -0,0 +1,141 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2017 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.casemodule; + +import java.nio.BufferUnderflowException; +import java.nio.ByteBuffer; + +/** + * An object that converts case data for a case directory coordination service + * node to and from byte arrays. + */ +public class CaseNodeData { + + private static final int CURRENT_VERSION = 0; + + private int version; + private boolean errorsOccurred; + + /** + * Gets the current version of the case directory coordination service node + * data. + * + * @return The version number. + */ + public static int getCurrentVersion() { + return CaseNodeData.CURRENT_VERSION; + } + + /** + * Uses coordination service node data to construct an object that converts + * case data for a case directory coordination service node to and from byte + * arrays. + * + * @param nodeData The raw bytes received from the coordination service. + * + * @throws InvalidDataException If the node data buffer is smaller than + * expected. + */ + public CaseNodeData(byte[] nodeData) throws InvalidDataException { + if(nodeData == null || nodeData.length == 0) { + this.version = CURRENT_VERSION; + this.errorsOccurred = false; + } else { + /* + * Get fields from node data. 
+ */ + ByteBuffer buffer = ByteBuffer.wrap(nodeData); + try { + if (buffer.hasRemaining()) { + this.version = buffer.getInt(); + + /* + * Flags bit format: 76543210 + * 0-6 --> reserved for future use + * 7 --> errorsOccurred + */ + byte flags = buffer.get(); + this.errorsOccurred = (flags < 0); + } + } catch (BufferUnderflowException ex) { + throw new InvalidDataException("Node data is incomplete", ex); + } + } + } + + /** + * Gets whether or not any errors occurred during the processing of the job. + * + * @return True or false. + */ + public boolean getErrorsOccurred() { + return this.errorsOccurred; + } + + /** + * Sets whether or not any errors occurred during the processing of job. + * + * @param errorsOccurred True or false. + */ + public void setErrorsOccurred(boolean errorsOccurred) { + this.errorsOccurred = errorsOccurred; + } + + /** + * Gets the node data version number. + * + * @return The version number. + */ + public int getVersion() { + return this.version; + } + + /** + * Gets the node data as a byte array that can be sent to the coordination + * service. + * + * @return The node data as a byte array. + */ + public byte[] toArray() { + ByteBuffer buffer = ByteBuffer.allocate(5); + + buffer.putInt(this.version); + buffer.put((byte)(this.errorsOccurred ? 
0x80 : 0)); + + // Prepare the array + byte[] array = new byte[buffer.position()]; + buffer.rewind(); + buffer.get(array, 0, array.length); + + return array; + } + + public final static class InvalidDataException extends Exception { + + private static final long serialVersionUID = 1L; + + private InvalidDataException(String message) { + super(message); + } + + private InvalidDataException(String message, Throwable cause) { + super(message, cause); + } + } +} diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index a0168cd9ee..2ce6e2cba8 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -98,7 +98,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobStartResult; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestModuleError; -import org.sleuthkit.datamodel.Content; /** * An auto ingest manager is responsible for processing auto ingest jobs defined @@ -114,7 +113,7 @@ import org.sleuthkit.datamodel.Content; * The activities of the auto ingest nodes in a cluster are coordinated by way * of a coordination service and the nodes communicate via event messages. */ -public final class AutoIngestManager extends Observable implements PropertyChangeListener { +final class AutoIngestManager extends Observable implements PropertyChangeListener { private static final int NUM_INPUT_SCAN_SCHEDULING_THREADS = 1; private static final String INPUT_SCAN_SCHEDULER_THREAD_NAME = "AIM-input-scan-scheduler-%d"; From ab77d2c5aadbad4e43171a3ac709bc8dafa1871f Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 6 Nov 2017 13:11:34 -0500 Subject: [PATCH 19/45] Cleanup. 
--- Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java index a97f48c10d..1eceb084e6 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java @@ -25,7 +25,7 @@ import java.nio.ByteBuffer; * An object that converts case data for a case directory coordination service * node to and from byte arrays. */ -public class CaseNodeData { +public final class CaseNodeData { private static final int CURRENT_VERSION = 0; From a87ea617920d572546d9a0d25658c32ae7b7d0a8 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 6 Nov 2017 14:10:10 -0500 Subject: [PATCH 20/45] ConcurrentModification bug fix --- .../autoingest/AddArchiveTask.java | 28 +++++++++---------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index b14255caff..a1a82cf0da 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -23,6 +23,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.UUID; @@ -245,7 +246,12 @@ class AddArchiveTask implements Runnable { } } - + /** + * + * @param dataSourcePath + * @param errorMessages + * @return + */ private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages) { Map validDataSourceProcessorsMap; try { @@ -263,21 +269,15 @@ class AddArchiveTask implements Runnable { // Get an ordered list of data source processors to try 
List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); - for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { + for (Iterator iterator = validDataSourceProcessors.iterator(); iterator.hasNext();) { + AutoIngestDataSourceProcessor selectedProcessor = iterator.next(); - // skip local files and local disk DSPs, only looking for "valid" data sources - if (selectedProcessor instanceof LocalDiskDSProcessor) { - validDataSourceProcessors.remove(selectedProcessor); - continue; - } - if (selectedProcessor instanceof LocalFilesDSProcessor) { - validDataSourceProcessors.remove(selectedProcessor); - continue; - } + // skip local files and local disk DSPs, only looking for "valid" data sources. // also skip nested archive files, those will be ingested as logical files and extracted during ingest - if (selectedProcessor instanceof ArchiveExtractorDSProcessor) { - validDataSourceProcessors.remove(selectedProcessor); - continue; + if ( (selectedProcessor instanceof LocalDiskDSProcessor) || + (selectedProcessor instanceof LocalFilesDSProcessor) || + (selectedProcessor instanceof ArchiveExtractorDSProcessor) ) { + iterator.remove(); } } From 63caed024a36bd7a94b41db9a83a5da9cd3c6c5d Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 6 Nov 2017 15:04:55 -0500 Subject: [PATCH 21/45] Added comments and java docs --- .../autoingest/AddArchiveTask.java | 21 +++++++++++++++---- 1 file changed, 17 insertions(+), 4 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index a1a82cf0da..9b92010916 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -247,10 +247,14 @@ class AddArchiveTask implements Runnable { } /** 
- * - * @param dataSourcePath - * @param errorMessages - * @return + * Get a list of data source processors that can process the data source of + * interest. The list is sorted by confidence in decreasing order. + * LocalDisk, LocalFiles, and ArchiveDSP are removed from the list. + * + * @param dataSourcePath Full path to the data source + * @param errorMessages List for error messages + * + * @return Ordered list of applicable DSPs */ private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages) { Map validDataSourceProcessorsMap; @@ -285,6 +289,15 @@ class AddArchiveTask implements Runnable { } + /** + * Create a directory in ModuleOutput folder based on input file name. A + * time stamp is appended to the directory name. + * + * @param fileName File name + * @param baseDirectory Base directory. Typically the case output directory. + * + * @return Full path to the new directory + */ private Path createDirectoryForFile(String fileName, String baseDirectory) { // get file name without full path or extension String fileNameNoExt = FilenameUtils.getBaseName(fileName); From d86b120d2b786e1ce1e23f125c4de48e4c3dab87 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 6 Nov 2017 15:52:12 -0500 Subject: [PATCH 22/45] Removed unused imports --- .../autopsy/experimental/autoingest/AddArchiveTask.java | 1 - 1 file changed, 1 deletion(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 0e462d0b61..3d4a0761d7 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -30,7 +30,6 @@ import java.util.UUID; import java.util.logging.Level; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; -import org.openide.util.Exceptions; import 
org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; From 83261afcb61895efd79cb0d4b1678d9c40abf0d3 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 6 Nov 2017 16:16:49 -0500 Subject: [PATCH 23/45] Removed unused imports --- .../experimental/autoingest/ArchiveExtractorDSProcessor.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index dee9879dec..00d7245b33 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -24,9 +24,7 @@ import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; import javax.swing.JPanel; -import org.openide.util.Exceptions; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import org.openide.util.lookup.ServiceProviders; From a46e406f60aadc1aefdaf1176449bd7d2bf56850 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Mon, 6 Nov 2017 17:49:52 -0500 Subject: [PATCH 24/45] Code review feedback --- .../autoingest/AddArchiveTask.java | 12 ++-- .../autoingest/AddDataSourceCallback.java | 7 --- .../ArchiveExtractorDSProcessor.java | 63 +------------------ 3 files changed, 8 insertions(+), 74 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 3d4a0761d7..d883a9b34f 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -97,11 +97,6 @@ class AddArchiveTask implements Runnable { logger.log(Level.INFO, "Using Archive Extractor DSP to process archive {0} ", archivePath); // extract the archive and pass the extracted folder as input - UUID taskId = UUID.randomUUID(); - if (callback instanceof AddDataSourceCallback) { - // if running as part of automated ingest - re-use the task ID - taskId = ((AddDataSourceCallback) callback).getTaskId(); - } try { Case currentCase = Case.getCurrentCase(); @@ -135,7 +130,7 @@ class AddArchiveTask implements Runnable { * folder and then add the data source from that folder. This is * necessary because after all valid data sources have been * identified, we are going to add the remaining extracted - * contents of the archive as a single logacl file set. Hence, + * contents of the archive as a single logical file set. Hence, * if we do not move the data sources out of the extracted * contents folder, those data source files will get added twice * and can potentially result in duplicate keyword hits. @@ -154,12 +149,13 @@ class AddArchiveTask implements Runnable { Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); // Try each DSP in decreasing order of confidence + UUID taskId = UUID.randomUUID(); + currentCase.notifyAddingDataSource(taskId); boolean success = false; for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new Object[]{selectedProcessor.getDataSourceType(), file}); - // ELTBD - do we want to log this in case log and/or system admin log? 
synchronized (archiveDspLock) { try { DataSource internalDataSource = new DataSource(deviceId, newFilePath); @@ -210,6 +206,8 @@ class AddArchiveTask implements Runnable { progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); logger.log(Level.INFO, "Adding directory {0} as logical file set", destinationFolder.toString()); synchronized (archiveDspLock) { + UUID taskId = UUID.randomUUID(); + currentCase.notifyAddingDataSource(taskId); DataSource internalDataSource = new DataSource(deviceId, destinationFolder); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java index 181ad4d4d2..db19fc2fbc 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java @@ -54,13 +54,6 @@ class AddDataSourceCallback extends DataSourceProcessorCallback { this.taskId = taskId; this.lock = lock; } - - /** - * @return the taskId - */ - public UUID getTaskId() { - return taskId; - } /** * Called by the data source processor when it finishes running in its own diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index 00d7245b33..3c31d8d2d0 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -28,7 +28,6 @@ import javax.swing.JPanel; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import 
org.openide.util.lookup.ServiceProviders; -import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; @@ -40,12 +39,11 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; * be used independently of the wizard. */ @ServiceProviders(value={ - @ServiceProvider(service=DataSourceProcessor.class), @ServiceProvider(service=AutoIngestDataSourceProcessor.class)} ) @NbBundle.Messages({ "ArchiveDSP.dsType.text=Archive file"}) -public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIngestDataSourceProcessor { +public class ArchiveExtractorDSProcessor implements AutoIngestDataSourceProcessor { private final static String DATA_SOURCE_TYPE = Bundle.ArchiveDSP_dsType_text(); @@ -163,20 +161,11 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng } /** - * Requests cancellation of the background task that adds a data source to - * the case database, after the task is started using the run method. This - * is a "best effort" cancellation, with no guarantees that the case - * database will be unchanged. If cancellation succeeded, the list of new - * data sources returned by the background task will be empty. + * This DSP is a service to AutoIngestDataSourceProcessor only. Hence it is + * only used by AIM. AIM currently doesn't support DSP cancellation. */ @Override public void cancel() { - if (null != jobProcessingTaskFuture) { - jobProcessingTaskFuture.cancel(true); - jobProcessingExecutor.shutdown(); - // ELTBD - do we want to wait for the cancellation to complete? 
I think not, - // given that the cancelation is of "best effort" variety - } } @Override @@ -186,50 +175,4 @@ public class ArchiveExtractorDSProcessor implements DataSourceProcessor, AutoIng configPanel.reset(); setDataSourceOptionsCalled = false; } - - /** - * Extracts the contents of a ZIP archive submitted as a data source to a - * subdirectory of the auto ingest module output directory. - * - * @throws IOException if there is a problem extracting the data source from - * the archive. - - private static Path extractDataSource(Path outputDirectoryPath, Path dataSourcePath) throws IOException { - String dataSourceFileNameNoExt = FilenameUtils.removeExtension(dataSourcePath.getFileName().toString()); - Path destinationFolder = Paths.get(outputDirectoryPath.toString(), - AUTO_INGEST_MODULE_OUTPUT_DIR, - dataSourceFileNameNoExt + "_" + TimeStampUtils.createTimeStamp()); - Files.createDirectories(destinationFolder); - - int BUFFER_SIZE = 524288; // Read/write 500KB at a time - File sourceZipFile = dataSourcePath.toFile(); - ZipFile zipFile; - zipFile = new ZipFile(sourceZipFile, ZipFile.OPEN_READ); - Enumeration zipFileEntries = zipFile.entries(); - try { - while (zipFileEntries.hasMoreElements()) { - ZipEntry entry = zipFileEntries.nextElement(); - String currentEntry = entry.getName(); - File destFile = new File(destinationFolder.toString(), currentEntry); - destFile = new File(destinationFolder.toString(), destFile.getName()); - File destinationParent = destFile.getParentFile(); - destinationParent.mkdirs(); - if (!entry.isDirectory()) { - BufferedInputStream is = new BufferedInputStream(zipFile.getInputStream(entry)); - int currentByte; - byte data[] = new byte[BUFFER_SIZE]; - try (FileOutputStream fos = new FileOutputStream(destFile); BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER_SIZE)) { - currentByte = is.read(data, 0, BUFFER_SIZE); - while (currentByte != -1) { - dest.write(data, 0, currentByte); - currentByte = is.read(data, 0, 
BUFFER_SIZE); - } - } - } - } - } finally { - zipFile.close(); - } - return destinationFolder; - } */ } From 635537496ae886795e1d1197c83567ff37395100 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 7 Nov 2017 13:54:35 -0500 Subject: [PATCH 25/45] fixed height of External Viewer Global Settings Panel --- .../ExternalViewerGlobalSettingsPanel.form | 83 +++++++++++++------ .../ExternalViewerGlobalSettingsPanel.java | 59 +++++++------ 2 files changed, 94 insertions(+), 48 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.form index e00c4a966f..50706d661f 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.form +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.form @@ -3,7 +3,7 @@
- + @@ -32,19 +32,24 @@ + + + + + - + - + @@ -55,7 +60,7 @@ - + @@ -81,13 +86,18 @@ - + + + + + + @@ -103,7 +113,7 @@ - + @@ -114,7 +124,7 @@ - + @@ -137,6 +147,11 @@ + + + + + @@ -149,42 +164,35 @@ - - - - - - - - - - - + + + + - + - + - - - + + + - + @@ -224,6 +232,15 @@ + + + + + + + + + @@ -237,6 +254,15 @@ + + + + + + + + + @@ -250,6 +276,15 @@ + + + + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.java index b2497baed8..f87c6b4e33 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanel.java @@ -93,13 +93,17 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme editRuleButton = new javax.swing.JButton(); deleteRuleButton = new javax.swing.JButton(); - setPreferredSize(new java.awt.Dimension(750, 500)); + setPreferredSize(new java.awt.Dimension(701, 453)); + + jPanel1.setPreferredSize(new java.awt.Dimension(701, 453)); org.openide.awt.Mnemonics.setLocalizedText(externalViewerTitleLabel, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.externalViewerTitleLabel.text")); // NOI18N - jSplitPane1.setDividerLocation(350); + jSplitPane1.setDividerLocation(365); jSplitPane1.setDividerSize(1); + exePanel.setPreferredSize(new java.awt.Dimension(311, 224)); + org.openide.awt.Mnemonics.setLocalizedText(exePathLabel, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.exePathLabel.text")); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(exePathNameLabel, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.exePathNameLabel.text")); // NOI18N @@ -113,7 +117,7 @@ final class 
ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme .addGroup(exePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(exePathLabel) .addComponent(exePathNameLabel)) - .addContainerGap(159, Short.MAX_VALUE)) + .addContainerGap(47, Short.MAX_VALUE)) ); exePanelLayout.setVerticalGroup( exePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -122,17 +126,22 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme .addComponent(exePathLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(exePathNameLabel) - .addContainerGap(408, Short.MAX_VALUE)) + .addContainerGap(361, Short.MAX_VALUE)) ); jSplitPane1.setRightComponent(exePanel); + rulesPanel.setPreferredSize(new java.awt.Dimension(365, 406)); + org.openide.awt.Mnemonics.setLocalizedText(ruleListLabel, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.ruleListLabel.text")); // NOI18N rulesScrollPane.setViewportView(rulesList); newRuleButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/add16.png"))); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(newRuleButton, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.newRuleButton.text")); // NOI18N + newRuleButton.setMaximumSize(new java.awt.Dimension(111, 25)); + newRuleButton.setMinimumSize(new java.awt.Dimension(111, 25)); + newRuleButton.setPreferredSize(new java.awt.Dimension(111, 25)); newRuleButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { newRuleButtonActionPerformed(evt); @@ -141,6 +150,9 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme editRuleButton.setIcon(new 
javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/edit16.png"))); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(editRuleButton, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.editRuleButton.text")); // NOI18N + editRuleButton.setMaximumSize(new java.awt.Dimension(111, 25)); + editRuleButton.setMinimumSize(new java.awt.Dimension(111, 25)); + editRuleButton.setPreferredSize(new java.awt.Dimension(111, 25)); editRuleButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { editRuleButtonActionPerformed(evt); @@ -149,6 +161,9 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme deleteRuleButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/delete16.png"))); // NOI18N org.openide.awt.Mnemonics.setLocalizedText(deleteRuleButton, org.openide.util.NbBundle.getMessage(ExternalViewerGlobalSettingsPanel.class, "ExternalViewerGlobalSettingsPanel.deleteRuleButton.text")); // NOI18N + deleteRuleButton.setMaximumSize(new java.awt.Dimension(111, 25)); + deleteRuleButton.setMinimumSize(new java.awt.Dimension(111, 25)); + deleteRuleButton.setPreferredSize(new java.awt.Dimension(111, 25)); deleteRuleButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { deleteRuleButtonActionPerformed(evt); @@ -162,20 +177,16 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme .addGroup(rulesPanelLayout.createSequentialGroup() .addContainerGap() .addGroup(rulesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(rulesPanelLayout.createSequentialGroup() - .addGroup(rulesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(ruleListLabel, javax.swing.GroupLayout.Alignment.TRAILING, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addGroup(rulesPanelLayout.createSequentialGroup() - .addComponent(rulesScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 311, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, Short.MAX_VALUE))) - .addContainerGap()) - .addGroup(rulesPanelLayout.createSequentialGroup() - .addComponent(newRuleButton) + .addComponent(ruleListLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(rulesScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 345, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, rulesPanelLayout.createSequentialGroup() + .addGap(0, 0, Short.MAX_VALUE) + .addComponent(newRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(editRuleButton) + .addComponent(editRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(deleteRuleButton) - .addGap(0, 0, Short.MAX_VALUE)))) + .addComponent(deleteRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) + .addContainerGap()) ); rulesPanelLayout.setVerticalGroup( rulesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -183,12 +194,12 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme .addContainerGap() .addComponent(ruleListLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(rulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 380, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + 
.addComponent(rulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 328, Short.MAX_VALUE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(rulesPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(newRuleButton) - .addComponent(editRuleButton) - .addComponent(deleteRuleButton)) + .addComponent(newRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(editRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(deleteRuleButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap()) ); @@ -202,12 +213,12 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() - .addComponent(externalViewerTitleLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 777, Short.MAX_VALUE) + .addComponent(externalViewerTitleLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 681, Short.MAX_VALUE) .addContainerGap()) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 777, Short.MAX_VALUE) + .addComponent(jScrollPane1) .addContainerGap())) ); jPanel1Layout.setVerticalGroup( @@ -215,7 +226,7 @@ final class ExternalViewerGlobalSettingsPanel extends javax.swing.JPanel impleme .addGroup(jPanel1Layout.createSequentialGroup() .addContainerGap() .addComponent(externalViewerTitleLabel) - .addContainerGap(475, Short.MAX_VALUE)) + .addContainerGap(428, Short.MAX_VALUE)) 
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(32, 32, 32) From b85a41b4b9f5ade8e5a6cdde7bada63d53cacf06 Mon Sep 17 00:00:00 2001 From: William Schaefer Date: Tue, 7 Nov 2017 13:54:58 -0500 Subject: [PATCH 26/45] fixed height of File ext mismatch settings Panel --- .../FileExtMismatchSettingsPanel.form | 87 +++++++++++++------ .../FileExtMismatchSettingsPanel.java | 62 ++++++++----- 2 files changed, 101 insertions(+), 48 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.form index 162aef8994..f8ffc03428 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.form +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.form @@ -1,6 +1,11 @@ + + + + + @@ -16,12 +21,12 @@ - + - + @@ -29,7 +34,7 @@ - + @@ -37,36 +42,45 @@ - - - + + + - + - + + + + - + + + + + + + @@ -76,23 +90,26 @@ - + - + + + + + - + - + - + - @@ -101,14 +118,14 @@ - - - + + + - + - + @@ -142,6 +159,15 @@ + + + + + + + + + @@ -166,6 +192,11 @@ + + + + + @@ -198,16 +229,16 @@ - + - - - + + + - + @@ -221,6 +252,12 @@ + + + + + + diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.java index 1d5470b4df..85cf916f2c 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchSettingsPanel.java @@ -154,11 +154,18 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel removeExtButton = new javax.swing.JButton(); extHeaderLabel = new javax.swing.JLabel(); - jPanel1.setPreferredSize(new java.awt.Dimension(687, 
450)); + setPreferredSize(new java.awt.Dimension(718, 430)); - jSplitPane1.setDividerLocation(430); + jPanel1.setPreferredSize(new java.awt.Dimension(718, 430)); + + jScrollPane1.setRequestFocusEnabled(false); + + jSplitPane1.setDividerLocation(365); jSplitPane1.setDividerSize(1); + mimePanel.setPreferredSize(new java.awt.Dimension(369, 424)); + mimePanel.setRequestFocusEnabled(false); + jLabel1.setText(org.openide.util.NbBundle.getMessage(FileExtMismatchSettingsPanel.class, "FileExtMismatchSettingsPanel.jLabel1.text")); // NOI18N mimeTable.setModel(mimeTableModel); @@ -166,6 +173,9 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel newTypeButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/add16.png"))); // NOI18N newTypeButton.setText(org.openide.util.NbBundle.getMessage(FileExtMismatchSettingsPanel.class, "FileExtMismatchSettingsPanel.newTypeButton.text")); // NOI18N + newTypeButton.setMaximumSize(new java.awt.Dimension(111, 25)); + newTypeButton.setMinimumSize(new java.awt.Dimension(111, 25)); + newTypeButton.setPreferredSize(new java.awt.Dimension(111, 25)); newTypeButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { newTypeButtonActionPerformed(evt); @@ -188,16 +198,18 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel .addGroup(mimePanelLayout.createSequentialGroup() .addContainerGap() .addGroup(mimePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) + .addGroup(mimePanelLayout.createSequentialGroup() + .addGap(0, 0, Short.MAX_VALUE) + .addComponent(jLabel1) + .addGap(286, 286, 286)) .addGroup(mimePanelLayout.createSequentialGroup() .addGroup(mimePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jLabel1) + 
.addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 349, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(mimePanelLayout.createSequentialGroup() - .addComponent(newTypeButton) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(newTypeButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(10, 10, 10) .addComponent(removeTypeButton))) - .addGap(0, 191, Short.MAX_VALUE))) - .addContainerGap()) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) ); mimePanelLayout.setVerticalGroup( mimePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -205,18 +217,22 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel .addContainerGap() .addComponent(jLabel1) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 348, Short.MAX_VALUE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(mimePanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(newTypeButton) + .addComponent(newTypeButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(removeTypeButton, javax.swing.GroupLayout.PREFERRED_SIZE, 25, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap()) ); jSplitPane1.setLeftComponent(mimePanel); + extensionPanel.setPreferredSize(new java.awt.Dimension(344, 424)); + newExtButton.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/add16.png"))); // NOI18N 
newExtButton.setText(org.openide.util.NbBundle.getMessage(FileExtMismatchSettingsPanel.class, "FileExtMismatchSettingsPanel.newExtButton.text")); // NOI18N + newExtButton.setMaximumSize(new java.awt.Dimension(111, 25)); + newExtButton.setMinimumSize(new java.awt.Dimension(111, 25)); newExtButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { newExtButtonActionPerformed(evt); @@ -248,7 +264,7 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel .addGroup(extensionPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(extHeaderLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 324, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(extensionPanelLayout.createSequentialGroup() - .addComponent(newExtButton) + .addComponent(newExtButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(removeExtButton))) .addGap(0, 0, Short.MAX_VALUE))) @@ -260,10 +276,10 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel .addContainerGap() .addComponent(extHeaderLabel) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 427, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(jScrollPane3, javax.swing.GroupLayout.DEFAULT_SIZE, 348, Short.MAX_VALUE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addGroup(extensionPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(newExtButton) + .addComponent(newExtButton, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(removeExtButton)) .addContainerGap()) ); 
@@ -277,27 +293,27 @@ final class FileExtMismatchSettingsPanel extends IngestModuleGlobalSettingsPanel jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() - .addContainerGap() - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 797, Short.MAX_VALUE) - .addContainerGap()) + .addGap(0, 0, 0) + .addComponent(jScrollPane1) + .addGap(0, 0, 0)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() - .addContainerGap() - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 504, Short.MAX_VALUE) - .addContainerGap()) + .addGap(0, 0, 0) + .addComponent(jScrollPane1) + .addGap(0, 0, 0)) ); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 817, Short.MAX_VALUE) + .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 526, Short.MAX_VALUE) + .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) ); }// //GEN-END:initComponents From e1546bd51043b4931e265efebadfcc5936e7839d Mon Sep 17 00:00:00 2001 From: esaunders Date: Tue, 7 Nov 2017 16:12:06 -0500 Subject: [PATCH 27/45] Only create one KeywordHit instance per document for a given hit. 
--- .../sleuthkit/autopsy/keywordsearch/RegexQuery.java | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java index 9c92cdea5d..a0383ef03b 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java @@ -236,6 +236,8 @@ final class RegexQuery implements KeywordSearchQuery { private List createKeywordHits(SolrDocument solrDoc) throws TskCoreException { + final HashMap keywordsFoundInThisDocument = new HashMap<>(); + List hits = new ArrayList<>(); final String docId = solrDoc.getFieldValue(Server.Schema.ID.toString()).toString(); final Integer chunkSize = (Integer) solrDoc.getFieldValue(Server.Schema.CHUNK_SIZE.toString()); @@ -283,9 +285,14 @@ final class RegexQuery implements KeywordSearchQuery { hit = hit.replaceAll("[^0-9]$", ""); } - // Optimization to reduce the number of String objects created. + // We will only create one KeywordHit instance per document for + // a given hit. + if (keywordsFoundInThisDocument.containsKey(hit)) { + continue; + } + keywordsFoundInThisDocument.put(hit, hit); + if (keywordsFoundAcrossAllDocuments.containsKey(hit)) { - // Use an existing String reference if it exists. hit = keywordsFoundAcrossAllDocuments.get(hit); } else { keywordsFoundAcrossAllDocuments.put(hit, hit); From 0a6b3bc62b55c2a05533988b8b5c55015605e29a Mon Sep 17 00:00:00 2001 From: esaunders Date: Tue, 7 Nov 2017 16:48:11 -0500 Subject: [PATCH 28/45] Use String interning instead of our own hashmap to reuse hits. 
--- .../autopsy/keywordsearch/RegexQuery.java | 26 +++++++------------ 1 file changed, 10 insertions(+), 16 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java index a0383ef03b..7702cfdc7e 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/RegexQuery.java @@ -100,15 +100,6 @@ final class RegexQuery implements KeywordSearchQuery { private String escapedQuery; private String field = Server.Schema.CONTENT_STR.toString(); - /** - * The following map is an optimization to ensure that we reuse - * the same keyword hit String object across all hits. Even though we - * benefit from G1GC String deduplication, the overhead associated with - * creating a new String object for every KeywordHit can be significant - * when the number of hits gets large. - */ - private final HashMap keywordsFoundAcrossAllDocuments; - /** * Constructor with query to process. * @@ -122,7 +113,6 @@ final class RegexQuery implements KeywordSearchQuery { this.queryStringContainsWildcardPrefix = this.keywordString.startsWith(".*"); this.queryStringContainsWildcardSuffix = this.keywordString.endsWith(".*"); - this.keywordsFoundAcrossAllDocuments = new HashMap<>(); } @Override @@ -285,6 +275,16 @@ final class RegexQuery implements KeywordSearchQuery { hit = hit.replaceAll("[^0-9]$", ""); } + /** + * The use of String interning is an optimization to ensure + * that we reuse the same keyword hit String object across + * all hits. Even though we benefit from G1GC String + * deduplication, the overhead associated with creating a + * new String object for every KeywordHit can be significant + * when the number of hits gets large. + */ + hit = hit.intern(); + // We will only create one KeywordHit instance per document for // a given hit. 
if (keywordsFoundInThisDocument.containsKey(hit)) { @@ -292,12 +292,6 @@ final class RegexQuery implements KeywordSearchQuery { } keywordsFoundInThisDocument.put(hit, hit); - if (keywordsFoundAcrossAllDocuments.containsKey(hit)) { - hit = keywordsFoundAcrossAllDocuments.get(hit); - } else { - keywordsFoundAcrossAllDocuments.put(hit, hit); - } - if (artifactAttributeType == null) { hits.add(new KeywordHit(docId, makeSnippet(content, hitMatcher, hit), hit)); } else { From 88d668ffeb7c7f4039d5156350601d21424298fa Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Wed, 8 Nov 2017 00:54:53 -0500 Subject: [PATCH 29/45] Wait cursor set for 'Ingest Module Settings' button. --- .../experimental/configuration/AutoIngestSettingsPanel.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/AutoIngestSettingsPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/AutoIngestSettingsPanel.java index 0d449fdb02..58f68f3b54 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/AutoIngestSettingsPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/configuration/AutoIngestSettingsPanel.java @@ -19,6 +19,7 @@ package org.sleuthkit.autopsy.experimental.configuration; import java.awt.BorderLayout; +import java.awt.Cursor; import java.io.File; import java.nio.file.Files; import java.util.List; @@ -513,7 +514,8 @@ public class AutoIngestSettingsPanel extends javax.swing.JPanel { } private void displayIngestJobSettingsPanel() { - + this.getParent().setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + IngestJobSettings ingestJobSettings = new IngestJobSettings(AutoIngestUserPreferences.getAutoModeIngestModuleContextString()); showWarnings(ingestJobSettings); IngestJobSettingsPanel ingestJobSettingsPanel = new IngestJobSettingsPanel(ingestJobSettings); @@ -526,6 +528,8 @@ public class AutoIngestSettingsPanel extends 
javax.swing.JPanel { ingestJobSettings.save(); showWarnings(ingestJobSettings); } + + this.getParent().setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); } private static void showWarnings(IngestJobSettings ingestJobSettings) { From dd34e770d800219167088f3dac0fa2b53d43bb58 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 9 Nov 2017 12:24:29 -0500 Subject: [PATCH 30/45] Fixed a bug in AIMs use of taskId and data source addition notification --- .../autoingest/AutoIngestManager.java | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 855a93da71..d12d0e7f0b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -2320,18 +2320,15 @@ public final class AutoIngestManager extends Observable implements PropertyChang Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Adding data source for {0} ", manifestPath); currentJob.setProcessingStage(AutoIngestJob.Stage.ADDING_DATA_SOURCE, Date.from(Instant.now())); - UUID taskId = UUID.randomUUID(); - DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock); DataSourceProcessorProgressMonitor progressMonitor = new DoNothingDSPProgressMonitor(); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, manifest.getDataSourceFileName(), caseDirectoryPath); try { - caseForJob.notifyAddingDataSource(taskId); - Map validDataSourceProcessorsMap; + // Get an ordered list of data source processors to try + List validDataSourceProcessors; try { - // lookup all AutomatedIngestDataSourceProcessors and poll which ones are able to process 
the current data source - validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(dataSource.getPath()); + validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSource.getPath()); } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { SYS_LOGGER.log(Level.SEVERE, "Exception while determining best data source processor for {0}", dataSource.getPath()); // rethrow the exception. It will get caught & handled upstream and will result in AIM auto-pause. @@ -2339,7 +2336,7 @@ public final class AutoIngestManager extends Observable implements PropertyChang } // did we find a data source processor that can process the data source - if (validDataSourceProcessorsMap.isEmpty()) { + if (validDataSourceProcessors.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. AutoIngestAlertFile.create(caseDirectoryPath); currentJob.setErrorsOccurred(true); @@ -2348,12 +2345,13 @@ public final class AutoIngestManager extends Observable implements PropertyChang return; } - // Get an ordered list of data source processors to try - List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); - synchronized (ingestLock) { // Try each DSP in decreasing order of confidence for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { + UUID taskId = UUID.randomUUID(); + caseForJob.notifyAddingDataSource(taskId); + DataSourceProcessorCallback callBack = new AddDataSourceCallback(caseForJob, dataSource, taskId, ingestLock); + caseForJob.notifyAddingDataSource(taskId); jobLogger.logDataSourceProcessorSelected(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.INFO, "Identified data source type for {0} as {1}", new Object[]{manifestPath, selectedProcessor.getDataSourceType()}); try { From cb7a93d0819faa14b88e952313490f7b69fb5e43 Mon Sep 17 00:00:00 2001 
From: Eugene Livis Date: Thu, 9 Nov 2017 12:25:27 -0500 Subject: [PATCH 31/45] Added simpler interfaces that require only one method call --- .../DataSourceProcessorUtility.java | 49 +++++++++++++++++-- 1 file changed, 44 insertions(+), 5 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java index a8aafe4236..4878f7fa7d 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSourceProcessorUtility.java @@ -47,11 +47,7 @@ class DataSourceProcessorUtility { * @throws * org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException */ - static Map getDataSourceProcessor(Path dataSourcePath) throws AutoIngestDataSourceProcessorException { - - // lookup all AutomatedIngestDataSourceProcessors - Collection processorCandidates = Lookup.getDefault().lookupAll(AutoIngestDataSourceProcessor.class); - + static Map getDataSourceProcessorForFile(Path dataSourcePath, Collection processorCandidates) throws AutoIngestDataSourceProcessorException { Map validDataSourceProcessorsMap = new HashMap<>(); for (AutoIngestDataSourceProcessor processor : processorCandidates) { int confidence = processor.canProcess(dataSourcePath); @@ -62,6 +58,49 @@ class DataSourceProcessorUtility { return validDataSourceProcessorsMap; } + + /** + * A utility method to find all Data Source Processors (DSP) that are able + * to process the input data source. Only the DSPs that implement + * AutoIngestDataSourceProcessor interface are used. Returns ordered list of + * data source processors. DSPs are ordered in descending order from highest + * confidence to lowest. 
+ * + * @param dataSourcePath Full path to the data source + * + * @return Ordered list of data source processors. DSPs are ordered in + * descending order from highest confidence to lowest. + * + * @throws + * org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException + */ + static List getOrderedListOfDataSourceProcessors(Path dataSourcePath) throws AutoIngestDataSourceProcessorException { + // lookup all AutomatedIngestDataSourceProcessors + Collection processorCandidates = Lookup.getDefault().lookupAll(AutoIngestDataSourceProcessor.class); + return getOrderedListOfDataSourceProcessors(dataSourcePath, processorCandidates); + } + + /** + * A utility method to find all Data Source Processors (DSP) that are able + * to process the input data source. Only the DSPs that implement + * AutoIngestDataSourceProcessor interface are used. Returns ordered list of + * data source processors. DSPs are ordered in descending order from highest + * confidence to lowest. + * + * @param dataSourcePath Full path to the data source + * @param processorCandidates Collection of AutoIngestDataSourceProcessor objects to use + * + * @return Ordered list of data source processors. DSPs are ordered in + * descending order from highest confidence to lowest. + * + * @throws + * org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException + */ + static List getOrderedListOfDataSourceProcessors(Path dataSourcePath, Collection processorCandidates) throws AutoIngestDataSourceProcessorException { + Map validDataSourceProcessorsMap = getDataSourceProcessorForFile(dataSourcePath, processorCandidates); + return orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); + } + /** * A utility method to get an ordered list of data source processors. 
DSPs From 0c46e2d2d75d0c44ce8025fee7ca466b9c307dd2 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 9 Nov 2017 12:26:25 -0500 Subject: [PATCH 32/45] Only adding logical file set if there are files remaining. Bug fixes. Optimizations --- .../autoingest/AddArchiveTask.java | 73 ++++++++++++------- .../ArchiveExtractorDSProcessor.java | 2 + 2 files changed, 47 insertions(+), 28 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index d883a9b34f..e0f80780fc 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -22,6 +22,7 @@ import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -30,6 +31,7 @@ import java.util.UUID; import java.util.logging.Level; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; +import org.openide.util.Lookup; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.LocalDiskDSProcessor; import org.sleuthkit.autopsy.casemodule.LocalFilesDSProcessor; @@ -113,11 +115,23 @@ class AddArchiveTask implements Runnable { // extract contents of ZIP archive into destination folder progressMonitor.setProgressText(String.format("Extracting archive contents to: %s", destinationFolder.toString())); List extractedFiles = ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); + int numExtractedFilesRemaining = extractedFiles.size(); + + // lookup all AutomatedIngestDataSourceProcessors so that we only do it once + Collection processorCandidates = Lookup.getDefault().lookupAll(AutoIngestDataSourceProcessor.class); // do processing - for (String file 
: extractedFiles) { + for (String file : extractedFiles) { + + // we only care about files, skip directories + File fileObject = new File(file); + if (fileObject.isDirectory()) { + numExtractedFilesRemaining--; + continue; + } + // identify all "valid" DSPs that can process this file - List validDataSourceProcessors = getValidDataSourceProcessors(Paths.get(file), errorMessages); + List validDataSourceProcessors = getValidDataSourceProcessors(Paths.get(file), errorMessages, processorCandidates); if (validDataSourceProcessors.isEmpty()) { continue; } @@ -145,7 +159,7 @@ class AddArchiveTask implements Runnable { } // Copy it to a different folder - FileUtils.copyFileToDirectory(new File(file), newFolder.toFile()); + FileUtils.copyFileToDirectory(fileObject, newFolder.toFile()); Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); // Try each DSP in decreasing order of confidence @@ -192,7 +206,8 @@ class AddArchiveTask implements Runnable { // one of the DSPs successfully processed the data source. delete the // copy of the data source in the original extracted archive folder. // otherwise the data source is going to be added again as a logical file. - FileUtils.deleteQuietly(Paths.get(file).toFile()); + numExtractedFilesRemaining--; + FileUtils.deleteQuietly(fileObject); } else { // none of the DSPs were able to process the data source. delete the // copy of the data source in the temporary folder. 
the data source is @@ -203,28 +218,30 @@ class AddArchiveTask implements Runnable { // after all archive contents have been examined (and moved to separate folders if necessary), // add remaining extracted contents as one logical file set - progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); - logger.log(Level.INFO, "Adding directory {0} as logical file set", destinationFolder.toString()); - synchronized (archiveDspLock) { - UUID taskId = UUID.randomUUID(); - currentCase.notifyAddingDataSource(taskId); - DataSource internalDataSource = new DataSource(deviceId, destinationFolder); - DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); + if (numExtractedFilesRemaining > 0) { + progressMonitor.setProgressText(String.format("Adding: %s", destinationFolder.toString())); + logger.log(Level.INFO, "Adding directory {0} as logical file set", destinationFolder.toString()); + synchronized (archiveDspLock) { + UUID taskId = UUID.randomUUID(); + currentCase.notifyAddingDataSource(taskId); + DataSource internalDataSource = new DataSource(deviceId, destinationFolder); + DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); - // folder where archive was extracted to - List pathsList = new ArrayList<>(); - pathsList.add(destinationFolder.toString()); + // folder where archive was extracted to + List pathsList = new ArrayList<>(); + pathsList.add(destinationFolder.toString()); - // use archive file name as the name of the logical file set - String archiveFileName = FilenameUtils.getName(archivePath); + // use archive file name as the name of the logical file set + String archiveFileName = FilenameUtils.getName(archivePath); - LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); - localFilesDSP.run(deviceId, archiveFileName, pathsList, progressMonitor, 
internalArchiveDspCallBack); + LocalFilesDSProcessor localFilesDSP = new LocalFilesDSProcessor(); + localFilesDSP.run(deviceId, archiveFileName, pathsList, progressMonitor, internalArchiveDspCallBack); - archiveDspLock.wait(); + archiveDspLock.wait(); - // at this point we got the content object(s) from the current DSP - newDataSources.addAll(internalDataSource.getContent()); + // at this point we got the content object(s) from the current DSP + newDataSources.addAll(internalDataSource.getContent()); + } } } catch (Exception ex) { criticalErrorOccurred = true; @@ -254,23 +271,23 @@ class AddArchiveTask implements Runnable { * * @return Ordered list of applicable DSPs */ - private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages) { - Map validDataSourceProcessorsMap; + private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages, + Collection processorCandidates) { + + // Get an ordered list of data source processors to try + List validDataSourceProcessors; try { - validDataSourceProcessorsMap = DataSourceProcessorUtility.getDataSourceProcessor(dataSourcePath); + validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSourcePath, processorCandidates); } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS return Collections.emptyList(); } - if (validDataSourceProcessorsMap.isEmpty()) { + if (validDataSourceProcessors.isEmpty()) { return Collections.emptyList(); } - // Get an ordered list of data source processors to try - List validDataSourceProcessors = DataSourceProcessorUtility.orderDataSourceProcessorsByConfidence(validDataSourceProcessorsMap); - for (Iterator iterator = validDataSourceProcessors.iterator(); iterator.hasNext();) { AutoIngestDataSourceProcessor 
selectedProcessor = iterator.next(); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index 3c31d8d2d0..81d3ab9f8f 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -28,6 +28,7 @@ import javax.swing.JPanel; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import org.openide.util.lookup.ServiceProviders; +import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; @@ -39,6 +40,7 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; * be used independently of the wizard. 
*/ @ServiceProviders(value={ + @ServiceProvider(service=DataSourceProcessor.class), @ServiceProvider(service=AutoIngestDataSourceProcessor.class)} ) @NbBundle.Messages({ From 3c433fcee3af7f5d318a5c07d5c9b1862dd2385f Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 9 Nov 2017 13:13:06 -0500 Subject: [PATCH 33/45] Optimization for getting, storing, and sorting DSP list only once --- .../autoingest/AddArchiveTask.java | 90 +++++++++++-------- 1 file changed, 51 insertions(+), 39 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index e0f80780fc..b743d7ab0b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; import java.util.UUID; import java.util.logging.Level; +import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.openide.util.Lookup; @@ -95,7 +96,7 @@ class AddArchiveTask implements Runnable { result = DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS; callback.done(result, errorMessages, newDataSources); } - + logger.log(Level.INFO, "Using Archive Extractor DSP to process archive {0} ", archivePath); // extract the archive and pass the extracted folder as input @@ -116,22 +117,23 @@ class AddArchiveTask implements Runnable { progressMonitor.setProgressText(String.format("Extracting archive contents to: %s", destinationFolder.toString())); List extractedFiles = ArchiveUtil.unpackArchiveFile(archivePath, destinationFolder.toString()); int numExtractedFilesRemaining = extractedFiles.size(); - - // lookup all AutomatedIngestDataSourceProcessors so that we only do it once - Collection processorCandidates = 
Lookup.getDefault().lookupAll(AutoIngestDataSourceProcessor.class); + // lookup all AutomatedIngestDataSourceProcessors so that we only do it once. + // LocalDisk, LocalFiles, and ArchiveDSP are removed from the list. + List processorCandidates = getListOfValidDataSourceProcessors(); + // do processing for (String file : extractedFiles) { - + // we only care about files, skip directories File fileObject = new File(file); if (fileObject.isDirectory()) { numExtractedFilesRemaining--; continue; } - + // identify all "valid" DSPs that can process this file - List validDataSourceProcessors = getValidDataSourceProcessors(Paths.get(file), errorMessages, processorCandidates); + List validDataSourceProcessors = getDataSourceProcessorsForFile(Paths.get(file), errorMessages, processorCandidates); if (validDataSourceProcessors.isEmpty()) { continue; } @@ -168,7 +170,7 @@ class AddArchiveTask implements Runnable { boolean success = false; for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { - logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new Object[]{selectedProcessor.getDataSourceType(), file}); + logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new Object[]{selectedProcessor.getDataSourceType(), file}); synchronized (archiveDspLock) { try { @@ -179,12 +181,12 @@ class AddArchiveTask implements Runnable { // at this point we got the content object(s) from the current DSP. 
// check whether the data source was processed successfully - if ((internalDataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS) || - internalDataSource.getContent().isEmpty()) { + if ((internalDataSource.getResultDataSourceProcessorResultCode() == CRITICAL_ERRORS) + || internalDataSource.getContent().isEmpty()) { // move onto the the next DSP that can process this data source continue; } - + // if we are here it means the data source was addedd successfully success = true; newDataSources.addAll(internalDataSource.getContent()); @@ -201,7 +203,7 @@ class AddArchiveTask implements Runnable { } } } - + if (success) { // one of the DSPs successfully processed the data source. delete the // copy of the data source in the original extracted archive folder. @@ -260,55 +262,65 @@ class AddArchiveTask implements Runnable { callback.done(result, errorMessages, newDataSources); } } - + + /** + * Get a list of data source processors. LocalDisk, LocalFiles, and + * ArchiveDSP are removed from the list. + * + * @return List of data source processors + */ + private List getListOfValidDataSourceProcessors() { + + Collection processorCandidates = Lookup.getDefault().lookupAll(AutoIngestDataSourceProcessor.class); + + List validDataSourceProcessors = processorCandidates.stream().collect(Collectors.toList()); + + for (Iterator iterator = validDataSourceProcessors.iterator(); iterator.hasNext();) { + AutoIngestDataSourceProcessor selectedProcessor = iterator.next(); + + // skip local files and local disk DSPs, only looking for "valid" data sources. 
+ // also skip nested archive files, those will be ingested as logical files and extracted during ingest + if ((selectedProcessor instanceof LocalDiskDSProcessor) + || (selectedProcessor instanceof LocalFilesDSProcessor) + || (selectedProcessor instanceof ArchiveExtractorDSProcessor)) { + iterator.remove(); + } + } + + return validDataSourceProcessors; + } + /** * Get a list of data source processors that can process the data source of * interest. The list is sorted by confidence in decreasing order. - * LocalDisk, LocalFiles, and ArchiveDSP are removed from the list. * * @param dataSourcePath Full path to the data source - * @param errorMessages List for error messages + * @param errorMessages List for error messages + * @param processorCandidates List of AutoIngestDataSourceProcessor to try * * @return Ordered list of applicable DSPs */ - private List getValidDataSourceProcessors(Path dataSourcePath, List errorMessages, - Collection processorCandidates) { - + private List getDataSourceProcessorsForFile(Path dataSourcePath, List errorMessages, + List processorCandidates) { + + // Get an ordered list of data source processors to try - List validDataSourceProcessors; + List validDataSourceProcessorsForFile = Collections.emptyList(); try { - validDataSourceProcessors = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSourcePath, processorCandidates); + validDataSourceProcessorsForFile = DataSourceProcessorUtility.getOrderedListOfDataSourceProcessors(dataSourcePath, processorCandidates); } catch (AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException ex) { criticalErrorOccurred = true; errorMessages.add(ex.getMessage()); logger.log(Level.SEVERE, String.format("Critical error occurred while extracting archive %s", archivePath), ex); //NON-NLS return Collections.emptyList(); } - if (validDataSourceProcessors.isEmpty()) { - return Collections.emptyList(); - } - - for (Iterator iterator = validDataSourceProcessors.iterator(); iterator.hasNext();) 
{ - AutoIngestDataSourceProcessor selectedProcessor = iterator.next(); - - // skip local files and local disk DSPs, only looking for "valid" data sources. - // also skip nested archive files, those will be ingested as logical files and extracted during ingest - if ( (selectedProcessor instanceof LocalDiskDSProcessor) || - (selectedProcessor instanceof LocalFilesDSProcessor) || - (selectedProcessor instanceof ArchiveExtractorDSProcessor) ) { - iterator.remove(); - } - } - - return validDataSourceProcessors; + return validDataSourceProcessorsForFile; } - /** * Create a directory in ModuleOutput folder based on input file name. A * time stamp is appended to the directory name. * - * @param fileName File name + * @param fileName File name * @param baseDirectory Base directory. Typically the case output directory. * * @return Full path to the new directory From c1808f4a034211d124652efee4b0d2320b88bd6d Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 9 Nov 2017 13:23:58 -0500 Subject: [PATCH 34/45] Bug fix --- .../autopsy/experimental/autoingest/AddArchiveTask.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index b743d7ab0b..5914b95160 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -165,15 +165,14 @@ class AddArchiveTask implements Runnable { Path newFilePath = Paths.get(newFolder.toString(), FilenameUtils.getName(file)); // Try each DSP in decreasing order of confidence - UUID taskId = UUID.randomUUID(); - currentCase.notifyAddingDataSource(taskId); boolean success = false; for (AutoIngestDataSourceProcessor selectedProcessor : validDataSourceProcessors) { logger.log(Level.INFO, "Using {0} to process extracted file {1} ", new 
Object[]{selectedProcessor.getDataSourceType(), file}); - synchronized (archiveDspLock) { try { + UUID taskId = UUID.randomUUID(); + currentCase.notifyAddingDataSource(taskId); DataSource internalDataSource = new DataSource(deviceId, newFilePath); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); selectedProcessor.process(deviceId, newFilePath, progressMonitor, internalArchiveDspCallBack); From 3c95a7a70a5c2c2b858ea768c9e01893f1053119 Mon Sep 17 00:00:00 2001 From: Eugene Livis Date: Thu, 9 Nov 2017 13:28:00 -0500 Subject: [PATCH 35/45] ArchiveExtractorDSProcessor no longer implements DataSourceProcessor interface --- .../autopsy/experimental/autoingest/AddArchiveTask.java | 1 - .../autoingest/ArchiveExtractorDSProcessor.java | 6 +----- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index 5914b95160..be6a047bc3 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -26,7 +26,6 @@ import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.UUID; import java.util.logging.Level; import java.util.stream.Collectors; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java index 81d3ab9f8f..0eba2b8f95 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/ArchiveExtractorDSProcessor.java @@ -23,12 +23,10 @@ 
import java.nio.file.Path; import java.util.UUID; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import java.util.concurrent.Future; import javax.swing.JPanel; import org.openide.util.NbBundle; import org.openide.util.lookup.ServiceProvider; import org.openide.util.lookup.ServiceProviders; -import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessor; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback; import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorProgressMonitor; import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; @@ -40,7 +38,6 @@ import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor; * be used independently of the wizard. */ @ServiceProviders(value={ - @ServiceProvider(service=DataSourceProcessor.class), @ServiceProvider(service=AutoIngestDataSourceProcessor.class)} ) @NbBundle.Messages({ @@ -55,7 +52,6 @@ public class ArchiveExtractorDSProcessor implements AutoIngestDataSourceProcesso private boolean setDataSourceOptionsCalled; private final ExecutorService jobProcessingExecutor; - private Future jobProcessingTaskFuture; private static final String ARCHIVE_DSP_THREAD_NAME = "Archive-DSP-%d"; private AddArchiveTask addArchiveTask; @@ -159,7 +155,7 @@ public class ArchiveExtractorDSProcessor implements AutoIngestDataSourceProcesso */ public void run(String deviceId, String archivePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callback) { addArchiveTask = new AddArchiveTask(deviceId, archivePath, progressMonitor, callback); - jobProcessingTaskFuture = jobProcessingExecutor.submit(addArchiveTask); + jobProcessingExecutor.submit(addArchiveTask); } /** From a38763a85f3c07081ebc03e828f493bc78fcc7c9 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\zhaohui" Date: Thu, 9 Nov 2017 14:46:33 -0500 Subject: [PATCH 36/45] 3155: Busy cursor in ingest inbox --- 
.../BlackboardResultViewer.java | 3 + .../autopsy/directorytree/Bundle.properties | 2 - .../directorytree/Bundle_ja.properties | 4 +- .../DirectoryTreeTopComponent.java | 20 +---- .../ingest/IngestMessageDetailsPanel.java | 80 +++++-------------- 5 files changed, 26 insertions(+), 83 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/BlackboardResultViewer.java b/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/BlackboardResultViewer.java index c196adae4c..f32d82148e 100755 --- a/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/BlackboardResultViewer.java +++ b/Core/src/org/sleuthkit/autopsy/corecomponentinterfaces/BlackboardResultViewer.java @@ -24,7 +24,10 @@ import org.sleuthkit.datamodel.BlackboardArtifact; /** * Additional functionality of viewers supporting black board results such as * the directory tree + * + *@deprecated No longer used. */ +@Deprecated public interface BlackboardResultViewer { public static final String FINISHED_DISPLAY_EVT = "FINISHED_DISPLAY_EVT"; //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties index aa8f1f3a90..647c6f70d3 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle.properties @@ -59,8 +59,6 @@ DirectoryTreeFilterNode.action.collapseAll.text=Collapse All DirectoryTreeFilterNode.action.openFileSrcByAttr.text=Open File Search by Attributes DirectoryTreeFilterNode.action.runIngestMods.text=Run Ingest Modules DirectoryTreeTopComponent.action.viewArtContent.text=View Artifact Content -DirectoryTreeTopComponent.moduleErr=Module Error -DirectoryTreeTopComponent.moduleErr.msg=A module caused an error listening to DirectoryTreeTopComponent updates. See log to determine which module. Some data could be incomplete. 
DirectoryTreeTopComponent.showRejectedCheckBox.text=Show Rejected Results ExplorerNodeActionVisitor.action.imgDetails.title=Image Details ExplorerNodeActionVisitor.action.extUnallocToSingleFiles=Extract Unallocated Space to Single Files diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle_ja.properties index 2fb64c5bea..6f208c3300 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/Bundle_ja.properties +++ b/Core/src/org/sleuthkit/autopsy/directorytree/Bundle_ja.properties @@ -56,8 +56,6 @@ DataResultFilterNode.action.viewInDir.text=\u30c7\u30a3\u30ec\u30af\u30c8\u30ea\ DirectoryTreeFilterNode.action.openFileSrcByAttr.text=\u5c5e\u6027\u306b\u3088\u308b\u30d5\u30a1\u30a4\u30eb\u691c\u7d22\u3092\u958b\u304f DirectoryTreeFilterNode.action.runIngestMods.text=\u30a4\u30f3\u30b8\u30a7\u30b9\u30c8\u30e2\u30b8\u30e5\u30fc\u30eb\u3092\u5b9f\u884c DirectoryTreeTopComponent.action.viewArtContent.text=\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8\u30b3\u30f3\u30c6\u30f3\u30c4\u3092\u8868\u793a -DirectoryTreeTopComponent.moduleErr=\u30e2\u30b8\u30e5\u30fc\u30eb\u30a8\u30e9\u30fc -DirectoryTreeTopComponent.moduleErr.msg=DirectoryTreeTopComponent\u30a2\u30c3\u30d7\u30c7\u30fc\u30c8\u3092\u78ba\u8a8d\u4e2d\u306b\u30e2\u30b8\u30e5\u30fc\u30eb\u304c\u30a8\u30e9\u30fc\u3092\u8d77\u3053\u3057\u307e\u3057\u305f\u3002\u3069\u306e\u30e2\u30b8\u30e5\u30fc\u30eb\u304b\u30ed\u30b0\u3092\u78ba\u8a8d\u3057\u3066\u4e0b\u3055\u3044\u3002\u4e00\u90e8\u306e\u30c7\u30fc\u30bf\u304c\u4e0d\u5b8c\u5168\u304b\u3082\u3057\u308c\u307e\u305b\u3093\u3002 ExplorerNodeActionVisitor.action.imgDetails.title=\u30a4\u30e1\u30fc\u30b8\u8a73\u7d30 ExplorerNodeActionVisitor.action.extUnallocToSingleFiles=\u672a\u5272\u308a\u5f53\u3066\u9818\u57df\u5185\u306e\u30c7\u30fc\u30bf\u3092\u30b7\u30f3\u30b0\u30eb\u30d5\u30a1\u30a4\u30eb\u306b\u62bd\u51fa 
ExplorerNodeActionVisitor.action.fileSystemDetails.title=\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0\u8a73\u7d30 @@ -87,4 +85,4 @@ ExtractUnallocAction.done.notifyMsg.completedExtract.msg=\u30d5\u30a1\u30a4\u30e ExtractUnallocAction.done.errMsg.title=\u62bd\u51fa\u30a8\u30e9\u30fc ExtractUnallocAction.done.errMsg.msg=\u672a\u5272\u308a\u5f53\u3066\u9818\u57df\u3092\u62bd\u51fa\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\uff1a{0} DirectoryTreeFilterNode.action.collapseAll.text=\u3059\u3079\u3066\u30b3\u30e9\u30d7\u30b9 -ExtractAction.done.notifyMsg.extractErr=\u4e0b\u8a18\u306e\u30d5\u30a1\u30a4\u30eb\u306e\u62bd\u51fa\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\uff1a {0} \ No newline at end of file +ExtractAction.done.notifyMsg.extractErr=\u4e0b\u8a18\u306e\u30d5\u30a1\u30a4\u30eb\u306e\u62bd\u51fa\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\uff1a {0} diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java index 03c265daee..9738f1f4a6 100755 --- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java +++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java @@ -27,7 +27,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; -import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -56,13 +55,11 @@ import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.core.UserPreferences; -import org.sleuthkit.autopsy.corecomponentinterfaces.BlackboardResultViewer; import org.sleuthkit.autopsy.corecomponentinterfaces.CoreComponentControl; import org.sleuthkit.autopsy.corecomponentinterfaces.DataExplorer; import 
org.sleuthkit.autopsy.corecomponents.DataResultTopComponent; import org.sleuthkit.autopsy.corecomponents.TableFilterNode; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.datamodel.ArtifactNodeSelectionInfo; import org.sleuthkit.autopsy.datamodel.BlackboardArtifactNode; import org.sleuthkit.autopsy.datamodel.CreditCards; @@ -99,7 +96,7 @@ import org.sleuthkit.datamodel.TskCoreException; @Messages({ "DirectoryTreeTopComponent.resultsView.title=Listing" }) -public final class DirectoryTreeTopComponent extends TopComponent implements DataExplorer, ExplorerManager.Provider, BlackboardResultViewer { +public final class DirectoryTreeTopComponent extends TopComponent implements DataExplorer, ExplorerManager.Provider { private final transient ExplorerManager em = new ExplorerManager(); private static DirectoryTreeTopComponent instance; @@ -851,7 +848,6 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat return false; } - @Override public void viewArtifact(final BlackboardArtifact art) { int typeID = art.getArtifactTypeID(); String typeName = art.getArtifactTypeName(); @@ -1064,28 +1060,14 @@ public final class DirectoryTreeTopComponent extends TopComponent implements Dat // Another thread is needed because we have to wait for dataResult to populate } - @Override public void viewArtifactContent(BlackboardArtifact art) { new ViewContextAction( NbBundle.getMessage(this.getClass(), "DirectoryTreeTopComponent.action.viewArtContent.text"), new BlackboardArtifactNode(art)).actionPerformed(null); } - @Override public void addOnFinishedListener(PropertyChangeListener l) { DirectoryTreeTopComponent.this.addPropertyChangeListener(l); } - void fireViewerComplete() { - - try { - firePropertyChange(BlackboardResultViewer.FINISHED_DISPLAY_EVT, 0, 1); - } catch (Exception e) { - LOGGER.log(Level.SEVERE, "DirectoryTreeTopComponent listener threw exception", e); //NON-NLS - 
MessageNotifyUtil.Notify.show(NbBundle.getMessage(this.getClass(), "DirectoryTreeTopComponent.moduleErr"), - NbBundle.getMessage(this.getClass(), - "DirectoryTreeTopComponent.moduleErr.msg"), - MessageNotifyUtil.MessageType.ERROR); - } - } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessageDetailsPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessageDetailsPanel.java index 0a8222bca0..2659ce400a 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessageDetailsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessageDetailsPanel.java @@ -21,14 +21,11 @@ package org.sleuthkit.autopsy.ingest; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; -import java.beans.PropertyChangeEvent; -import java.beans.PropertyChangeListener; import javax.swing.JMenuItem; import javax.swing.text.html.HTMLEditorKit; import javax.swing.text.html.StyleSheet; -import org.openide.util.Lookup; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.corecomponentinterfaces.BlackboardResultViewer; +import org.sleuthkit.autopsy.directorytree.DirectoryTreeTopComponent; import org.sleuthkit.autopsy.ingest.IngestMessagePanel.IngestMessageGroup; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -40,7 +37,8 @@ import org.sleuthkit.datamodel.TskException; */ class IngestMessageDetailsPanel extends javax.swing.JPanel { - private IngestMessageMainPanel mainPanel; + private final IngestMessageMainPanel mainPanel; + private final DirectoryTreeTopComponent dtc = DirectoryTreeTopComponent.findInstance(); /** * Creates new form IngestMessageDetailsPanel @@ -69,18 +67,6 @@ class IngestMessageDetailsPanel extends javax.swing.JPanel { styleSheet.addRule("td {white-space:pre-wrap;overflow:hidden;}"); //NON-NLS styleSheet.addRule("th {font-weight:bold;}"); //NON-NLS - BlackboardResultViewer v = Lookup.getDefault().lookup(BlackboardResultViewer.class); - 
v.addOnFinishedListener(new PropertyChangeListener() { - - @Override - public void propertyChange(PropertyChangeEvent evt) { - if (evt.getPropertyName().equals(BlackboardResultViewer.FINISHED_DISPLAY_EVT)) { - artifactViewerFinished(); - } - } - - }); - //right click messageDetailsPane.setComponentPopupMenu(rightClickMenu); ActionListener actList = new ActionListener() { @@ -193,11 +179,27 @@ class IngestMessageDetailsPanel extends javax.swing.JPanel { }// //GEN-END:initComponents private void viewContentButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewContentButtonActionPerformed - viewContent(evt); + messageDetailsPane.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + final IngestMessageGroup messageGroup = mainPanel.getMessagePanel().getSelectedMessage(); + if (messageGroup != null) { + BlackboardArtifact art = messageGroup.getData(); + if (art != null) { + dtc.viewArtifactContent(art); + } + } + messageDetailsPane.setCursor(null); }//GEN-LAST:event_viewContentButtonActionPerformed private void viewArtifactButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_viewArtifactButtonActionPerformed - viewArtifact(evt); + messageDetailsPane.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); + final IngestMessageGroup messageGroup = mainPanel.getMessagePanel().getSelectedMessage(); + if (messageGroup != null) { + BlackboardArtifact art = messageGroup.getData(); + if (art != null) { + dtc.viewArtifact(art); + } + } + messageDetailsPane.setCursor(null); }//GEN-LAST:event_viewArtifactButtonActionPerformed private void backButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_backButtonActionPerformed @@ -216,46 +218,6 @@ class IngestMessageDetailsPanel extends javax.swing.JPanel { private javax.swing.JButton viewContentButton; // End of variables declaration//GEN-END:variables - private void viewArtifact(java.awt.event.ActionEvent evt) { - artifactViewerInvoked(); - - final 
IngestMessageGroup messageGroup = mainPanel.getMessagePanel().getSelectedMessage(); - if (messageGroup != null) { - BlackboardArtifact art = messageGroup.getData(); - if (art != null) { - BlackboardResultViewer v = Lookup.getDefault().lookup(BlackboardResultViewer.class); - v.viewArtifact(art); - } - } - - } - - private void viewContent(java.awt.event.ActionEvent evt) { - artifactViewerInvoked(); - - final IngestMessageGroup messageGroup = mainPanel.getMessagePanel().getSelectedMessage(); - if (messageGroup != null) { - BlackboardArtifact art = messageGroup.getData(); - if (art != null) { - BlackboardResultViewer v = Lookup.getDefault().lookup(BlackboardResultViewer.class); - v.viewArtifactContent(art); - } - } - } - - private void artifactViewerInvoked() { - //viewArtifactButton.setEnabled(false); - //viewContentButton.setEnabled(false); - messageDetailsPane.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - - } - - private void artifactViewerFinished() { - //viewArtifactButton.setEnabled(true); - //viewContentButton.setEnabled(true); - messageDetailsPane.setCursor(null); - } - /** * Display the details of a given message * From ca64d9a3977450207a1c17bea13ded8599c902b0 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Fri, 10 Nov 2017 10:28:47 -0500 Subject: [PATCH 37/45] Pulling node data to determine case status. 
--- .../casemodule/MultiUserCaseManager.java | 142 ++++++++++++------ .../CaseNodeData.java | 2 +- .../autoingest/AutoIngestManager.java | 117 ++++++--------- 3 files changed, 145 insertions(+), 116 deletions(-) rename Core/src/org/sleuthkit/autopsy/{casemodule => coordinationservice}/CaseNodeData.java (98%) diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/MultiUserCaseManager.java b/Core/src/org/sleuthkit/autopsy/casemodule/MultiUserCaseManager.java index d0baf94bc9..bedc09d799 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/MultiUserCaseManager.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/MultiUserCaseManager.java @@ -30,6 +30,7 @@ import java.util.Date; import java.util.List; import java.util.Objects; import java.util.logging.Level; +import org.sleuthkit.autopsy.coordinationservice.CaseNodeData; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coreutils.Logger; @@ -40,7 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger; final class MultiUserCaseManager { private static final Logger LOGGER = Logger.getLogger(MultiUserCaseManager.class.getName()); - private static final String LOG_FILE_NAME = "auto_ingest_log.txt"; + private static final String ALERT_FILE_NAME = "autoingest.alert"; private static MultiUserCaseManager instance; private CoordinationService coordinationService; @@ -82,19 +83,74 @@ final class MultiUserCaseManager { List cases = new ArrayList<>(); List nodeList = coordinationService.getNodeList(CoordinationService.CategoryNode.CASES); for (String node : nodeList) { - Path casePath = Paths.get(node); - File caseFolder = casePath.toFile(); - if(caseFolder.exists()) { - File[] autFiles = caseFolder.listFiles((dir, name) -> name.toLowerCase().endsWith(".aut")); - if(autFiles != null && autFiles.length > 0) { - try { - CaseMetadata caseMetadata = new 
CaseMetadata(Paths.get(autFiles[0].getAbsolutePath())); - cases.add(new MultiUserCase(casePath, caseMetadata)); - } catch (CaseMetadata.CaseMetadataException | MultiUserCase.MultiUserCaseException ex) { - LOGGER.log(Level.SEVERE, String.format("Error reading case metadata file '%s'.", autFiles[0].getAbsolutePath()), ex); + Path casePath = Paths.get(node); + File caseFolder = casePath.toFile(); + if (caseFolder.exists()) { + /* + * Search for '*.aut' and 'autoingest.alert' files. + */ + File[] fileArray = caseFolder.listFiles(); + if (fileArray == null) { + continue; + } + String autFilePath = null; + boolean alertFileFound = false; + for (File file : fileArray) { + String name = file.getName().toLowerCase(); + if (autFilePath == null && name.endsWith(".aut")) { + autFilePath = file.getAbsolutePath(); + if (!alertFileFound) { + continue; + } + } + if (!alertFileFound && name.endsWith(ALERT_FILE_NAME)) { + alertFileFound = true; + } + if (autFilePath != null && alertFileFound) { + break; + } + } + + if (autFilePath != null) { + try { + CaseStatus caseStatus; + if (alertFileFound) { + /* + * When an alert file exists, ignore the node data + * and use the ALERT status. + */ + caseStatus = CaseStatus.ALERT; + } else { + byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, node); + if (rawData != null && rawData.length > 0) { + /* + * When node data exists, use the status stored + * in the node data. + */ + CaseNodeData caseNodeData = new CaseNodeData(rawData); + if (caseNodeData.getErrorsOccurred()) { + caseStatus = CaseStatus.ALERT; + } else { + caseStatus = CaseStatus.OK; + } + } else { + /* + * When no node data is available, use the 'OK' + * status to avoid confusing the end-user. 
+ */ + caseStatus = CaseStatus.OK; } } + + CaseMetadata caseMetadata = new CaseMetadata(Paths.get(autFilePath)); + cases.add(new MultiUserCase(casePath, caseMetadata, caseStatus)); + } catch (CaseMetadata.CaseMetadataException | MultiUserCase.MultiUserCaseException ex) { + LOGGER.log(Level.SEVERE, String.format("Error reading case metadata file '%s'.", autFilePath), ex); + } catch (InterruptedException | CaseNodeData.InvalidDataException ex) { + LOGGER.log(Level.SEVERE, String.format("Error reading case node data for '%s'.", node), ex); + } } + } } return cases; } @@ -112,7 +168,7 @@ final class MultiUserCaseManager { */ Case.openAsCurrentCase(caseMetadataFilePath.toString()); } - + /** * Exception type thrown when there is an error completing a multi-user case * manager operation. @@ -143,7 +199,7 @@ final class MultiUserCaseManager { } } - + /** * A representation of a multi-user case. */ @@ -154,21 +210,22 @@ final class MultiUserCaseManager { private final String metadataFileName; private final Date createDate; private final Date lastAccessedDate; + private CaseStatus status; /** * Constructs a representation of a multi-user case * * @param caseDirectoryPath The case directory path. - * @param caseMetadata The case metadata. - * - * @throws MultiUserCaseException If no case metadata (.aut) - * file is found in the case - * directory. + * @param caseMetadata The case metadata. + * + * @throws MultiUserCaseException If no case metadata (.aut) file is + * found in the case directory. 
*/ - MultiUserCase(Path caseDirectoryPath, CaseMetadata caseMetadata) throws MultiUserCaseException { + MultiUserCase(Path caseDirectoryPath, CaseMetadata caseMetadata, CaseStatus status) throws MultiUserCaseException { this.caseDirectoryPath = caseDirectoryPath; caseDisplayName = caseMetadata.getCaseDisplayName(); metadataFileName = caseMetadata.getFilePath().getFileName().toString(); + this.status = status; BasicFileAttributes fileAttrs = null; try { fileAttrs = Files.readAttributes(Paths.get(caseDirectoryPath.toString(), metadataFileName), BasicFileAttributes.class); @@ -194,8 +251,8 @@ final class MultiUserCaseManager { } /** - * Gets the case display name. This may differ from the name supplied to the - * directory or metadata file names if a case has been renamed. + * Gets the case display name. This may differ from the name supplied to + * the directory or metadata file names if a case has been renamed. * * @return The case display name. */ @@ -204,8 +261,8 @@ final class MultiUserCaseManager { } /** - * Gets the creation date for the case, defined as the create time of the - * case metadata file. + * Gets the creation date for the case, defined as the create time of + * the case metadata file. * * @return The case creation date. */ @@ -214,8 +271,8 @@ final class MultiUserCaseManager { } /** - * Gets the last accessed date for the case, defined as the last accessed - * time of the case metadata file. + * Gets the last accessed date for the case, defined as the last + * accessed time of the case metadata file. * * @return The last accessed date. */ @@ -225,7 +282,7 @@ final class MultiUserCaseManager { /** * Gets metadata (.aut) file name. - * + * * @return The metadata file name. */ String getMetadataFileName() { @@ -233,17 +290,12 @@ final class MultiUserCaseManager { } /** - * Gets the status of this case based on the auto ingest result file in the - * case directory. + * Gets the status of this case. * * @return See CaseStatus enum definition. 
*/ CaseStatus getStatus() { - if(caseDirectoryPath.resolve("autoingest.alert").toFile().exists()) { - return CaseStatus.ALERT; - } else { - return CaseStatus.OK; - } + return status; } /** @@ -252,7 +304,7 @@ final class MultiUserCaseManager { * @param caseDirectoryPath The case directory path. * * @return Case metadata. - * + * * @throws CaseMetadata.CaseMetadataException If the CaseMetadata object * cannot be constructed. * @throws MultiUserCaseException If no case metadata (.aut) @@ -276,7 +328,7 @@ final class MultiUserCaseManager { } } - if(autFile == null || !autFile.isFile()) { + if (autFile == null || !autFile.isFile()) { throw new MultiUserCaseException(String.format("No case metadata (.aut) file found in the case directory '%s'.", caseDirectoryPath.toString())); } @@ -334,14 +386,15 @@ final class MultiUserCaseManager { static class LastAccessedDateDescendingComparator implements Comparator { /** - * Compares two MultiUserCase objects for order based on last accessed - * date (descending). + * Compares two MultiUserCase objects for order based on last + * accessed date (descending). * * @param object The first MultiUserCase object * @param otherObject The second MultiUserCase object. * - * @return A negative integer, zero, or a positive integer as the first - * argument is less than, equal to, or greater than the second. + * @return A negative integer, zero, or a positive integer as the + * first argument is less than, equal to, or greater than + * the second. */ @Override public int compare(MultiUserCase object, MultiUserCase otherObject) { @@ -357,8 +410,8 @@ final class MultiUserCaseManager { private static final long serialVersionUID = 1L; /** - * Constructs an exception to throw when there is a problem creating a - * multi-user case. + * Constructs an exception to throw when there is a problem creating + * a multi-user case. * * @param message The exception message. 
*/ @@ -367,11 +420,12 @@ final class MultiUserCaseManager { } /** - * Constructs an exception to throw when there is a problem creating a - * multi-user case. + * Constructs an exception to throw when there is a problem creating + * a multi-user case. * * @param message The exception message. - * @param cause The cause of the exception, if it was an exception. + * @param cause The cause of the exception, if it was an + * exception. */ private MultiUserCaseException(String message, Throwable cause) { super(message, cause); diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java b/Core/src/org/sleuthkit/autopsy/coordinationservice/CaseNodeData.java similarity index 98% rename from Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java rename to Core/src/org/sleuthkit/autopsy/coordinationservice/CaseNodeData.java index 1eceb084e6..0b220e04b2 100755 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java +++ b/Core/src/org/sleuthkit/autopsy/coordinationservice/CaseNodeData.java @@ -16,7 +16,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.sleuthkit.autopsy.casemodule; +package org.sleuthkit.autopsy.coordinationservice; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 2ce6e2cba8..e6187877c3 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -64,7 +64,7 @@ import org.sleuthkit.autopsy.casemodule.Case.CaseType; import org.sleuthkit.autopsy.casemodule.CaseActionException; import org.sleuthkit.autopsy.casemodule.CaseDetails; import org.sleuthkit.autopsy.casemodule.CaseMetadata; -import org.sleuthkit.autopsy.casemodule.CaseNodeData; +import org.sleuthkit.autopsy.coordinationservice.CaseNodeData; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.Lock; @@ -867,9 +867,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /** * Sets the coordination service manifest node. * - * Note that a new auto ingest job node data object will be created from - * the job passed in. Thus, if the data version of the node has changed, - * the node will be "upgraded" as well as updated. + * Note that a new auto ingest job node data object will be created from the + * job passed in. Thus, if the data version of the node has changed, the + * node will be "upgraded" as well as updated. * * @param job The auto ingest job. 
*/ @@ -879,14 +879,19 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen byte[] rawData = nodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData); } - + /** - * Sets the coordination service case node. + * Sets the error flag for case node data given a case directory path. * - * @param caseNodeData The case node data. - * @param caseDirectoryPath The case directory. + * @param caseDirectoryPath The case directory path. + * + * @throws CoordinationService.CoordinationServiceException + * @throws InterruptedException + * @throws CaseNodeData.InvalidDataException */ - void updateCoordinationServiceCaseNode(CaseNodeData caseNodeData, Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException { + private void setCaseNodeDataErrorsOccurred(Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException, CaseNodeData.InvalidDataException { + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); byte[] rawData = caseNodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString(), rawData); } @@ -1052,8 +1057,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (null != manifest) { /* - * Update the mapping of case names to manifest paths that is - * used for case deletion. + * Update the mapping of case names to manifest paths that + * is used for case deletion. */ String caseName = manifest.getCaseName(); Path manifestPath = manifest.getFilePath(); @@ -1067,8 +1072,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } /* - * Add a job to the pending jobs queue, the completed jobs list, - * or do crashed job recovery, as required. 
+ * Add a job to the pending jobs queue, the completed jobs + * list, or do crashed job recovery, as required. */ try { byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()); @@ -1088,7 +1093,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen break; case DELETED: /* - * Ignore jobs marked as "deleted." + * Ignore jobs marked as "deleted." */ break; default: @@ -1198,8 +1203,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException { /* * Create the coordination service manifest node data for the job. - * Note that getting the lock will create the node for the job - * (with no data) if it does not already exist. + * Note that getting the lock will create the node for the job (with + * no data) if it does not already exist. * * An exclusive lock is obtained before creating the node data * because another host may have already found the job, obtained an @@ -1229,7 +1234,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * the node that was processing the job crashed and the processing * status was not updated. * - * @param manifest The manifest for upgrading the node. + * @param manifest The manifest for upgrading the node. * @param jobNodeData The auto ingest job node data. * * @throws InterruptedException if the thread running the input @@ -1265,9 +1270,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } /* - * Update the coordination service manifest node for - * the job. If this fails, leave the recovery to - * another host. + * Update the coordination service manifest node for the + * job. If this fails, leave the recovery to another + * host. 
*/ try { updateCoordinationServiceManifestNode(job); @@ -1286,9 +1291,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ if (null != caseDirectoryPath) { try { - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); } catch (CaseNodeData.InvalidDataException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); } @@ -1367,11 +1370,10 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /* * Try to upgrade/update the coordination service manifest - * node data for the job. It is possible that two hosts - * will both try to obtain the lock to do the upgrade - * operation at the same time. If this happens, the host - * that is holding the lock will complete the upgrade - * operation. + * node data for the job. It is possible that two hosts will + * both try to obtain the lock to do the upgrade operation + * at the same time. If this happens, the host that is + * holding the lock will complete the upgrade operation. 
*/ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { @@ -1953,9 +1955,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (currentJob.isCanceled()) { Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); if (null != caseDirectoryPath) { - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath); jobLogger.logJobCancelled(); } @@ -2280,9 +2280,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!dataSource.exists()) { SYS_LOGGER.log(Level.SEVERE, "Missing data source for {0}", manifestPath); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logMissingDataSource(); return null; } @@ -2330,9 +2328,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // did we find a data source processor that can process the data source if (validDataSourceProcessorsMap.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. 
- CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logFailedToIdentifyDataSource(); SYS_LOGGER.log(Level.WARNING, "Unsupported data source {0} for {1}", new Object[]{dataSource.getPath(), manifestPath}); // NON-NLS @@ -2358,9 +2354,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // Log that the current DSP failed and set the error flag. We consider it an error // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()}); @@ -2404,9 +2398,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); 
jobLogger.logNoDataSourceContent(); } break; @@ -2418,9 +2410,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logNoDataSourceContent(); } break; @@ -2430,9 +2420,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{manifestPath, errorMessage}); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logFailedToAddDataSource(); break; } @@ -2446,9 +2434,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ SYS_LOGGER.log(Level.WARNING, "Cancellation while waiting for data source processor for {0}", manifestPath); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logDataSourceProcessorCancelled(); } } @@ -2502,9 +2488,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!cancelledModules.isEmpty()) { 
SYS_LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); for (String module : snapshot.getCancelledDataSourceIngestModules()) { SYS_LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); jobLogger.logIngestModuleCancelled(module); @@ -2514,9 +2498,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } else { currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logAnalysisCancelled(); CancellationReason cancellationReason = snapshot.getCancellationReason(); if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { @@ -2529,17 +2511,13 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, 
caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logIngestModuleStartupErrors(); throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); } else { SYS_LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logAnalysisStartupError(); throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } @@ -2548,9 +2526,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logIngestJobSettingsErrors(); throw new AnalysisStartupException("Error(s) in ingest job settings"); } @@ -2593,9 +2569,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } catch (FileExportException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error doing file export for %s", manifestPath), ex); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - 
caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logFileExportError(); } } @@ -2938,6 +2912,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen PARTIALLY_DELETED, FULLY_DELETED } + static final class AutoIngestManagerException extends Exception { private static final long serialVersionUID = 1L; @@ -2952,4 +2927,4 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } -} \ No newline at end of file +} From 5edea4c5955879c5e2fbc280219c21746de16b6c Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 13 Nov 2017 16:26:14 -0500 Subject: [PATCH 38/45] API added for getting data source content size. --- .../autopsy/casemodule/CaseNodeData.java | 141 +++++++++++ .../autoingest/AutoIngestControlPanel.form | 238 ++++++++++++++---- .../autoingest/AutoIngestControlPanel.java | 188 +++++++++----- .../autoingest/AutoIngestDashboard.java | 6 +- .../autoingest/AutoIngestManager.java | 197 ++++++++------- .../AutoIngestMetricsCollector.java | 160 ++++++++++++ .../autoingest/AutoIngestMetricsDialog.java | 50 +++- .../autoingest/AutoIngestMonitor.java | 95 +------ 8 files changed, 769 insertions(+), 306 deletions(-) create mode 100755 Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java create mode 100755 Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsCollector.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java new file mode 100755 index 0000000000..f4308e8954 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java @@ -0,0 +1,141 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2017 Basis Technology Corp. 
+ * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.casemodule; + +import java.nio.BufferUnderflowException; +import java.nio.ByteBuffer; + +/** + * An object that converts case data for a case directory coordination service + * node to and from byte arrays. + */ +public final class CaseNodeData { + + private static final int CURRENT_VERSION = 0; + + private int version; + private boolean errorsOccurred; + + /** + * Gets the current version of the case directory coordination service node + * data. + * + * @return The version number. + */ + public static int getCurrentVersion() { + return CaseNodeData.CURRENT_VERSION; + } + + /** + * Uses coordination service node data to construct an object that converts + * case data for a case directory coordination service node to and from byte + * arrays. + * + * @param nodeData The raw bytes received from the coordination service. + * + * @throws InvalidDataException If the node data buffer is smaller than + * expected. + */ + public CaseNodeData(byte[] nodeData) throws InvalidDataException { + if(nodeData == null || nodeData.length == 0) { + this.version = CURRENT_VERSION; + this.errorsOccurred = false; + } else { + /* + * Get fields from node data. 
+ */ + ByteBuffer buffer = ByteBuffer.wrap(nodeData); + try { + if (buffer.hasRemaining()) { + this.version = buffer.getInt(); + + /* + * Flags bit format: 76543210 + * 0-6 --> reserved for future use + * 7 --> errorsOccurred + */ + byte flags = buffer.get(); + this.errorsOccurred = (flags < 0); + } + } catch (BufferUnderflowException ex) { + throw new InvalidDataException("Node data is incomplete", ex); + } + } + } + + /** + * Gets whether or not any errors occurred during the processing of the job. + * + * @return True or false. + */ + public boolean getErrorsOccurred() { + return this.errorsOccurred; + } + + /** + * Sets whether or not any errors occurred during the processing of job. + * + * @param errorsOccurred True or false. + */ + public void setErrorsOccurred(boolean errorsOccurred) { + this.errorsOccurred = errorsOccurred; + } + + /** + * Gets the node data version number. + * + * @return The version number. + */ + public int getVersion() { + return this.version; + } + + /** + * Gets the node data as a byte array that can be sent to the coordination + * service. + * + * @return The node data as a byte array. + */ + public byte[] toArray() { + ByteBuffer buffer = ByteBuffer.allocate(5); + + buffer.putInt(this.version); + buffer.put((byte)(this.errorsOccurred ? 
0x80 : 0)); + + // Prepare the array + byte[] array = new byte[buffer.position()]; + buffer.rewind(); + buffer.get(array, 0, array.length); + + return array; + } + + public final static class InvalidDataException extends Exception { + + private static final long serialVersionUID = 1L; + + private InvalidDataException(String message) { + super(message); + } + + private InvalidDataException(String message, Throwable cause) { + super(message, cause); + } + } +} \ No newline at end of file diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.form b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.form index cec9d21492..f00e28374c 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.form +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.form @@ -17,46 +17,22 @@ - + - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - - - - + + + + + + + + + @@ -73,8 +49,36 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - @@ -99,9 +103,9 @@ - + - + @@ -135,16 +139,13 @@ - - - - - - - - - - + + + + + + + @@ -169,6 +170,7 @@ + @@ -192,6 +194,7 @@ + @@ -215,6 +218,7 @@ + @@ -231,6 +235,15 @@ + + + + + + + + + @@ -244,6 +257,15 @@ + + + + + + + + + @@ -287,6 +309,15 @@ + + + + + + + + + @@ -300,6 +331,15 @@ + + + + + + + + + @@ -313,6 +353,15 @@ + + + + + + + + + @@ -327,6 +376,15 @@ + + + + + + + + + @@ -340,6 +398,15 @@ + + + + + + + + + @@ -353,6 +420,15 @@ + + + + + + + + + @@ -366,6 +442,15 @@ + + + + + + + + + @@ -379,6 +464,15 @@ + + + + + + + + + @@ -419,6 +513,15 @@ + + + + + + + + + @@ -453,20 +556,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + \ No newline at end of file diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java index 7b893ae0b9..8024fbf93a 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java @@ -73,6 +73,7 @@ import org.sleuthkit.autopsy.ingest.IngestProgressSnapshotDialog; * one such panel per node. */ @Messages({ + "AutoIngestControlPanel.bnClusterMetrics.text=Cluster Metrics", "AutoIngestControlPanel.bnPause.text=Pause", "AutoIngestControlPanel.bnPause.paused=Paused", "AutoIngestControlPanel.bnPause.running=Running", @@ -116,7 +117,7 @@ import org.sleuthkit.autopsy.ingest.IngestProgressSnapshotDialog; "AutoIngestControlPanel.bnPrioritizeJob.actionCommand=", "AutoIngestControlPanel.lbServicesStatus.text=Services Status:", "AutoIngestControlPanel.tbServicesStatusMessage.text=", - "AutoIngestControlPanel.bnOpenLogDir.text=Open System Logs Directory", + "AutoIngestControlPanel.bnOpenLogDir.text=Open System Logs Folder", "AutoIngestControlPanel.bnReprocessJob.text=Reprocess Job", "AutoIngestControlPanel.bnPrioritizeFolder.label=", "AutoIngestControlPanel.Cancelling=Cancelling...", @@ -1198,10 +1199,12 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { lbServicesStatus = new javax.swing.JLabel(); tbServicesStatusMessage = new javax.swing.JTextField(); bnOpenLogDir = new javax.swing.JButton(); + bnClusterMetrics = new javax.swing.JButton(); bnReprocessJob = new javax.swing.JButton(); pendingTable.setModel(pendingTableModel); pendingTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.pendingTable.toolTipText")); // NOI18N + pendingTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); pendingTable.setRowHeight(20); pendingTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @@ -1219,6 
+1222,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { runningTable.setModel(runningTableModel); runningTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.runningTable.toolTipText")); // NOI18N + runningTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); runningTable.setRowHeight(20); runningTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @@ -1236,6 +1240,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { completedTable.setModel(completedTableModel); completedTable.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.completedTable.toolTipText")); // NOI18N + completedTable.setAutoResizeMode(javax.swing.JTable.AUTO_RESIZE_ALL_COLUMNS); completedTable.setRowHeight(20); completedTable.setSelectionModel(new DefaultListSelectionModel() { private static final long serialVersionUID = 1L; @@ -1253,6 +1258,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnCancelJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelJob.text")); // NOI18N bnCancelJob.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelJob.toolTipText")); // NOI18N + bnCancelJob.setMaximumSize(new java.awt.Dimension(162, 23)); + bnCancelJob.setMinimumSize(new java.awt.Dimension(162, 23)); + bnCancelJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnCancelJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnCancelJobActionPerformed(evt); @@ -1261,6 +1269,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnDeleteCase, 
org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeleteCase.text")); // NOI18N bnDeleteCase.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnDeleteCase.toolTipText")); // NOI18N + bnDeleteCase.setMaximumSize(new java.awt.Dimension(162, 23)); + bnDeleteCase.setMinimumSize(new java.awt.Dimension(162, 23)); + bnDeleteCase.setPreferredSize(new java.awt.Dimension(162, 23)); bnDeleteCase.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnDeleteCaseActionPerformed(evt); @@ -1278,6 +1289,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnRefresh, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnRefresh.text")); // NOI18N bnRefresh.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnRefresh.toolTipText")); // NOI18N + bnRefresh.setMaximumSize(new java.awt.Dimension(162, 23)); + bnRefresh.setMinimumSize(new java.awt.Dimension(162, 23)); + bnRefresh.setPreferredSize(new java.awt.Dimension(162, 23)); bnRefresh.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnRefreshActionPerformed(evt); @@ -1286,6 +1300,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnCancelModule, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelModule.text")); // NOI18N bnCancelModule.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnCancelModule.toolTipText")); // NOI18N + bnCancelModule.setMaximumSize(new java.awt.Dimension(162, 23)); + bnCancelModule.setMinimumSize(new java.awt.Dimension(162, 23)); + 
bnCancelModule.setPreferredSize(new java.awt.Dimension(162, 23)); bnCancelModule.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnCancelModuleActionPerformed(evt); @@ -1294,6 +1311,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnExit, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnExit.text")); // NOI18N bnExit.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnExit.toolTipText")); // NOI18N + bnExit.setMaximumSize(new java.awt.Dimension(162, 23)); + bnExit.setMinimumSize(new java.awt.Dimension(162, 23)); + bnExit.setPreferredSize(new java.awt.Dimension(162, 23)); bnExit.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnExitActionPerformed(evt); @@ -1303,6 +1323,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnOptions, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOptions.text")); // NOI18N bnOptions.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOptions.toolTipText")); // NOI18N bnOptions.setEnabled(false); + bnOptions.setMaximumSize(new java.awt.Dimension(162, 23)); + bnOptions.setMinimumSize(new java.awt.Dimension(162, 23)); + bnOptions.setPreferredSize(new java.awt.Dimension(162, 23)); bnOptions.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnOptionsActionPerformed(evt); @@ -1311,6 +1334,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnShowProgress, 
org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowProgress.text")); // NOI18N bnShowProgress.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowProgress.toolTipText")); // NOI18N + bnShowProgress.setMaximumSize(new java.awt.Dimension(162, 23)); + bnShowProgress.setMinimumSize(new java.awt.Dimension(162, 23)); + bnShowProgress.setPreferredSize(new java.awt.Dimension(162, 23)); bnShowProgress.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnShowProgressActionPerformed(evt); @@ -1319,6 +1345,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnPause, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.text")); // NOI18N bnPause.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPause.toolTipText")); // NOI18N + bnPause.setMaximumSize(new java.awt.Dimension(162, 23)); + bnPause.setMinimumSize(new java.awt.Dimension(162, 23)); + bnPause.setPreferredSize(new java.awt.Dimension(162, 23)); bnPause.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnPauseActionPerformed(evt); @@ -1327,6 +1356,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnPrioritizeCase, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeCase.text")); // NOI18N bnPrioritizeCase.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeCase.toolTipText")); // NOI18N + bnPrioritizeCase.setMaximumSize(new java.awt.Dimension(162, 23)); + bnPrioritizeCase.setMinimumSize(new java.awt.Dimension(162, 23)); 
+ bnPrioritizeCase.setPreferredSize(new java.awt.Dimension(162, 23)); bnPrioritizeCase.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnPrioritizeCaseActionPerformed(evt); @@ -1335,6 +1367,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnShowCaseLog, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowCaseLog.text")); // NOI18N bnShowCaseLog.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnShowCaseLog.toolTipText")); // NOI18N + bnShowCaseLog.setMaximumSize(new java.awt.Dimension(162, 23)); + bnShowCaseLog.setMinimumSize(new java.awt.Dimension(162, 23)); + bnShowCaseLog.setPreferredSize(new java.awt.Dimension(162, 23)); bnShowCaseLog.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnShowCaseLogActionPerformed(evt); @@ -1352,6 +1387,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { org.openide.awt.Mnemonics.setLocalizedText(bnPrioritizeJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.text")); // NOI18N bnPrioritizeJob.setToolTipText(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.toolTipText")); // NOI18N bnPrioritizeJob.setActionCommand(org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnPrioritizeJob.actionCommand")); // NOI18N + bnPrioritizeJob.setMaximumSize(new java.awt.Dimension(162, 23)); + bnPrioritizeJob.setMinimumSize(new java.awt.Dimension(162, 23)); + bnPrioritizeJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnPrioritizeJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent 
evt) { bnPrioritizeJobActionPerformed(evt); @@ -1367,13 +1405,29 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { tbServicesStatusMessage.setBorder(null); org.openide.awt.Mnemonics.setLocalizedText(bnOpenLogDir, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnOpenLogDir.text")); // NOI18N + bnOpenLogDir.setMaximumSize(new java.awt.Dimension(162, 23)); + bnOpenLogDir.setMinimumSize(new java.awt.Dimension(162, 23)); + bnOpenLogDir.setPreferredSize(new java.awt.Dimension(162, 23)); bnOpenLogDir.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnOpenLogDirActionPerformed(evt); } }); + org.openide.awt.Mnemonics.setLocalizedText(bnClusterMetrics, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnClusterMetrics.text")); // NOI18N + bnClusterMetrics.setMaximumSize(new java.awt.Dimension(162, 23)); + bnClusterMetrics.setMinimumSize(new java.awt.Dimension(162, 23)); + bnClusterMetrics.setPreferredSize(new java.awt.Dimension(162, 23)); + bnClusterMetrics.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + bnClusterMetricsActionPerformed(evt); + } + }); + org.openide.awt.Mnemonics.setLocalizedText(bnReprocessJob, org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.bnReprocessJob.text")); // NOI18N + bnReprocessJob.setMaximumSize(new java.awt.Dimension(162, 23)); + bnReprocessJob.setMinimumSize(new java.awt.Dimension(162, 23)); + bnReprocessJob.setPreferredSize(new java.awt.Dimension(162, 23)); bnReprocessJob.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { bnReprocessJobActionPerformed(evt); @@ -1387,38 +1441,20 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { 
.addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) - .addComponent(lbPending, javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(pendingScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 920, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(bnPrioritizeCase, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addComponent(bnPrioritizeJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addGroup(layout.createSequentialGroup() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() - .addComponent(bnPause) - .addGap(18, 18, 18) - .addComponent(bnRefresh, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(18, 18, 18) - .addComponent(bnOptions) - .addGap(18, 18, 18) - .addComponent(bnOpenLogDir) - .addGap(18, 18, 18) - .addComponent(bnExit, javax.swing.GroupLayout.PREFERRED_SIZE, 94, javax.swing.GroupLayout.PREFERRED_SIZE)) - .addGroup(layout.createSequentialGroup() - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(runningScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 920, javax.swing.GroupLayout.PREFERRED_SIZE) - .addComponent(completedScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 920, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addComponent(bnPause, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) - .addComponent(bnCancelJob, javax.swing.GroupLayout.PREFERRED_SIZE, 117, Short.MAX_VALUE) - .addComponent(bnShowProgress, javax.swing.GroupLayout.PREFERRED_SIZE, 116, Short.MAX_VALUE) - .addComponent(bnCancelModule, javax.swing.GroupLayout.PREFERRED_SIZE, 117, Short.MAX_VALUE) - .addComponent(bnDeleteCase, javax.swing.GroupLayout.PREFERRED_SIZE, 117, Short.MAX_VALUE) - .addComponent(bnShowCaseLog, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addComponent(bnReprocessJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) + .addComponent(bnRefresh, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(bnOptions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(bnOpenLogDir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(bnClusterMetrics, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(bnExit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addComponent(lbStatus) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) @@ -1429,11 +1465,32 @@ public final class 
AutoIngestControlPanel extends JPanel implements Observer { .addComponent(lbServicesStatus) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(tbServicesStatusMessage, javax.swing.GroupLayout.PREFERRED_SIZE, 861, javax.swing.GroupLayout.PREFERRED_SIZE))) - .addGap(0, 0, Short.MAX_VALUE))) - .addContainerGap()) + .addGap(0, 0, Short.MAX_VALUE)) + .addGroup(layout.createSequentialGroup() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(lbPending) + .addGroup(layout.createSequentialGroup() + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(runningScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(completedScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) + .addComponent(bnCancelJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnShowProgress, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnCancelModule, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnDeleteCase, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnShowCaseLog, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnReprocessJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) + .addGroup(layout.createSequentialGroup() + .addComponent(pendingScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 1021, javax.swing.GroupLayout.PREFERRED_SIZE) + 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) + .addComponent(bnPrioritizeCase, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(bnPrioritizeJob, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) ); - layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnDeleteCase, bnExit, bnOpenLogDir, bnOptions, bnPause, bnRefresh, bnShowProgress}); + layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnDeleteCase, bnShowProgress}); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -1453,48 +1510,47 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { .addComponent(pendingScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 215, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addGap(82, 82, 82) - .addComponent(bnPrioritizeCase) + .addComponent(bnPrioritizeCase, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(bnPrioritizeJob))) + .addComponent(bnPrioritizeJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(lbRunning) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(34, 34, 34) - .addComponent(bnShowProgress) + .addComponent(bnShowProgress, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(bnCancelJob) + .addComponent(bnCancelJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(bnCancelModule)) + .addComponent(bnCancelModule, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(runningScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 133, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(68, 68, 68) - .addComponent(bnReprocessJob) + .addComponent(bnReprocessJob, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(bnDeleteCase) + .addComponent(bnDeleteCase, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(bnShowCaseLog)) + .addComponent(bnShowCaseLog, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(lbCompleted) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(completedScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 179, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(bnExit) - .addComponent(bnOpenLogDir)) - .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) - .addComponent(bnPause) - .addComponent(bnRefresh) - .addComponent(bnOptions))))) + .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(bnPause, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(bnRefresh, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(bnOptions, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(bnOpenLogDir, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(bnClusterMetrics, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addComponent(bnExit, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))) .addContainerGap()) ); - layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnDeleteCase, bnExit, bnOpenLogDir, bnOptions, bnRefresh, bnShowProgress}); + layout.linkSize(javax.swing.SwingConstants.VERTICAL, new java.awt.Component[] {bnCancelJob, bnCancelModule, bnClusterMetrics, bnDeleteCase, bnExit, bnOpenLogDir, bnOptions, bnPrioritizeCase, bnPrioritizeJob, bnRefresh, bnShowProgress}); }// //GEN-END:initComponents @@ -1523,11 +1579,11 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { 
"AutoIngestControlPanel.DeletionFailed=Deletion failed for job" }) private void bnDeleteCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnDeleteCaseActionPerformed - if (completedTable.getModel().getRowCount() < 0 || completedTable.getSelectedRow() < 0) { + if (completedTableModel.getRowCount() < 0 || completedTable.getSelectedRow() < 0) { return; } - String caseName = (String) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal()); + String caseName = (String) completedTable.getValueAt(completedTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal()); Object[] options = { org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.Delete"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.DoNotDelete") @@ -1544,8 +1600,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { if (reply == JOptionPane.YES_OPTION) { bnDeleteCase.setEnabled(false); bnShowCaseLog.setEnabled(false); - if (completedTable.getModel().getRowCount() > 0 && completedTable.getSelectedRow() >= 0) { - Path caseDirectoryPath = (Path) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); + if (completedTableModel.getRowCount() > 0 && completedTable.getSelectedRow() >= 0) { + Path caseDirectoryPath = (Path) completedTableModel.getValueAt(completedTable.getSelectedRow(), JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); completedTable.clearSelection(); this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); CaseDeletionResult result = manager.deleteCase(caseName, caseDirectoryPath); @@ -1691,10 +1747,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { */ @Messages({"AutoIngestControlPanel.casePrioritization.errorMessage=An error occurred 
when prioritizing the case. Some or all jobs may not have been prioritized."}) private void bnPrioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeCaseActionPerformed - if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { + if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - - String caseName = (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal())).toString(); + String caseName = (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal())).toString(); try { manager.prioritizeCase(caseName); } catch (AutoIngestManager.AutoIngestManagerException ex) { @@ -1720,9 +1775,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { }) private void bnShowCaseLogActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnShowCaseLogActionPerformed try { - int selectedRow = completedTable.convertRowIndexToModel(completedTable.getSelectedRow()); + int selectedRow = completedTable.getSelectedRow(); if (selectedRow != -1) { - Path caseDirectoryPath = (Path) completedTable.getModel().getValueAt(selectedRow, JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); + Path caseDirectoryPath = (Path) completedTableModel.getValueAt(selectedRow, JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); if (null != caseDirectoryPath) { Path pathToLog = AutoIngestJobLogger.getLogPath(caseDirectoryPath); if (pathToLog.toFile().exists()) { @@ -1751,9 +1806,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { @Messages({"AutoIngestControlPanel.jobPrioritization.errorMessage=An error occurred when prioritizing the job."}) private void bnPrioritizeJobActionPerformed(java.awt.event.ActionEvent evt) 
{//GEN-FIRST:event_bnPrioritizeJobActionPerformed - if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { + if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - Path manifestFilePath = (Path) (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); + Path manifestFilePath = (Path) (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); try { manager.prioritizeJob(manifestFilePath); } catch (AutoIngestManager.AutoIngestManagerException ex) { @@ -1780,19 +1835,28 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { }//GEN-LAST:event_bnOpenLogDirActionPerformed private void bnReprocessJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnReprocessJobActionPerformed - if (completedTable.getModel().getRowCount() < 0 || completedTable.getSelectedRow() < 0) { + if (completedTableModel.getRowCount() < 0 || completedTable.getSelectedRow() < 0) { return; } this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - Path manifestPath = (Path) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()); + Path manifestPath = (Path) completedTableModel.getValueAt(completedTable.getSelectedRow(), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()); manager.reprocessJob(manifestPath); refreshTables(); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); }//GEN-LAST:event_bnReprocessJobActionPerformed + private void bnClusterMetricsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnClusterMetricsActionPerformed + try { + new AutoIngestMetricsDialog(this.getTopLevelAncestor()); + } catch 
(AutoIngestMetricsDialog.AutoIngestMetricsDialogException ex) { + MessageNotifyUtil.Message.error(ex.getMessage()); + } + }//GEN-LAST:event_bnClusterMetricsActionPerformed + // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton bnCancelJob; private javax.swing.JButton bnCancelModule; + private javax.swing.JButton bnClusterMetrics; private javax.swing.JButton bnDeleteCase; private javax.swing.JButton bnExit; private javax.swing.JButton bnOpenLogDir; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java index d53503a72f..421b1f92f0 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDashboard.java @@ -873,7 +873,11 @@ final class AutoIngestDashboard extends JPanel implements Observer { }//GEN-LAST:event_prioritizeCaseButtonActionPerformed private void clusterMetricsButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_clusterMetricsButtonActionPerformed - new AutoIngestMetricsDialog(this.getTopLevelAncestor(), autoIngestMonitor); + try { + new AutoIngestMetricsDialog(this.getTopLevelAncestor()); + } catch (AutoIngestMetricsDialog.AutoIngestMetricsDialogException ex) { + MessageNotifyUtil.Message.error(ex.getMessage()); + } }//GEN-LAST:event_clusterMetricsButtonActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 1cbe9af5a6..2ce6e2cba8 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -64,7 +64,7 @@ 
import org.sleuthkit.autopsy.casemodule.Case.CaseType; import org.sleuthkit.autopsy.casemodule.CaseActionException; import org.sleuthkit.autopsy.casemodule.CaseDetails; import org.sleuthkit.autopsy.casemodule.CaseMetadata; -import org.sleuthkit.autopsy.coordinationservice.CaseNodeData; +import org.sleuthkit.autopsy.casemodule.CaseNodeData; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.Lock; @@ -498,7 +498,6 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } SYS_LOGGER.log(Level.INFO, "Starting input scan of {0}", rootInputDirectory); InputDirScanner scanner = new InputDirScanner(); - scanner.scan(); SYS_LOGGER.log(Level.INFO, "Completed input scan of {0}", rootInputDirectory); } @@ -554,12 +553,10 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!prioritizedJobs.isEmpty()) { ++maxPriority; for (AutoIngestJob job : prioritizedJobs) { - int oldPriority = job.getPriority(); - job.setPriority(maxPriority); try { this.updateCoordinationServiceManifestNode(job); + job.setPriority(maxPriority); } catch (CoordinationServiceException | InterruptedException ex) { - job.setPriority(oldPriority); throw new AutoIngestManagerException("Error updating case priority", ex); } } @@ -610,14 +607,12 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ if (null != prioritizedJob) { ++maxPriority; - int oldPriority = prioritizedJob.getPriority(); - prioritizedJob.setPriority(maxPriority); try { this.updateCoordinationServiceManifestNode(prioritizedJob); } catch (CoordinationServiceException | InterruptedException ex) { - prioritizedJob.setPriority(oldPriority); throw new AutoIngestManagerException("Error updating job priority", ex); } + prioritizedJob.setPriority(maxPriority); } 
Collections.sort(pendingJobs, new AutoIngestJob.PriorityComparator()); @@ -872,9 +867,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /** * Sets the coordination service manifest node. * - * Note that a new auto ingest job node data object will be created from the - * job passed in. Thus, if the data version of the node has changed, the - * node will be "upgraded" as well as updated. + * Note that a new auto ingest job node data object will be created from + * the job passed in. Thus, if the data version of the node has changed, + * the node will be "upgraded" as well as updated. * * @param job The auto ingest job. */ @@ -884,19 +879,14 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen byte[] rawData = nodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData); } - + /** - * Sets the error flag for case node data given a case directory path. + * Sets the coordination service case node. * - * @param caseDirectoryPath The case directory path. - * - * @throws CoordinationService.CoordinationServiceException - * @throws InterruptedException - * @throws CaseNodeData.InvalidDataException + * @param caseNodeData The case node data. + * @param caseDirectoryPath The case directory. 
*/ - private void setCaseNodeDataErrorsOccurred(Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException, CaseNodeData.InvalidDataException { - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); + void updateCoordinationServiceCaseNode(CaseNodeData caseNodeData, Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException { byte[] rawData = caseNodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString(), rawData); } @@ -1062,8 +1052,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (null != manifest) { /* - * Update the mapping of case names to manifest paths that - * is used for case deletion. + * Update the mapping of case names to manifest paths that is + * used for case deletion. */ String caseName = manifest.getCaseName(); Path manifestPath = manifest.getFilePath(); @@ -1077,8 +1067,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } /* - * Add a job to the pending jobs queue, the completed jobs - * list, or do crashed job recovery, as required. + * Add a job to the pending jobs queue, the completed jobs list, + * or do crashed job recovery, as required. */ try { byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()); @@ -1098,7 +1088,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen break; case DELETED: /* - * Ignore jobs marked as "deleted." + * Ignore jobs marked as "deleted." 
*/ break; default: @@ -1208,8 +1198,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException { /* * Create the coordination service manifest node data for the job. - * Note that getting the lock will create the node for the job (with - * no data) if it does not already exist. + * Note that getting the lock will create the node for the job + * (with no data) if it does not already exist. * * An exclusive lock is obtained before creating the node data * because another host may have already found the job, obtained an @@ -1239,15 +1229,13 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * the node that was processing the job crashed and the processing * status was not updated. * - * @param manifest The manifest for upgrading the node. + * @param manifest The manifest for upgrading the node. * @param jobNodeData The auto ingest job node data. * - * @throws InterruptedException if the thread running the input - * directory scan task is interrupted - * while blocked, i.e., if auto ingest is - * shutting down. - * @throws AutoIngestJobException if there is an issue creating a new - * AutoIngestJob object. + * @throws InterruptedException if the thread running the input + * directory scan task is interrupted while + * blocked, i.e., if auto ingest is + * shutting down. */ private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData jobNodeData) throws InterruptedException, AutoIngestJobException { /* @@ -1260,35 +1248,51 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (null != manifestLock) { SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath); try { - Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); - /* * Create the recovery job. 
*/ AutoIngestJob job = new AutoIngestJob(jobNodeData); int numberOfCrashes = job.getNumberOfCrashes(); - if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { - ++numberOfCrashes; - job.setNumberOfCrashes(numberOfCrashes); - if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { - job.setCompletedDate(new Date(0)); - } else { - job.setCompletedDate(Date.from(Instant.now())); - } - } - + ++numberOfCrashes; + job.setNumberOfCrashes(numberOfCrashes); + job.setCompletedDate(new Date(0)); + Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); if (null != caseDirectoryPath) { job.setCaseDirectoryPath(caseDirectoryPath); job.setErrorsOccurred(true); - try { - setCaseNodeDataErrorsOccurred(caseDirectoryPath); - } catch (CaseNodeData.InvalidDataException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); - } } else { job.setErrorsOccurred(false); } + /* + * Update the coordination service manifest node for + * the job. If this fails, leave the recovery to + * another host. + */ + try { + updateCoordinationServiceManifestNode(job); + if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { + newPendingJobsList.add(job); + } else { + newCompletedJobsList.add(new AutoIngestJob(jobNodeData)); + } + } catch (CoordinationServiceException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); + return; + } + + /* + * Update the case node data and do the logging. 
+ */ + if (null != caseDirectoryPath) { + try { + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + } catch (CaseNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); + } + } if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { job.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING); if (null != caseDirectoryPath) { @@ -1302,32 +1306,13 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); if (null != caseDirectoryPath) { try { - new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryNoRetry(); + new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), jobNodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry(); } catch (AutoIngestJobLoggerException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); } } } - /* - * Update the coordination service node for the job. If - * this fails, leave the recovery to another host. 
- */ - try { - updateCoordinationServiceManifestNode(job); - } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); - return; - } - - jobNodeData = new AutoIngestJobNodeData(job); - - if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { - newPendingJobsList.add(job); - } else { - newCompletedJobsList.add(new AutoIngestJob(jobNodeData)); - } - } finally { try { manifestLock.release(); @@ -1382,10 +1367,11 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /* * Try to upgrade/update the coordination service manifest - * node data for the job. It is possible that two hosts will - * both try to obtain the lock to do the upgrade operation - * at the same time. If this happens, the host that is - * holding the lock will complete the upgrade operation. + * node data for the job. It is possible that two hosts + * will both try to obtain the lock to do the upgrade + * operation at the same time. If this happens, the host + * that is holding the lock will complete the upgrade + * operation. 
*/ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { @@ -1967,7 +1953,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (currentJob.isCanceled()) { Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); if (null != caseDirectoryPath) { - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath); jobLogger.logJobCancelled(); } @@ -2292,7 +2280,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!dataSource.exists()) { SYS_LOGGER.log(Level.SEVERE, "Missing data source for {0}", manifestPath); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logMissingDataSource(); return null; } @@ -2340,7 +2330,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // did we find a data source processor that can process the data source if (validDataSourceProcessorsMap.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. 
- setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logFailedToIdentifyDataSource(); SYS_LOGGER.log(Level.WARNING, "Unsupported data source {0} for {1}", new Object[]{dataSource.getPath(), manifestPath}); // NON-NLS @@ -2366,7 +2358,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // Log that the current DSP failed and set the error flag. We consider it an error // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()}); @@ -2410,7 +2404,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); 
jobLogger.logNoDataSourceContent(); } break; @@ -2422,7 +2418,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logNoDataSourceContent(); } break; @@ -2432,7 +2430,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{manifestPath, errorMessage}); } currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logFailedToAddDataSource(); break; } @@ -2446,7 +2446,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ SYS_LOGGER.log(Level.WARNING, "Cancellation while waiting for data source processor for {0}", manifestPath); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logDataSourceProcessorCancelled(); } } @@ -2500,7 +2502,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!cancelledModules.isEmpty()) { 
SYS_LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); for (String module : snapshot.getCancelledDataSourceIngestModules()) { SYS_LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); jobLogger.logIngestModuleCancelled(module); @@ -2510,7 +2514,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } else { currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logAnalysisCancelled(); CancellationReason cancellationReason = snapshot.getCancellationReason(); if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { @@ -2523,13 +2529,17 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + 
updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logIngestModuleStartupErrors(); throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); } else { SYS_LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logAnalysisStartupError(); throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } @@ -2538,7 +2548,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); } currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logIngestJobSettingsErrors(); throw new AnalysisStartupException("Error(s) in ingest job settings"); } @@ -2581,7 +2593,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } catch (FileExportException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error doing file export for %s", manifestPath), ex); currentJob.setErrorsOccurred(true); - setCaseNodeDataErrorsOccurred(caseDirectoryPath); + CaseNodeData caseNodeData = new 
CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); + updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); jobLogger.logFileExportError(); } } @@ -2924,7 +2938,6 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen PARTIALLY_DELETED, FULLY_DELETED } - static final class AutoIngestManagerException extends Exception { private static final long serialVersionUID = 1L; @@ -2939,4 +2952,4 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } -} +} \ No newline at end of file diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsCollector.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsCollector.java new file mode 100755 index 0000000000..9567b70470 --- /dev/null +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsCollector.java @@ -0,0 +1,160 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011-2017 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.experimental.autoingest; + +import java.util.ArrayList; +import java.util.List; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coordinationservice.CoordinationService; +import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; +import org.sleuthkit.autopsy.coreutils.Logger; + +/** + * Collects metrics for an auto ingest cluster. + */ +final class AutoIngestMetricsCollector { + + private static final Logger LOGGER = Logger.getLogger(AutoIngestMetricsCollector.class.getName()); + private CoordinationService coordinationService; + + /** + * Creates an instance of the AutoIngestMetricsCollector. + * + * @throws AutoIngestMetricsCollector.AutoIngestMetricsCollectorException + */ + AutoIngestMetricsCollector() throws AutoIngestMetricsCollectorException { + try { + coordinationService = CoordinationService.getInstance(); + } catch (CoordinationServiceException ex) { + throw new AutoIngestMetricsCollectorException("Failed to get coordination service", ex); //NON-NLS + } + } + + /** + * Gets a new metrics snapshot from the coordination service for an auto + * ingest cluster. + * + * @return The metrics snapshot. + */ + MetricsSnapshot queryCoordinationServiceForMetrics() { + try { + MetricsSnapshot newMetricsSnapshot = new MetricsSnapshot(); + List nodeList = coordinationService.getNodeList(CoordinationService.CategoryNode.MANIFESTS); + for (String node : nodeList) { + try { + AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, node)); + if (nodeData.getVersion() < 1) { + /* + * Ignore version '0' nodes that have not been + * "upgraded" since they don't carry enough data. 
+ */ + continue; + } + AutoIngestJob job = new AutoIngestJob(nodeData); + AutoIngestJob.ProcessingStatus processingStatus = nodeData.getProcessingStatus(); + switch (processingStatus) { + case PENDING: + case PROCESSING: + case DELETED: + /* + * These are not jobs we care about for metrics, so + * we will ignore them. + */ + break; + case COMPLETED: + newMetricsSnapshot.addCompletedJobDate(job.getCompletedDate()); + break; + default: + LOGGER.log(Level.SEVERE, "Unknown AutoIngestJobData.ProcessingStatus"); + break; + } + } catch (InterruptedException ex) { + LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex); + } catch (AutoIngestJobNodeData.InvalidDataException ex) { + LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex); + } catch (AutoIngestJob.AutoIngestJobException ex) { + LOGGER.log(Level.SEVERE, String.format("Failed to create a job for '%s'", node), ex); + } + } + + return newMetricsSnapshot; + + } catch (CoordinationService.CoordinationServiceException ex) { + LOGGER.log(Level.SEVERE, "Failed to get node list from coordination service", ex); + return new MetricsSnapshot(); + } + } + + /** + * A snapshot of metrics for an auto ingest cluster. + */ + static final class MetricsSnapshot { + + private final List completedJobDates = new ArrayList<>(); + + /** + * Gets a list of completed job dates, formatted in milliseconds. + * + * @return The completed job dates, formatted in milliseconds. + */ + List getCompletedJobDates() { + return new ArrayList<>(completedJobDates); + } + + /** + * Adds a new date to the list of completed job dates. + * + * @param date The date to be added. + */ + void addCompletedJobDate(java.util.Date date) { + completedJobDates.add(date.getTime()); + } + } + + /** + * Exception type thrown when there is an error completing an auto ingest + * metrics collector operation. 
+ */ + static final class AutoIngestMetricsCollectorException extends Exception { + + private static final long serialVersionUID = 1L; + + /** + * Constructs an instance of the exception type thrown when there is an + * error completing an auto ingest metrics collector operation. + * + * @param message The exception message. + */ + private AutoIngestMetricsCollectorException(String message) { + super(message); + } + + /** + * Constructs an instance of the exception type thrown when there is an + * error completing an auto ingest metrics collector operation. + * + * @param message The exception message. + * @param cause A Throwable cause for the error. + */ + private AutoIngestMetricsCollectorException(String message, Throwable cause) { + super(message, cause); + } + + } +} \ No newline at end of file diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsDialog.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsDialog.java index 66c6d28581..80bcb6958b 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsDialog.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMetricsDialog.java @@ -29,27 +29,30 @@ import org.openide.util.NbBundle; import org.openide.util.NbBundle.Messages; /** - * Display basic metrics for a cluster. + * Displays auto ingest metrics for a cluster. */ final class AutoIngestMetricsDialog extends javax.swing.JDialog { - private final AutoIngestMonitor autoIngestMonitor; + private final AutoIngestMetricsCollector autoIngestMetricsCollector; /** - * Creates new form AutoIngestMetricsDialog + * Creates an instance of AutoIngestMetricsDialog * * @param parent The parent container. - * @param autoIngestMonitor The auto ingest monitor. */ @Messages({ "AutoIngestMetricsDialog.title.text=Auto Ingest Cluster Metrics", "AutoIngestMetricsDialog.initReportText=Select a date below and click the 'Get Metrics Since...' 
button to generate\na metrics report." }) - AutoIngestMetricsDialog(Container parent, AutoIngestMonitor autoIngestMonitor) { + AutoIngestMetricsDialog(Container parent) throws AutoIngestMetricsDialogException { super((Window) parent, NbBundle.getMessage(AutoIngestMetricsDialog.class, "AutoIngestMetricsDialog.title.text"), ModalityType.MODELESS); + try { + autoIngestMetricsCollector = new AutoIngestMetricsCollector(); + } catch (AutoIngestMetricsCollector.AutoIngestMetricsCollectorException ex) { + throw new AutoIngestMetricsDialogException("Error starting up the auto ingest metrics dialog.", ex); + } initComponents(); reportTextArea.setText(NbBundle.getMessage(AutoIngestMetricsDialog.class, "AutoIngestMetricsDialog.initReportText")); - this.autoIngestMonitor = autoIngestMonitor; setModal(true); setSize(getPreferredSize()); setLocationRelativeTo(parent); @@ -64,7 +67,7 @@ final class AutoIngestMetricsDialog extends javax.swing.JDialog { return; } - AutoIngestMonitor.MetricsSnapshot metricsSnapshot = autoIngestMonitor.getMetricsSnapshot(); + AutoIngestMetricsCollector.MetricsSnapshot metricsSnapshot = autoIngestMetricsCollector.queryCoordinationServiceForMetrics(); Object[] completedJobDates = metricsSnapshot.getCompletedJobDates().toArray(); int count = 0; long pickedDate = datePicker.getDate().atStartOfDay().toEpochSecond(ZoneOffset.UTC) * 1000; @@ -82,6 +85,37 @@ final class AutoIngestMetricsDialog extends javax.swing.JDialog { count )); } + + /** + * Exception type thrown when there is an error completing an auto ingest + * metrics dialog operation. + */ + static final class AutoIngestMetricsDialogException extends Exception { + + private static final long serialVersionUID = 1L; + + /** + * Constructs an instance of the exception type thrown when there is an + * error completing an auto ingest metrics dialog operation. + * + * @param message The exception message. 
+ */ + private AutoIngestMetricsDialogException(String message) { + super(message); + } + + /** + * Constructs an instance of the exception type thrown when there is an + * error completing an auto ingest metrics dialog operation. + * + * @param message The exception message. + * @param cause A Throwable cause for the error. + */ + private AutoIngestMetricsDialogException(String message, Throwable cause) { + super(message, cause); + } + + } /** * This method is called from within the constructor to initialize the form. @@ -175,4 +209,4 @@ final class AutoIngestMetricsDialog extends javax.swing.JDialog { private javax.swing.JButton metricsButton; private javax.swing.JTextArea reportTextArea; // End of variables declaration//GEN-END:variables -} +} \ No newline at end of file diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java index 37c635b9f0..e46a5e43c0 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java @@ -23,12 +23,10 @@ import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; import java.util.HashSet; import java.util.List; import java.util.Observable; import java.util.Set; -import java.util.TreeSet; import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.logging.Level; @@ -282,71 +280,6 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen } } - /** - * Gets a new metrics snapshot from the coordination service for an auto - * ingest cluster. - * - * @return The metrics snapshot. 
- */ - private MetricsSnapshot queryCoordinationServiceForMetrics() { - try { - MetricsSnapshot newMetricsSnapshot = new MetricsSnapshot(); - List nodeList = coordinationService.getNodeList(CoordinationService.CategoryNode.MANIFESTS); - for (String node : nodeList) { - try { - AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, node)); - if (nodeData.getVersion() < 1) { - /* - * Ignore version '0' nodes that have not been - * "upgraded" since they don't carry enough data. - */ - continue; - } - AutoIngestJob job = new AutoIngestJob(nodeData); - ProcessingStatus processingStatus = nodeData.getProcessingStatus(); - switch (processingStatus) { - case PENDING: - case PROCESSING: - case DELETED: - /* - * These are not jobs we care about for metrics, so - * we will ignore them. - */ - break; - case COMPLETED: - newMetricsSnapshot.addCompletedJobDate(job.getCompletedDate()); - break; - default: - LOGGER.log(Level.SEVERE, "Unknown AutoIngestJobData.ProcessingStatus"); - break; - } - } catch (InterruptedException ex) { - LOGGER.log(Level.SEVERE, String.format("Unexpected interrupt while retrieving coordination service node data for '%s'", node), ex); - } catch (AutoIngestJobNodeData.InvalidDataException ex) { - LOGGER.log(Level.SEVERE, String.format("Unable to use node data for '%s'", node), ex); - } catch (AutoIngestJob.AutoIngestJobException ex) { - LOGGER.log(Level.SEVERE, String.format("Failed to create a job for '%s'", node), ex); - } - } - - return newMetricsSnapshot; - - } catch (CoordinationServiceException ex) { - LOGGER.log(Level.SEVERE, "Failed to get node list from coordination service", ex); - return new MetricsSnapshot(); - } - } - - /** - * Gets a new metrics snapshot. The jobs snapshot will also be updated in - * effect. - * - * @return The metrics snapshot. 
- */ - public MetricsSnapshot getMetricsSnapshot() { - return queryCoordinationServiceForMetrics(); - } - /** * Bumps the priority of all pending ingest jobs for a specified case. * @@ -593,32 +526,6 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen } - /** - * A snapshot of metrics for an auto ingest cluster. - */ - public static final class MetricsSnapshot { - - private final List completedJobDates = new ArrayList<>(); - - /** - * Gets a list of completed job dates, formatted in milliseconds. - * - * @return The completed job dates, formatted in milliseconds. - */ - List getCompletedJobDates() { - return new ArrayList<>(completedJobDates); - } - - /** - * Adds a new date to the list of completed job dates. - * - * @param date The date to be added. - */ - void addCompletedJobDate(Date date) { - completedJobDates.add(date.getTime()); - } - } - /** * Exception type thrown when there is an error completing an auto ingest * monitor operation. @@ -649,4 +556,4 @@ final class AutoIngestMonitor extends Observable implements PropertyChangeListen } } -} +} \ No newline at end of file From e63c7d8723ee7188d64ab78da75084bb2ced55e8 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 13 Nov 2017 16:28:55 -0500 Subject: [PATCH 39/45] Cleanup. 
--- .../autopsy/experimental/autoingest/AutoIngestManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 2ce6e2cba8..cf5e30ffed 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -2807,7 +2807,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } } - } catch (Exception ex) { + } catch (CoordinationServiceException | InterruptedException ex) { SYS_LOGGER.log(Level.SEVERE, "Unexpected exception in PeriodicJobStatusEventTask", ex); //NON-NLS } } From 556810da0ebc3ca64b77a9746b527025590d8eb8 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Mon, 13 Nov 2017 16:52:19 -0500 Subject: [PATCH 40/45] Fixed merge conflict. 
--- .../autoingest/AutoIngestControlPanel.java | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java index 8024fbf93a..be8abbd1ee 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java @@ -1579,11 +1579,11 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { "AutoIngestControlPanel.DeletionFailed=Deletion failed for job" }) private void bnDeleteCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnDeleteCaseActionPerformed - if (completedTableModel.getRowCount() < 0 || completedTable.getSelectedRow() < 0) { + if (completedTable.getModel().getRowCount() < 0 || completedTable.getSelectedRow() < 0) { return; } - String caseName = (String) completedTable.getValueAt(completedTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal()); + String caseName = (String) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal()); Object[] options = { org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.Delete"), org.openide.util.NbBundle.getMessage(AutoIngestControlPanel.class, "ConfirmationDialog.DoNotDelete") @@ -1600,8 +1600,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { if (reply == JOptionPane.YES_OPTION) { bnDeleteCase.setEnabled(false); bnShowCaseLog.setEnabled(false); - if (completedTableModel.getRowCount() > 0 && completedTable.getSelectedRow() >= 0) { - Path caseDirectoryPath = (Path) completedTableModel.getValueAt(completedTable.getSelectedRow(), JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); + if 
(completedTable.getModel().getRowCount() > 0 && completedTable.getSelectedRow() >= 0) { + Path caseDirectoryPath = (Path) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); completedTable.clearSelection(); this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); CaseDeletionResult result = manager.deleteCase(caseName, caseDirectoryPath); @@ -1747,9 +1747,10 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { */ @Messages({"AutoIngestControlPanel.casePrioritization.errorMessage=An error occurred when prioritizing the case. Some or all jobs may not have been prioritized."}) private void bnPrioritizeCaseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeCaseActionPerformed - if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { + if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - String caseName = (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.CASE.ordinal())).toString(); + + String caseName = (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.CASE.ordinal())).toString(); try { manager.prioritizeCase(caseName); } catch (AutoIngestManager.AutoIngestManagerException ex) { @@ -1775,9 +1776,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { }) private void bnShowCaseLogActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnShowCaseLogActionPerformed try { - int selectedRow = completedTable.getSelectedRow(); + int selectedRow = completedTable.convertRowIndexToModel(completedTable.getSelectedRow()); if (selectedRow != -1) { - Path caseDirectoryPath = (Path) completedTableModel.getValueAt(selectedRow, 
JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); + Path caseDirectoryPath = (Path) completedTable.getModel().getValueAt(selectedRow, JobsTableModelColumns.CASE_DIRECTORY_PATH.ordinal()); if (null != caseDirectoryPath) { Path pathToLog = AutoIngestJobLogger.getLogPath(caseDirectoryPath); if (pathToLog.toFile().exists()) { @@ -1806,9 +1807,9 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { @Messages({"AutoIngestControlPanel.jobPrioritization.errorMessage=An error occurred when prioritizing the job."}) private void bnPrioritizeJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnPrioritizeJobActionPerformed - if (pendingTableModel.getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { + if (pendingTable.getModel().getRowCount() > 0 && pendingTable.getSelectedRow() >= 0) { this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - Path manifestFilePath = (Path) (pendingTableModel.getValueAt(pendingTable.getSelectedRow(), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); + Path manifestFilePath = (Path) (pendingTable.getModel().getValueAt(pendingTable.convertRowIndexToModel(pendingTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal())); try { manager.prioritizeJob(manifestFilePath); } catch (AutoIngestManager.AutoIngestManagerException ex) { @@ -1835,11 +1836,11 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { }//GEN-LAST:event_bnOpenLogDirActionPerformed private void bnReprocessJobActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bnReprocessJobActionPerformed - if (completedTableModel.getRowCount() < 0 || completedTable.getSelectedRow() < 0) { + if (completedTable.getModel().getRowCount() < 0 || completedTable.getSelectedRow() < 0) { return; } this.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - Path manifestPath = (Path) completedTableModel.getValueAt(completedTable.getSelectedRow(), 
JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()); + Path manifestPath = (Path) completedTable.getModel().getValueAt(completedTable.convertRowIndexToModel(completedTable.getSelectedRow()), JobsTableModelColumns.MANIFEST_FILE_PATH.ordinal()); manager.reprocessJob(manifestPath); refreshTables(); AutoIngestControlPanel.this.setCursor(Cursor.getDefaultCursor()); From 22f023002c7d8396eab0366a60134850d72d506e Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 14 Nov 2017 02:24:24 -0500 Subject: [PATCH 41/45] Fixed merge issue. --- .../autoingest/AutoIngestManager.java | 197 ++++++++---------- 1 file changed, 92 insertions(+), 105 deletions(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index cf5e30ffed..338544d9a8 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -64,7 +64,7 @@ import org.sleuthkit.autopsy.casemodule.Case.CaseType; import org.sleuthkit.autopsy.casemodule.CaseActionException; import org.sleuthkit.autopsy.casemodule.CaseDetails; import org.sleuthkit.autopsy.casemodule.CaseMetadata; -import org.sleuthkit.autopsy.casemodule.CaseNodeData; +import org.sleuthkit.autopsy.coordinationservice.CaseNodeData; import org.sleuthkit.autopsy.coordinationservice.CoordinationService; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException; import org.sleuthkit.autopsy.coordinationservice.CoordinationService.Lock; @@ -498,6 +498,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } SYS_LOGGER.log(Level.INFO, "Starting input scan of {0}", rootInputDirectory); InputDirScanner scanner = new InputDirScanner(); + scanner.scan(); SYS_LOGGER.log(Level.INFO, "Completed input scan of {0}", rootInputDirectory); } @@ 
-553,10 +554,12 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!prioritizedJobs.isEmpty()) { ++maxPriority; for (AutoIngestJob job : prioritizedJobs) { + int oldPriority = job.getPriority(); + job.setPriority(maxPriority); try { this.updateCoordinationServiceManifestNode(job); - job.setPriority(maxPriority); } catch (CoordinationServiceException | InterruptedException ex) { + job.setPriority(oldPriority); throw new AutoIngestManagerException("Error updating case priority", ex); } } @@ -607,12 +610,14 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ if (null != prioritizedJob) { ++maxPriority; + int oldPriority = prioritizedJob.getPriority(); + prioritizedJob.setPriority(maxPriority); try { this.updateCoordinationServiceManifestNode(prioritizedJob); } catch (CoordinationServiceException | InterruptedException ex) { + prioritizedJob.setPriority(oldPriority); throw new AutoIngestManagerException("Error updating job priority", ex); } - prioritizedJob.setPriority(maxPriority); } Collections.sort(pendingJobs, new AutoIngestJob.PriorityComparator()); @@ -867,9 +872,9 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /** * Sets the coordination service manifest node. * - * Note that a new auto ingest job node data object will be created from - * the job passed in. Thus, if the data version of the node has changed, - * the node will be "upgraded" as well as updated. + * Note that a new auto ingest job node data object will be created from the + * job passed in. Thus, if the data version of the node has changed, the + * node will be "upgraded" as well as updated. * * @param job The auto ingest job. 
*/ @@ -879,14 +884,19 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen byte[] rawData = nodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestNodePath, rawData); } - + /** - * Sets the coordination service case node. + * Sets the error flag for case node data given a case directory path. * - * @param caseNodeData The case node data. - * @param caseDirectoryPath The case directory. + * @param caseDirectoryPath The case directory path. + * + * @throws CoordinationService.CoordinationServiceException + * @throws InterruptedException + * @throws CaseNodeData.InvalidDataException */ - void updateCoordinationServiceCaseNode(CaseNodeData caseNodeData, Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException { + private void setCaseNodeDataErrorsOccurred(Path caseDirectoryPath) throws CoordinationServiceException, InterruptedException, CaseNodeData.InvalidDataException { + CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); + caseNodeData.setErrorsOccurred(true); byte[] rawData = caseNodeData.toArray(); coordinationService.setNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString(), rawData); } @@ -1052,8 +1062,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (null != manifest) { /* - * Update the mapping of case names to manifest paths that is - * used for case deletion. + * Update the mapping of case names to manifest paths that + * is used for case deletion. */ String caseName = manifest.getCaseName(); Path manifestPath = manifest.getFilePath(); @@ -1067,8 +1077,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } /* - * Add a job to the pending jobs queue, the completed jobs list, - * or do crashed job recovery, as required. 
+ * Add a job to the pending jobs queue, the completed jobs + * list, or do crashed job recovery, as required. */ try { byte[] rawData = coordinationService.getNodeData(CoordinationService.CategoryNode.MANIFESTS, manifestPath.toString()); @@ -1088,7 +1098,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen break; case DELETED: /* - * Ignore jobs marked as "deleted." + * Ignore jobs marked as "deleted." */ break; default: @@ -1198,8 +1208,8 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen private void addNewPendingJob(Manifest manifest) throws InterruptedException, AutoIngestJobException { /* * Create the coordination service manifest node data for the job. - * Note that getting the lock will create the node for the job - * (with no data) if it does not already exist. + * Note that getting the lock will create the node for the job (with + * no data) if it does not already exist. * * An exclusive lock is obtained before creating the node data * because another host may have already found the job, obtained an @@ -1229,13 +1239,15 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * the node that was processing the job crashed and the processing * status was not updated. * - * @param manifest The manifest for upgrading the node. + * @param manifest The manifest for upgrading the node. * @param jobNodeData The auto ingest job node data. * - * @throws InterruptedException if the thread running the input - * directory scan task is interrupted while - * blocked, i.e., if auto ingest is - * shutting down. + * @throws InterruptedException if the thread running the input + * directory scan task is interrupted + * while blocked, i.e., if auto ingest is + * shutting down. + * @throws AutoIngestJobException if there is an issue creating a new + * AutoIngestJob object. 
*/ private void doRecoveryIfCrashed(Manifest manifest, AutoIngestJobNodeData jobNodeData) throws InterruptedException, AutoIngestJobException { /* @@ -1248,51 +1260,35 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (null != manifestLock) { SYS_LOGGER.log(Level.SEVERE, "Attempting crash recovery for {0}", manifestPath); try { + Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); + /* * Create the recovery job. */ AutoIngestJob job = new AutoIngestJob(jobNodeData); int numberOfCrashes = job.getNumberOfCrashes(); - ++numberOfCrashes; - job.setNumberOfCrashes(numberOfCrashes); - job.setCompletedDate(new Date(0)); - Path caseDirectoryPath = PathUtils.findCaseDirectory(rootOutputDirectory, manifest.getCaseName()); + if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { + ++numberOfCrashes; + job.setNumberOfCrashes(numberOfCrashes); + if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { + job.setCompletedDate(new Date(0)); + } else { + job.setCompletedDate(Date.from(Instant.now())); + } + } + if (null != caseDirectoryPath) { job.setCaseDirectoryPath(caseDirectoryPath); job.setErrorsOccurred(true); + try { + setCaseNodeDataErrorsOccurred(caseDirectoryPath); + } catch (CaseNodeData.InvalidDataException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); + } } else { job.setErrorsOccurred(false); } - /* - * Update the coordination service manifest node for - * the job. If this fails, leave the recovery to - * another host. 
- */ - try { - updateCoordinationServiceManifestNode(job); - if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { - newPendingJobsList.add(job); - } else { - newCompletedJobsList.add(new AutoIngestJob(jobNodeData)); - } - } catch (CoordinationServiceException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); - return; - } - - /* - * Update the case node data and do the logging. - */ - if (null != caseDirectoryPath) { - try { - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); - } catch (CaseNodeData.InvalidDataException ex) { - SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to get case node data for %s", caseDirectoryPath), ex); - } - } if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { job.setProcessingStatus(AutoIngestJob.ProcessingStatus.PENDING); if (null != caseDirectoryPath) { @@ -1306,13 +1302,32 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen job.setProcessingStatus(AutoIngestJob.ProcessingStatus.COMPLETED); if (null != caseDirectoryPath) { try { - new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), jobNodeData.getCaseDirectoryPath()).logCrashRecoveryNoRetry(); + new AutoIngestJobLogger(manifest.getFilePath(), manifest.getDataSourceFileName(), caseDirectoryPath).logCrashRecoveryNoRetry(); } catch (AutoIngestJobLoggerException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error creating case auto ingest log entry for crashed job for %s", manifestPath), ex); } } } + /* + * Update the coordination service node for the job. If + * this fails, leave the recovery to another host. 
+ */ + try { + updateCoordinationServiceManifestNode(job); + } catch (CoordinationServiceException ex) { + SYS_LOGGER.log(Level.SEVERE, String.format("Error attempting to set node data for %s", manifestPath), ex); + return; + } + + jobNodeData = new AutoIngestJobNodeData(job); + + if (numberOfCrashes <= AutoIngestUserPreferences.getMaxNumTimesToProcessImage()) { + newPendingJobsList.add(job); + } else { + newCompletedJobsList.add(new AutoIngestJob(jobNodeData)); + } + } finally { try { manifestLock.release(); @@ -1367,11 +1382,10 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen /* * Try to upgrade/update the coordination service manifest - * node data for the job. It is possible that two hosts - * will both try to obtain the lock to do the upgrade - * operation at the same time. If this happens, the host - * that is holding the lock will complete the upgrade - * operation. + * node data for the job. It is possible that two hosts will + * both try to obtain the lock to do the upgrade operation + * at the same time. If this happens, the host that is + * holding the lock will complete the upgrade operation. 
*/ try (Lock manifestLock = coordinationService.tryGetExclusiveLock(CoordinationService.CategoryNode.MANIFESTS, manifest.getFilePath().toString())) { if (null != manifestLock) { @@ -1953,9 +1967,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (currentJob.isCanceled()) { Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); if (null != caseDirectoryPath) { - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); AutoIngestJobLogger jobLogger = new AutoIngestJobLogger(manifestPath, currentJob.getManifest().getDataSourceFileName(), caseDirectoryPath); jobLogger.logJobCancelled(); } @@ -2280,9 +2292,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!dataSource.exists()) { SYS_LOGGER.log(Level.SEVERE, "Missing data source for {0}", manifestPath); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logMissingDataSource(); return null; } @@ -2330,9 +2340,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // did we find a data source processor that can process the data source if (validDataSourceProcessorsMap.isEmpty()) { // This should never happen. We should add all unsupported data sources as logical files. 
- CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logFailedToIdentifyDataSource(); SYS_LOGGER.log(Level.WARNING, "Unsupported data source {0} for {1}", new Object[]{dataSource.getPath(), manifestPath}); // NON-NLS @@ -2358,9 +2366,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen // Log that the current DSP failed and set the error flag. We consider it an error // if a DSP fails even if a later one succeeds since we expected to be able to process // the data source which each DSP on the list. - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); currentJob.setErrorsOccurred(true); jobLogger.logDataSourceProcessorError(selectedProcessor.getDataSourceType()); SYS_LOGGER.log(Level.SEVERE, "Exception while processing {0} with data source processor {1}", new Object[]{dataSource.getPath(), selectedProcessor.getDataSourceType()}); @@ -2404,9 +2410,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); 
jobLogger.logNoDataSourceContent(); } break; @@ -2418,9 +2422,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen jobLogger.logDataSourceAdded(); if (dataSource.getContent().isEmpty()) { currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logNoDataSourceContent(); } break; @@ -2430,9 +2432,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Critical error running data source processor for {0}: {1}", new Object[]{manifestPath, errorMessage}); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logFailedToAddDataSource(); break; } @@ -2446,9 +2446,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen */ SYS_LOGGER.log(Level.WARNING, "Cancellation while waiting for data source processor for {0}", manifestPath); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logDataSourceProcessorCancelled(); } } @@ -2502,9 +2500,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen if (!cancelledModules.isEmpty()) { 
SYS_LOGGER.log(Level.WARNING, String.format("Ingest module(s) cancelled for %s", manifestPath)); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); for (String module : snapshot.getCancelledDataSourceIngestModules()) { SYS_LOGGER.log(Level.WARNING, String.format("%s ingest module cancelled for %s", module, manifestPath)); jobLogger.logIngestModuleCancelled(module); @@ -2514,9 +2510,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } else { currentJob.setProcessingStage(AutoIngestJob.Stage.CANCELLING, Date.from(Instant.now())); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logAnalysisCancelled(); CancellationReason cancellationReason = snapshot.getCancellationReason(); if (CancellationReason.NOT_CANCELLED != cancellationReason && CancellationReason.USER_CANCELLED != cancellationReason) { @@ -2529,17 +2523,13 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, String.format("%s ingest module startup error for %s", error.getModuleDisplayName(), manifestPath), error.getThrowable()); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, 
caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logIngestModuleStartupErrors(); throw new AnalysisStartupException(String.format("Error(s) during ingest module startup for %s", manifestPath)); } else { SYS_LOGGER.log(Level.SEVERE, String.format("Ingest manager ingest job start error for %s", manifestPath), ingestJobStartResult.getStartupException()); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logAnalysisStartupError(); throw new AnalysisStartupException("Ingest manager error starting job", ingestJobStartResult.getStartupException()); } @@ -2548,9 +2538,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen SYS_LOGGER.log(Level.SEVERE, "Ingest job settings error for {0}: {1}", new Object[]{manifestPath, warning}); } currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logIngestJobSettingsErrors(); throw new AnalysisStartupException("Error(s) in ingest job settings"); } @@ -2593,9 +2581,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } catch (FileExportException ex) { SYS_LOGGER.log(Level.SEVERE, String.format("Error doing file export for %s", manifestPath), ex); currentJob.setErrorsOccurred(true); - CaseNodeData caseNodeData = new CaseNodeData(coordinationService.getNodeData(CoordinationService.CategoryNode.CASES, caseDirectoryPath.toString())); - 
caseNodeData.setErrorsOccurred(true); - updateCoordinationServiceCaseNode(caseNodeData, caseDirectoryPath); + setCaseNodeDataErrorsOccurred(caseDirectoryPath); jobLogger.logFileExportError(); } } @@ -2807,7 +2793,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } } - } catch (CoordinationServiceException | InterruptedException ex) { + } catch (Exception ex) { SYS_LOGGER.log(Level.SEVERE, "Unexpected exception in PeriodicJobStatusEventTask", ex); //NON-NLS } } @@ -2938,6 +2924,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen PARTIALLY_DELETED, FULLY_DELETED } + static final class AutoIngestManagerException extends Exception { private static final long serialVersionUID = 1L; From e64e30181ead01685d1c48302c5e8f1fdeba7934 Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 14 Nov 2017 02:26:38 -0500 Subject: [PATCH 42/45] Typo --- .../autopsy/experimental/autoingest/AutoIngestManager.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index 338544d9a8..1cbe9af5a6 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -2939,4 +2939,4 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen } -} \ No newline at end of file +} From e78980cb6e7cc3d2a87638f6f6195766b4b3168e Mon Sep 17 00:00:00 2001 From: "U-BASIS\\dgrove" Date: Tue, 14 Nov 2017 02:45:19 -0500 Subject: [PATCH 43/45] Merge --- .../autopsy/casemodule/CaseNodeData.java | 141 ------------------ 1 file changed, 141 deletions(-) delete mode 100755 Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java 
b/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java deleted file mode 100755 index f4308e8954..0000000000 --- a/Core/src/org/sleuthkit/autopsy/casemodule/CaseNodeData.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2011-2017 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.sleuthkit.autopsy.casemodule; - -import java.nio.BufferUnderflowException; -import java.nio.ByteBuffer; - -/** - * An object that converts case data for a case directory coordination service - * node to and from byte arrays. - */ -public final class CaseNodeData { - - private static final int CURRENT_VERSION = 0; - - private int version; - private boolean errorsOccurred; - - /** - * Gets the current version of the case directory coordination service node - * data. - * - * @return The version number. - */ - public static int getCurrentVersion() { - return CaseNodeData.CURRENT_VERSION; - } - - /** - * Uses coordination service node data to construct an object that converts - * case data for a case directory coordination service node to and from byte - * arrays. - * - * @param nodeData The raw bytes received from the coordination service. - * - * @throws InvalidDataException If the node data buffer is smaller than - * expected. 
- */ - public CaseNodeData(byte[] nodeData) throws InvalidDataException { - if(nodeData == null || nodeData.length == 0) { - this.version = CURRENT_VERSION; - this.errorsOccurred = false; - } else { - /* - * Get fields from node data. - */ - ByteBuffer buffer = ByteBuffer.wrap(nodeData); - try { - if (buffer.hasRemaining()) { - this.version = buffer.getInt(); - - /* - * Flags bit format: 76543210 - * 0-6 --> reserved for future use - * 7 --> errorsOccurred - */ - byte flags = buffer.get(); - this.errorsOccurred = (flags < 0); - } - } catch (BufferUnderflowException ex) { - throw new InvalidDataException("Node data is incomplete", ex); - } - } - } - - /** - * Gets whether or not any errors occurred during the processing of the job. - * - * @return True or false. - */ - public boolean getErrorsOccurred() { - return this.errorsOccurred; - } - - /** - * Sets whether or not any errors occurred during the processing of job. - * - * @param errorsOccurred True or false. - */ - public void setErrorsOccurred(boolean errorsOccurred) { - this.errorsOccurred = errorsOccurred; - } - - /** - * Gets the node data version number. - * - * @return The version number. - */ - public int getVersion() { - return this.version; - } - - /** - * Gets the node data as a byte array that can be sent to the coordination - * service. - * - * @return The node data as a byte array. - */ - public byte[] toArray() { - ByteBuffer buffer = ByteBuffer.allocate(5); - - buffer.putInt(this.version); - buffer.put((byte)(this.errorsOccurred ? 
0x80 : 0)); - - // Prepare the array - byte[] array = new byte[buffer.position()]; - buffer.rewind(); - buffer.get(array, 0, array.length); - - return array; - } - - public final static class InvalidDataException extends Exception { - - private static final long serialVersionUID = 1L; - - private InvalidDataException(String message) { - super(message); - } - - private InvalidDataException(String message, Throwable cause) { - super(message, cause); - } - } -} \ No newline at end of file From 16abf6bf4cfc01dfb7360dfd299d0d7900dcbb2b Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Wed, 15 Nov 2017 09:41:08 -0500 Subject: [PATCH 44/45] Update develpers folder contents --- developers/README.txt | 13 ++++++++----- .../netbeans_ide_java_editor_settings.zip | Bin 0 -> 42562 bytes .../Braces_DO_WHILE_LOOP.properties | 1 - .../Braces_FOR_LOOP.properties | 1 - .../Braces_IF.properties | 1 - .../Braces_WHILE_LOOP.properties | 1 - .../EmptyStatements_IF.properties | 1 - .../Imports_STAR.properties | 1 - .../Javac_DEPRECATED.properties | 1 - .../Javac_DIVISION_BY_ZERO.properties | 1 - .../Javac_EMPTY_STATEMENT_AFTER_IF.properties | 1 - .../Javac_FALLTHROUGH.properties | 1 - .../Javac_FINALLY.properties | 1 - .../Javac_OVERRIDES.properties | 1 - .../Javac_RAWTYPES.properties | 1 - .../Javac_SERIALIZATION.properties | 1 - .../Javac_UNCHECKED.properties | 1 - .../Javac_UNNECESSARY_CAST.properties | 1 - .../MAVEN_SEARCH_HINT.properties | 1 - ...verification.ConsistentAccessType.properties | 1 - ....verification.HasNoArgConstructor.properties | 1 - ...IdClassOverridesEqualsAndHashCode.properties | 1 - ...verification.IdDefinedInHierarchy.properties | 1 - ...rification.JPAAnnotsOnlyOnAccesor.properties | 1 - ...e.jpa.verification.JPQLValidation.properties | 1 - ...ion.LegalCombinationOfAnnotations.properties | 1 - ...ication.NoIdClassOnEntitySubclass.properties | 1 - ...ee.jpa.verification.NonFinalClass.properties | 1 - ...tyOrMappedSuperclassCanUseIdClass.properties | 1 - 
...rification.PersistenceUnitPresent.properties | 1 - ...j2ee.jpa.verification.PublicClass.properties | 1 - ...rification.QueriesProperlyDefined.properties | 1 - ...pa.verification.SerializableClass.properties | 1 - ...ee.jpa.verification.TopLevelClass.properties | 1 - ...jpa.verification.UniqueEntityName.properties | 1 - ....jpa.verification.ValidAttributes.properties | 1 - ...erification.ValidPrimaryTableName.properties | 1 - ...java.hints.AssignResultToVariable.properties | 1 - ...s.assignmentToCatchBlockParameter.properties | 1 - ...ntIssues.assignmentToForLoopParam.properties | 1 - ...entIssues.assignmentToMethodParam.properties | 1 - ...mentIssues.incrementDecrementUsed.properties | 1 - ...AssignmentIssues.nestedAssignment.properties | 1 - ...tIssues.replaceAssignWithOpAssign.properties | 1 - ...lassStructure.classMayBeInterface.properties | 1 - ...a.hints.ClassStructure.finalClass.properties | 1 - ...Structure.finalMethodInFinalClass.properties | 1 - ...ClassStructure.finalPrivateMethod.properties | 1 - ...ts.ClassStructure.markerInterface.properties | 1 - ...ure.multipleTopLevelClassesInFile.properties | 1 - ...ructure.noopMethodInAbstractClass.properties | 1 - ...cture.protectedMemberInFinalClass.properties | 1 - ...publicConstructorInNonPublicClass.properties | 1 - ...nts.EmptyCancelForCancellableTask.properties | 1 - ....hints.ExportNonAccessibleElement.properties | 1 - ...ules.java.hints.IllegalInstanceOf.properties | 1 - ...dules.java.hints.IncompatibleMask.properties | 1 - ...odules.java.hints.OrganizeImports.properties | 1 - ...odules.java.hints.OrganizeMembers.properties | 1 - ....hints.PointlessBitwiseExpression.properties | 1 - ...odules.java.hints.ShiftOutOfRange.properties | 1 - ...s.modules.java.hints.StaticImport.properties | 1 - ....hints.SuspiciousNamesCombination.properties | 2 -- ...eans.modules.java.hints.SystemOut.properties | 1 - ...modules.java.hints.UtilityClass_1.properties | 1 - ...modules.java.hints.UtilityClass_2.properties | 1 - 
...s.bugs.BroadCatchBlock.broadCatch.properties | 4 ---- ...loneable.cloneInNonCloneableClass.properties | 1 - ...ndCloneable.cloneableWithoutClone.properties | 1 - ...ion.ClassEncapsulation.packageCls.properties | 2 -- ...n.ClassEncapsulation.protectedCls.properties | 1 - ...tion.ClassEncapsulation.publicCls.properties | 2 -- ...n.FieldEncapsulation.packageField.properties | 1 - ...n.FieldEncapsulation.privateField.properties | 1 - ...FieldEncapsulation.protectedField.properties | 1 - ...on.FieldEncapsulation.publicField.properties | 1 - ...sulation.ParamEncapsulation.array.properties | 1 - ...ion.ParamEncapsulation.collection.properties | 1 - ...psulation.ParamEncapsulation.date.properties | 1 - ...ulation.ReturnEncapsulation.array.properties | 1 - ...on.ReturnEncapsulation.collection.properties | 1 - ...sulation.ReturnEncapsulation.date.properties | 1 - ...nts.finalize.FinalizeNotProtected.properties | 1 - ...les.java.hints.jdk.AddUnderscores.properties | 4 ---- ...mapreduce.ForLoopToFunctionalHint.properties | 1 - ...erf.InitialCapacity.stringBuilder.properties | 1 - ...ints.perf.Tiny.collectionsToArray.properties | 1 - ...va.hints.perf.Tiny.constantIntern.properties | 1 - ...rf.Tiny.getClassInsteadOfDotClass.properties | 1 - ....perf.Tiny.lengthOneStringIndexOf.properties | 1 - ...hints.perf.Tiny.stringEqualsEmpty.properties | 1 - ...ints.suggestions.ConstantNameHint.properties | 5 ----- ...tions.TooStrongCast.broadTypeCast.properties | 1 - ...nner.ui.hints.CreateTestClassHint.properties | 1 - 94 files changed, 8 insertions(+), 110 deletions(-) create mode 100755 developers/netbeans_ide_java_editor_settings.zip delete mode 100755 developers/netbeans_ide_java_hint_settings/Braces_DO_WHILE_LOOP.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Braces_FOR_LOOP.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Braces_IF.properties delete mode 100755 
developers/netbeans_ide_java_hint_settings/Braces_WHILE_LOOP.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/EmptyStatements_IF.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Imports_STAR.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_DEPRECATED.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_DIVISION_BY_ZERO.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_EMPTY_STATEMENT_AFTER_IF.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_FALLTHROUGH.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_FINALLY.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_OVERRIDES.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_RAWTYPES.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_SERIALIZATION.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_UNCHECKED.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/Javac_UNNECESSARY_CAST.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/MAVEN_SEARCH_HINT.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.ConsistentAccessType.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.HasNoArgConstructor.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.IdClassOverridesEqualsAndHashCode.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.IdDefinedInHierarchy.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.JPAAnnotsOnlyOnAccesor.properties delete mode 100755 
developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.JPQLValidation.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.LegalCombinationOfAnnotations.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.NoIdClassOnEntitySubclass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.NonFinalClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.OnlyEntityOrMappedSuperclassCanUseIdClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.PersistenceUnitPresent.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.PublicClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.QueriesProperlyDefined.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.SerializableClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.TopLevelClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.UniqueEntityName.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.ValidAttributes.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/o.n.m.j2ee.jpa.verification.ValidPrimaryTableName.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignResultToVariable.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.assignmentToCatchBlockParameter.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.assignmentToForLoopParam.properties 
delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.assignmentToMethodParam.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.incrementDecrementUsed.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.nestedAssignment.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.AssignmentIssues.replaceAssignWithOpAssign.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.classMayBeInterface.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.finalClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.finalMethodInFinalClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.finalPrivateMethod.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.markerInterface.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.multipleTopLevelClassesInFile.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.noopMethodInAbstractClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.protectedMemberInFinalClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ClassStructure.publicConstructorInNonPublicClass.properties delete mode 100755 
developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.EmptyCancelForCancellableTask.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ExportNonAccessibleElement.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.IllegalInstanceOf.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.IncompatibleMask.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.OrganizeImports.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.OrganizeMembers.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.PointlessBitwiseExpression.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.ShiftOutOfRange.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.StaticImport.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.SuspiciousNamesCombination.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.SystemOut.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.UtilityClass_1.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.UtilityClass_2.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.bugs.BroadCatchBlock.broadCatch.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.bugs.CloneAndCloneable.cloneInNonCloneableClass.properties delete mode 100755 
developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.bugs.CloneAndCloneable.cloneableWithoutClone.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ClassEncapsulation.packageCls.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ClassEncapsulation.protectedCls.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ClassEncapsulation.publicCls.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.FieldEncapsulation.packageField.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.FieldEncapsulation.privateField.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.FieldEncapsulation.protectedField.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.FieldEncapsulation.publicField.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ParamEncapsulation.array.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ParamEncapsulation.collection.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ParamEncapsulation.date.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ReturnEncapsulation.array.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ReturnEncapsulation.collection.properties delete mode 100755 
developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.encapsulation.ReturnEncapsulation.date.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.finalize.FinalizeNotProtected.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.jdk.AddUnderscores.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.jdk.mapreduce.ForLoopToFunctionalHint.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.InitialCapacity.stringBuilder.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.Tiny.collectionsToArray.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.Tiny.constantIntern.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.Tiny.getClassInsteadOfDotClass.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.Tiny.lengthOneStringIndexOf.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.perf.Tiny.stringEqualsEmpty.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.suggestions.ConstantNameHint.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.hints.suggestions.TooStrongCast.broadTypeCast.properties delete mode 100755 developers/netbeans_ide_java_hint_settings/org.netbeans.modules.java.testrunner.ui.hints.CreateTestClassHint.properties diff --git a/developers/README.txt b/developers/README.txt index ce28add84d..61fcac0963 100755 --- a/developers/README.txt +++ b/developers/README.txt @@ -1,3 +1,6 @@ +CURRENTLY NOT WORKING for NETBEANS IDE 8.2, +NETBEANS BUG REPORT: 
https://netbeans.org/bugzilla/show_bug.cgi?id=271811 + Common NetBeans IDE settings for the Autopsy project are stored here. All contributors are kindly asked to use these settings. @@ -7,11 +10,11 @@ Select Options to Import dialog. Push the Browse... button to pop up a file chooser. Select ~\autopsy\developers\netbeans_ide_formatting_settings.zip. NetBeans IDE will require a restart for the settings to take effect. Formatting can be done by selecting Source, Format (Alt + Shift + F). -2. To make Java code hygiene hints (lint) settings, copy the entire contents of -~\autopsy\developers\netbeans_ide_java_hint_settings to the hint/default -subdirectory of your user settings directory for the IDE. For Windows, this is -~\AppData\Roaming\NetBeans\8.2\config\Preferences\org\netbeans\modules\java\hints -\default. Restart the IDE. +2. To make Java code hygiene hints (lint) settings, select Tools, +Options, Editor tab, Hints tab. Push the Import... button to pop up the +Select Options to Import dialog. Push the Browse... button to pop up a file +chooser. Select ~\autopsy\developers\netbeans_ide_editor_settings.zip. 
+NetBeans IDE will require a restart for the settings to take effect diff --git a/developers/netbeans_ide_java_editor_settings.zip b/developers/netbeans_ide_java_editor_settings.zip new file mode 100755 index 0000000000000000000000000000000000000000..102d366250252dd4948d7ab4187e2436c412266d GIT binary patch literal 42562 zcmcGWV|?CQyR91=jT$>?Y}>Zc*tXR)b{gBZ&8D$!yGfeHjn9*Gt@UE>(|7ItJ13v= zeO&)>k2&u#A2~@7P?R^2kdSYr#cU+s{L>!-;C}-fYhyDLS{iG8Jx51-hx}N1*)Mb` z0qfEa2=%9BKa5ABAwk~0O{7DT7L#h!%FUBjw1HyHZ6L@uiYs@|CzkhV#83-|BgXj`Hw zonZ5Z<95IMoS%v=$EabRF@FT61fC{RQ z@Q;B}#?p5L2M1ed$xAdp-L4gcl?AWOgr4ck7&1v7_ElM!R={Ev@?}e$keD*M9v4%+ zNAdxE%#Au~;FObufc#8V6)Xz$=8ZDwn>TX*R}HBDp@FcWnWK%p11-O`wT+{mqnVAh zlAEoO1Fe9)k)DOEjhVG0jjNTVm3+7PHXUL+?Iqq!bH4<9K*NmYH&UZv8Z$DQ;f!$0 z2&1>c3^CZO_g9?uDaLv?_|ccfYi+Ej-=Dlp7I3{L#`dZP_PQomWXwzLT`4n-=WU;_ zwuV=C&v&aI>eZ`V4?bJ1h2o7z!5XJk(xZ`CZg{HGBv;J|zYRH76Z222oa@R8tM1(O z#O@!_FH@AKeDIong^bx(I(G7W*D|l9+Bs&J|^O%?t}KQis>+SfVhln>n(} z{Iu_54oi|Cr4*m;C}PgCGQVV5KqaP0+PL1+TEl#K8!#z7llDzW`O+3akj?l}4TZKU z!a6IhBUr&m_NjeUeZTGphvC>Cu`nGD_H^iMlL!*o`%1gu>l5!JaMJfpCe>|Ft7J;Y zCPHGv_e9p}y6R3Qj@0VMa?h=+&WV}td@Uz*^)79o3re7*z{UIAlJOX7V#ALWMO37& zy2=(d*C|2gJWfo>lC?+3?>>5P@vNd1fI84EDI6hI-8~aVeM5ROJJK;l@hB4myAyKx z!VA1~lvUVO;4YK>4D=$oXu{N3545}_W@7GVidp`g4W;*ug zF*R{eGjz~3r7WzovUgqfNu}OA+q@{er!76Xc$&8!-D(zW@SyZ=<6=y*>*!X?s`I^= z_3q89rx_A6R>a4!?^t`|$I|yb@az30= zwg}|sUOWU+LSB4iIkE&Po0YaWh=7t%zNGRsTh5u6pBXdQ*0PyJ<+{sA$+??4Sg*s7 zk4ai0Nm*v1Wm=3Uz0Vp9rN>Q*7k%&ud`G66J|Ag5O^oJn2L_yB%h3W7o;uQPDhcn! zclLZJ6D|IH@Lk6&`aKG$AJ3xJ)$p7Q$AMj#0axMfFXg*|1czRf&-{O=yo2M5^uLtX z0w|9XcnWck@U1y2{sXK)vFbNcgM1>Wa*t|C0?q!0U_uP>$kfL>oZVcEGIYQE8&9tN zwvJYxb&Lj4yt-QUks5Nkx_ZT>2KKgiUQ&mq=clXT)lvONwWl>Ib*{?+7MtG6LQ!z! 
zF;#S!I2PLuvUgG%M>*k2$0%Z8>^c=6`QI7%4BxUvCikl4$Bo=3lokPZ7Dy{B8Il%Y zKt*%l9Sv}V5Cnb6UAdN7{%P9+tm4KB9v9z@jcEo9TpEP9Dg#1KM zaR)A?SJeJp5vaOX5S}+vC7(CTf{XKq|lx19Y_3%=nTJ-pP$O`zk)MY^J2 zcUST7M8g5QaC7&(V}OL{{w=Di|G_F+?OH<@$EtJST2hvh^f-1>1CBe6@HSV&8k~w& zS1z6@**DsVC&97c(63B1lOG!03I=JI79pfnu(y5Hn|pj*TmyjtpM4Rll|Dx6}c@W8PEE6D27A4=||cK`>0R46NUEg9*>u_A-o*V(8mWKI$u8fcI61s z)c+_x<$wO{1@$Z~70n!9HP`wqE89Z{+j}bJ8MNV%dIFk4hJ5dr!~{p&0#Zn3)y_gF zSwd`uk%?K;G>|-B^2NK8C;P>8BSs@ik4c`#ifo*&aRGg>&I%r&D%rH@=DFgY8w7s)v!w4X`mgS+ z;%R+n?CYYa*o{d&~(D&uBHg+=(9Mh~XT2jqF7dZg56#wmqO#e5uw6Qk)x5*tN zZL>@V8+ZzGjil+b(H@xtKWCNzVFrl6278HR(tyUT`(q&O4zI^^&&IX#(Di$`uzmQ&Cu)z?3iJ`S zae8Ais|`=BvgEosVGQ!c-p@$(9fQ97SOzy=Z|Pt|detHcLVnpdOlpK)Z9vj{;x09} zA)-=wSaL($-E4=YzTIBXvP)}7GD$!3nJG|El`DdlTu2Q)^7f)MJDz}B4=vuAZ@5Wc zfDev@32w*;{LPYL*!LEde>MTZ!8N_oQRx-SD86fZYhN7!X9Px*<${b103i$D#Xvj#`u z`p^Yig1s)3kR&77r^Bc3ChAWRg2Y+C0~?$HAOy{yWKN>s;(0$cJ=c@%FE@_H^g4uD zUTAPt?!k(@MmPWn)mn6B4nJEpYj+*oW1VdZw^^t`2#QeyMfo(x151FV3LU~Jac z^QX~FquG@HplugtBd{(vryf1Z6)9+HU4aXs8b8vr8S!m?AN$c;topYCPRVFsggd;m}xaI?9`!_@5mgfsWdp z<1#TBQ>UUJUAr!3Pd1nqu)`Y+*+5aAZLTqx4l_}`<3-%~`1uYLo5G&xzxu)7G0nts z7k2(Yc;VmzBP${Yz(K`tIrtmtUeL+j-pJZf*wxnF$iV?{&U?vtT5_6_J#;AE?;a4& z+A0^4$O( z8}41iVgs!sN~=-McUcm+q@rWDSa%yoWBLQ;RILU4Ly&%_MO!)-KnPzBL2A)(-nOWv zX^7-`^Vb=2c{r$jXWw|si#Nt;oG6VMdh!K12vw-JiY4xLsl$9k}-@VYbVr@kdGN}~I^ect*ySi$Zs%V5f z-$|X+kA_n*m?sc7zWr?e^rcTo)?UXj-HiXT;D|aisptHY&N3RWA6Wr%XGm_JQIqA!M-y5=@I z_JHg4{&zP!pz)vQwmiOO_UsGEY~V^)=sCQ_x(m|N)CWgw$eZQ8KB;08dE-TFZxu>z zGOsN^C95)Ckl38izQ~-Cgn8==kXh}wGIRcosr}!{tRo=z@{Oh|NQeDW8gTcLptd+X zMPv6arilr0s*1RZ8)`9wO&>Ml^N#1`yvvSdjFlNJGrG?~JFCs!=)qZ2t$q{_x;^`f zks#s$MM;E(y;@}lJWuy_;?1cDUcvhNQoLb-uZm&%&A8(x$Uq?E@6h{;w4&`>QlL+; zg>lr6VGa4o!@wXH`WAS^+}N-gf~uwB!aa$O)F9k);gGCs3j;Cm_)1oe;xgc0zt)If zU#rcMkOHmi(B`sbwL&t#EdqBw_@2{WA84LwdCmTtf;^I|3Ia7c{@J z>f;pfc0&}Wjx8OvR59&H?r(_+;bC}*v1Lwh!R2NR38zUtKY}w^vDzT?R?yl*%ms0= zWYbHPg%dpCCtWO)Ei5d&&n=Y)gNiYn?Nj?pV~@9pTR8^%xN-fbIgO<`PA7Y}78nSH 
zSy|!lRWxmaseiDs4FaxPeAw)zWO7HkQ!dxQl$75Qo+#0Mkh>3?pT8yTsvlmz7ZZhO z7ws*Wc;C+3YQ)4e}X?gg8efYHGCh1_J9=@r3H3JGQYe% zp{abNxl~gGzi1uW3vBp#=HV#mnVL7ohC`~9G8VS;0r-Uq7gAiul1;rie5 zBlsske_2II1&PXaO`YUQ02>XQKNA&{rK~{1`$#o)17dcWyDOaDBSXz4Nw~h4tF=_t z3nmXP=0Kcow+NP9m%%v3DiW2YIr4R@P8puFe%jOZQ68!elV!Zrv;YSf%i7e+1dMHD zf;q&!tg?6dfb3^P4UQ^i`6ea{@iv9g8#3DqG3WIDs{k1x3BPI>g_8D)X{=c4%AlD5 z1eZ$30SdeaLJ3{;c{>9xTWMFKOyg%_osG0}o8c=Jgk&1+)55^a8CiU!% zL2=7pd|B0ag4NvW2r$byK(v+Q9G&1~5TQ9$r)Y~D`Q9_s)n!Qxjo=g|qSeFUv8qok zrPTUWAOx;OpC3Z;oZi@uCpSRYatv^D6yZ2G#E@V*k|R_Q-5UyGJjU)(!El5}d@f$L z)Rrd6E#5JwNY?q0rap^Ed6Wi%b*?BGjJ_?QiN0i=E^^LSo2|x;t+$|7_chW%x_Q!W z)h*g+AB1JfqNHfW^xA3Rt*zC?jmVaKWhDL}YE#h-#7$9q2dGQ8VqSQLdWdxPiLLU) zTgKfR#TpDU1-1Z!ske`TKvJlNjE<-mEnvME4T7#j{127i*Nq=-Zj)W7 zjoO9@hbFe(y@13Kf)KI}0LkV54@g3Pg0%cHbG%+|f!(O9YiK1~ziNqXy$&6|d%)1y z3RWg&jBz-ecR{g!m8bx+NbTTA((LszJr>O$vKKT0USt64v(cv_a7vHA2~e2bTK5EFimqx=A4 zfy_Nv3{~vRO*%7P&PvmvBU3ZSjNeeOsL^2n-$UfRV)+Bt-%gVldA1LkWrtI>bVyT{ zUFh2ar^m$H!v;ZV41%KS@c~QHJJH2nd1t4%h;dg$nbRcZ!{CWmJ2GMe*inWiLk9N` zUAF0u9>=_CE+aC6qkq~_&U&$KQ%*tHVxsu_uKH0OEb&)6s)&wRvsr0}+Dlu)7L6Vgl# zX(7YkQWm?qrccbC=kACz^kQ~O+ng>T-_2$%wQ%qvo7oa#%%YCmN31W+x4deuLHySJ z%IPwM*T?PI`kdf7quiSc1}ze&YS*13(RjL!u)gSkml14 z7PjJ(RfF1~<>R@+M5C+L5CEeAu2dR!i;?Q(f|UgLw(f7@U+3Q8 zotz<}9`zvNHC@6Tyj7ehSL9z5xdVm|l9d(`mf#b?aR?Nj*hrIozXS-Pe`!}*C-_)A zl}nk=u(!1391E^`)4htc4cki43nTaN{#hsJ$>uJk$pOozF_@|D=j)}0sBHkoE`XJf4$LaM_BNgaE;pi5SUY>i7yj~N>-C42G0#u)4T8mWiV@}X|WP7 zp2^Dh1YhlZ`ySuz=7+oORxpay57dz`B+xq+^rKd2=ViQFEZ3&zyWQ{0=c?^qi)BI_ zaAYj4z0(HNx}>|9)4=sn3N($9+3g78@6lROYO>-=nIR7pF|G5{3W|RurAUzjJ53QI z??PB5nAJ8@z<}d(W;$U)D+;Z$mFZS$2_c@KE3y@t!m*?%VSUJg9-8mla4F|U52s!v z2rF54E!o)Lb}#+5dT|ljnTjd2FC(bqYXg}Jb5cIyRdiSa#p5jxZ}b0vEdk2jI-dxMuZF~w>VU`)KdeYtLL}c=SC0h z)jrwcc=C4ec~maF>naX)lNqW`6^U)VulLUSm;>w{08veUn|c4nhVhRBshl9gual^x zEHFJ{N9s?!Z!3$RAPmAOpi|cY zKDp0}?_=Ei72#zsdL>Yg=I1ed5__3nGpg)}o-36_+a%^cW`gD)Gr?Sa{!T*kq8qFz zCTQ@pLx$j~=c)#5vNL^YwPh+eIT36}eyESEJ3M5e%LtVQp<}gd?fdP;q*(hwAcUV+ 
zwrliUR>^+Mhil+U%2+Q`VL@P8_RHxk@`g)e=izkGG=vg12>IdTHm($%R|L1VeNX0# zqdQ1D0x1B*0{yL6%>RQgkm2P3DyL`vx)#=o9+l~#L-aotYx7(5*iHZu2>v)z7c5bv zRK#stlV&JR{ee@1Q+jy1R_69Cb`II%t+C1e^-~q#RDu%j!Y)-q59t_R2{@H3l;MfY z+xob8JA2T3(9BhqHV5~k^_nw!X`ld$DA3T$$hYF}T5=;VvEw-z*2YG|<)_i|cd_(c ziwpto69I=3v0t)1^ol@XNzbrt1LckS$%=O%yvWvOhJ`2;kxe*FD%QSDAJpG#UmiL- zJe;;(x@DhF{%Ik_*y0v&iGW}fp{n6WXB}pk<9^`}cRMy)az3XB|C!^up<`;!(-x{^ zM{j9J-Nz50ocb^20B4pL<#GYaL4X6!8Khy6kXk!JvVEK6%94E!Z)8RTAXc^yd))ff zTG$|lx114OfRtwg4qPbEwF?h9PgGIGe>m)~OHLKddnjQ;k-I$^+>j<^&+*eXpS@0G zPJ3?}Er5O~ztxZKf1=+%PL(fvfj?X{{ySZna2xO~x{B_=CqnjP;sKUzNM#|vn2Ty1 zUJA1q>n<8bT{VW=pKC69@P z5F1;>rDs?d@O!{rjMc>6FkBSN^wve0Qbv)Kl}ACAbgR`KF$}bARQub=UgRg2Wn`M1 zYf15>oB=BxMXeV2%h8L-Uiposy#sQWls={Z2431>^|{_qe!TwSOCO0ow{aaUPIGi9 zOUl$mzvgw)Kwvdo+2$h4&T{`m$3u1Ix2pFur zT;t`{^qR%UD-=S{pg<@V%JJWGGX^@+FH{q4qCGRT1aorf0E6-Gti^9=@!Y(XUhNms zL{6_#>j?I=GL>msx}WBQKRJPqAmbmqdiwJHgJa=l+PRUXM25Wd@=(A92O><3zpUWr zWJ60=vHfbTnst1+MG7RB0y0=;W|}&ZvIHH7#viuN zXFUhge?l_x3JKo({{jirBDp+dea0w%H%I<=nWI%}RZuDAK$NfL)(Xg1s4N3j-4PgQ zNFU2QcZ!po-&*Zf`TJprngDSx?E>-Bja zQs8|-!aZ6XjD=z12qS%il$WWLbVif^*LGJlkhxZ!sSk?cbgD$o6>x6#ifZ!gniMn% zp=WOaphWxvrMS*Ol#nJpnm}Y_>ySteQS7@u)Y`5C-sFszR2}_fNce8;@ci7KzoeIs zO0*znt1yvDG~>F?LV4JCwgQ6=Sh+#w5;MhSTMbx!{)8V!WGu0;h&P8!T@Ke+pcILTQ0*zOv_BRSvLL3NWC6C_@t5K@t z%)`tAmPGUCTI(m)g7T1ATi-NiE2L7tCbRgI0&#aK>4E{ibcR8pD{)Q3rL6*O2^v0? 
zjC&@Og5VZAwJ!eFV0_rVgyU%OE6NO*pI1$*v%62XZapr_Dk(_8W9dO1Z`Kh--Ysdy zGgpGo&;w$a5_oF$Q8Y^K7cpv|>}vAp5{bn{kl;1v_U%|`wGL+RP7Am%cesNEcZiUj z(7CKmfV2mU6k+)ptiIOAHNmN;*D_<5_o8!H@+7uRBN`8F$9(VOkGp{pdt!milJi0L zHh-)E@VWitwua#MZCsu|Mur?HyXak$x(&Xz;21#nb7-hcJ2Kz%Ou+sOzO{+|ka7uS z;p`vh@zl?~Xu7{U;@azR5A)&$Ar>xY9{~z${B~mejfpR0=Afr zX{tw|VHVoFMp~Zb%anliH zX2$@F1DZ8CjgYJrgjBrGiRzItq@%(QR|Uk`5x^Rl#}NYOt)`;o}GRHbL>JU!*3`T1f1LK<>9y5y{wn zA@8sYwXcNOn+g2ztO#`EcQ%vOz%Lrv)Y}tv5ozf*3|uz2E-17JOMe>?9;i`lo5NMX z3e&NF@9`-Oa~6$cQ+MF%>C9T1>0a^VDvjphb=SbqLy8gwP~88wivNvE#Q&~x%ZxO9!-n9Iwuu$M)$VKF|*B<|zod3B_vA_JfS z7`NPQhV`xxv8u0?Jj)Vx?+1-EpDwiE(N#34epnDTZepoM-0-~c*@yTX*J3o;RHxAP zIohK_Xf+dfz#hVi3w(PCD`R0S%X5Fml+HF_biwI>y?Et76FPfp(oOqYG~`Tx>}vt2 z+quhq^(wFtta!i6ww-PhNm#u#B+*QJtM>T*+0{G$s-mG>#FlBMfq z-?Z-u*f;50#*jF4Y*UnURW_*Cj9T|{<$~so6>`)_55BYjJK$uNo7jRrGOtMk85z(8 zsRFuSis%}ZLBxnQt|OZs^~J*Kj~XaP%H#>l((1Fb&b4Y`Tl?l`295b#Rb$veJP^yS zuzmG6TtM2Uw`;|dx*N~{>z{B?Xr63Dd1?Q(%@F7n<=3KQ?M;1ftNnw5H2?)#^_MF^ z(+#4PxvoyQoTnnCLPy~j>_WU#Y+nf@o}^jd!jVj82|2HBL#R6w4D$@uY~?{1?`C}P z0nr}(2&c^noFzQ@2!i|L!62Q)f`boUr110?2il<0CwZOG-j+O2JSGn=p>ZiYKwt#f zvR6+RxQ-Krlo^+4k2r~Vro7u8+!4mT}TDqMHN zqr@#cdK0=$&yZ?9Gm!n=f?wqU{r3c@J8c@kel&eg2^V(QfHsLt!XMqh1=2y5X0+*W zq}OgBDalKlr1|8fO#%&6+KvS>w4@7f189>J&;c61;`j}&1Do^u&2M+N+*G&#Wx$M* z#UA%~{qfYoSwXNe=Ae$@?m1jPyHRRcOB1;c2Xi{~rZ4WMnwM!x)<56^UMfViPGX?u z->JZgDw*CXz0Gma9C2R;K5|^fa5sRTcA1@U0FhdTZ;#v>b+c^8Dnp6@bWjl=`9R*r zFx|0Q3D3kyri2!DWtoE7dNC zD-S0qR^CpZ)~9NZmU8VO&IB1;_1iYDrcR?Oq^-BpX!#KW$0ay`$v})8FdBeD_H~tr z?m+QeRUmwOfVzNGvk4|$PF(m*Pt$1GT(OcJDq1JQ-Y~USjgQ=kPZ_*%9$d1}>W*2m zD`lfExQ1S>OFJdFYl(o=r6^ZrVL+~NtVyoxuDrP`%nL?J&(iOjJ+u1!YAnrD=c5&{$o3ND=gCB#uieGnx+vb)pqpL|!t|!unxBvZ1zJhsj5&MlZ8&w? 
zzq<>*_X3}8=+9DYcL8eM053)Ri2`mOj-EGp8mGicyq6GiU{GU1*nxP)_!f_8T?SlN z1$W?@1cMH*j_3{Mps}gc@|M9Wl*Pj``eW`xWFJX!lUfU89t>-9*HvB2)80e^ZKvDNT-)$$$IFP*ZJRO{PT;B5N3j6f8*&Mo$!k`fqbazS49 ze7s3-E@&Z>u|M;HMKVR1*UseOq;7Q2)|owe4E4Zg;(AzexFBZ>vT@f)>Y8q6eD`ne zJ(S+W#LKybYIte=W37|l>V+?fSPdbK4iH3V;lheJiQ1!neu^s>TLdG=B3~W{M^wRC zyk^G??v6D8#vQXBt|Z!kO4PZ5z96ZMr|J&}IvR)Q$Rd5z>3_}_Pp>itT;^KO(!O)c z;k)P^yC>-k>wJ+OT`N*N4j}#Si(`MI7i?wbYGnAwZBc!!)vs3o2-i4u9(a^`uuDjl zOm=-=mPjqhEi3);#v+5MFvQ0Mc^_q*aiMX%>M?;Mzkm1PU47>2Se*~Gn&dEZgv~XD zgmtopE-ppSP+138wY|UE4mv9|q}O^>A4}q88c2|hy}ym?{&*pJi7ZvW&81F4HM{h4 zCRQy|>{Hq}u5b6!{a)b3s`WdwOLel4@N(Bpn0{I#KRFik5IrFv$aKzhCItR>Jzw=( z%))1$DtLht)N37*jG+z46n7ET);Mg;Hw}H&&4sPws_9{GjNFvbbUvaU*i)wBKuinuH`hqycJdGzS~+sw8kyoM1U>58?)GT={s;E|yTr1^SC6HI+@-jgsf~ z9Q^|>`_6+@+}!jcF|~j+ht10m_Brz&TAFBl#oI+F+sPt&hf-ADy|=W{Y(a98jrSzg z5fbUPRo8DGIUhhuSIc3PWI8=>haUvn{I@qI)|K3V}| z1Vu1vQgt3`&pzMDpzO+h;{!@*@0K^SPR5LmV;1UgT6MBmJO`21Tn*lr(n%73TTl-B zi)SKkFVPcXwI`XWY1X>_hk)eG;XQso?Wfnef@qV#5-`9~0)G3rh~Yn{i$Br&r+R96 ztYp*@;GGFzjNq&^Bm0#|mKg3tS zz#)@4)5{=5-U;f{FEfjnKil#ECulm@ChbBSEEoQ zR`38^CZwvi2ntl)qX@hHZdi$n)_c^1@V44LjrNem$Mr+y!NdD1iS!gJSdPYnIP&kV z_@~1g_2ilXk%~19B@_1N4CT}ZkdlWHyW{2O=YcekZb2O8j$oFQrR6!D2p^63=h2g9 zH0!3qBC6HYr@z(Eepya{CJvSwDv==FzS{yz_kWVUT(z==JaXCjT?kH99Bg)xcmm=c~a1 zl%4(dj*8}QQ08RmXa+E_Ki1CmivPSfjE6;^KsyWN7_8DYv#=c0d*6*0P9&57D?Zlm z{Yb`X4ei)!1rEDFxV}w<$K1|48;UdWVqvkSu&_>g5&#QhIik(QYI=TnTD|OfaBFx< zrb*&u9hj1?(HKJyXY&!r)Gff-zqm>>(!k1eBRoP5iN5GXrdx1ZU$OJ%!X=8}L5Ln$ z2u~XLX&Mz)aTrPOF4|T&K_>kY-m~G(9$L~G?-a_DQ_Z>g|IPLg+ z&_`svj?Rh3sZ+In=8NOp+fQ`>w3_sPqAPjWqsn%;ICXVyWhoD(S2qvDVG3zAR-T@Z zx)y!S-vIma40U$wLpO|^;Fs&OnQ^Xq0jnAgfpqs1?qx2j7I!_|1Lh+9Z_Vp(T#Q-$ z1FL`Xulj%WFTwmj{j27q2HU0KEwxY`N0yuF{TFZF7$D}BleMlvl7OQjhyBQP%6bh2dWW+N{K-M=W5N62!A z)JGBz+&TimIKnXgg`&Jj$(o- zyIXSK-X# zN@Ik?xBXdpq~(3tKOL4eS9CjPZ|5AP5Xc~5tfG&O=k0LwViV7_AI!+nadKCbl4nk8 zloenclHUs>QqmcSQwB!)E=?Dm?JUc?oK-TJb~2b)k}BkL6x~ZSD0duAl?!XFSr9+J zzuSIJ;>)P|X*J>_PQO)|g6%DWTX(Se+Qk}v+zx&5FSg%;M*DaD3$Td!hk?C}$ad9l 
zKDK}d9Ngt9d6WW0p+hxAFhLn*o+IEdgh4C-0yi@7vPye>2;d0^pf@v;Dup*zuP<6B zgNf5aj0qyBm89^EB5?Kk;s73(r`?D3F`8$zah(-rPY25|RSV!>7~yO>xw3TAkr$4s zHBuJajIxGagm3LT)Ij{&Z$`QI3#ZAa4ldsOIUuX#KSq%1W6Qr0*o}NoDKOC+tu_K5 z@p)q~e)-i6*8o=C`BEGpbV-gZmk11#57#wh5mDu{Ch#^44J)-|<+UA# z1CrPBuzBUd8GZP;5&sPiUGa@+{yW+}eIw~oNqKZ5OW3uB8SvtPxCegi;IvQK{8Nsp z>%xjmrlA!nIeEhn1+^)1f?tZ@F@;ks_Xfe$4Dz$@m@)^G!d+ayK22k1UC@G)Fjv>X z3s1g8NaD{%&WP?9u3}9MKP`|69ET#w#NIA--C$jlr5}$Roe$cJ%>QyQi5Ca6M*8&i z4+k^!V89&jV0G{6+{`)|{KLUk(JE{%#)0G`Bzs{3pRx9CpR*E7s46R-0z8M-!<%2ckTZ;IaDG5|tG6DXcfc zQR&DYd8U4}n+0kg`E-K%?kfBnPjuX1hJ%hu?|F|B`%mtVV#R`hdXJXPFHYq@Dt*J4 zhK`7mC~vgTYeN_$M&krL4Ja-I9PA32R>+y96*uXXlNCU}DaED~(J&>Oor};y5J({cZp@Mx-^LHtL z1(?SpM1>!Wl0eR9keXDx z>)6y8R0Idd>kdnwbDD+LpVu@hj41xBtwj78Q0GF?Qqc}Vh#-qy7_J`hD0Wf(Q3x3% z_6{U$M|N~^eBp1Mxcg9?IhMdF1^2rOB^Wn6W;2I(CJnEP*l`u+9(q@oP4_PVo14V@ z*L2N`Hy&mSkoUuH&+LCAUCTO}8rdtE0v_-iI{bBh(|qZW2du+9B9s7L0isB_gtRgx z#stJrwXOy607~Qw`bK~f`G=Rz-8S(APdp2vErY|DXD*Q}9(;rzr>%Cb^er)l19(H1 z!ZO8fzXMu#yMWf6GN5%Q z{8FMlLbBgA^aa-%>JMu3mzA(n`9zy$om=ebXE3B1btGk7GkdOXT)MwI-y4?3ZKslK zpjo)G2PoogX$zGA$up@rgSTZ7Xes+5&jh4X9?+6G@x7;;*)S}^DVmbBm$#ud$BLO# zNtM8+;T0 zAuDFFIFqpyQ^AT*(^({JxIu_c3wJCOepl0NS-v&(2ccNUe6HG3@Q8}#$T6~O;j1@6TaeWA0T+( zfOE-;+okk^U|)VZIg5P2ZR8v;d427( z=5&XbZF*>`#qbz7ikbZ0q7F=t$F;78=-%r>t7x=E6k=$qpt7aYBvdcXy^u{aP~^E{hC1v2dHC$mVg0 z4-H4|g}MYnVzlpP)uf69z3FOJ??j%S0S$%cpC$C^k4x6RFuol;0*+fs)WFv%yE>)oJHUjLFYI9g9+;HLR5YL``J1B zw^W`&#{+i$a1?_yw{hr`y0a`byQ)lS7>&&kg5pPU!f#eaEkpY}*!Z5!%euFkZsW{R zGdU3+#<5P-4IWr_PP22_-iVUg%3BBPq;ru~(u`@JPda>>Z>y8emhwr!G-_&B3qci= z3~?%opSt#UHV@+B zy>y|UfXh1~zi0WVvs;X?>qEb9X%SRmJ~UyoKYzR~pcha*kX|kcjD8Cm;!LmBK~>Ljt9Fn*E+Yv$jD^ogpSI3Xp-0*`VqgSC`NY$J+pJ zVIuYs`bA>7*(%G#52=zBfD7srk}2}XZ*K;1ynf!D^*ksxXzg`6QzFSwN3ULN^>4?# z5(b0V$C$8-@wRUPA2kVH3DyEEod;2>bnS-29D`+`kYeC-~Q_l#QCELwxxS z1c>dIQsXD(p3nkeEvWk0n3qlA{E!m4*AU>r%+&q3&HBhXb)))|K_lVj4Dcd1bqBYhzYO{^pV~*d zjB29};xFh}LVUFyZ{aXBt;n(gvl7Q3sE2?aF_cWatTYLBs5GW0{{rmD_vSMXri-$l>5hJ-GV4ntf3KLGEe(q04wxa%TH6Fjx 
zx%&1tap!0u*z!q=)a`sqpc;Dw7+ID%7Ssih?Lv#w(3UM%@}gsCv&Vwq^Rk-Xkn0cf zSqdH(+@F~UZ4ePx!-1WN@cIbYzFoqt)jXihs@my(+&2b0pu87TECCFU%I0yK*J;EQ zh>1u?6O{R-@d?Sy=e!G% z>ar;^5CP)G^dmmKE9?-Y$2FS$9XIs}jjE@o6%NkGuAV3AO4~iOFR1Yfy*q>ipqBGn zGy5COx_?6LFU=x_2n%S0tNnHSl;i~y#LaJLCl>!i(Odxqd%n9rd#8BCADSjc=UOnWWM^8c$ zPeB>6jyUMt+upV=p93qQw843l<=DV%pM_A-JBlc1dWB51!H%{)2I<7zz^KH)NAj-7 zaPA6VWf3LPhQl0n*7JSjVSZ;+qz14{J-Bz7eV8b{u`gEEeAV^r`Gwn{Oef*K-4W4a zV*r?D3GT|l9$;mOhVxMv$PZF5n)^eSE`=(A%QPHH_|znG*{Tw}ULxN=7Js3?EQY_s zLck}yncD?;86cABzeF+x)0ar54~S%qA}^6_Pk!}V7Tq6_?89fx@eU8V^qN%LEzZFl zbi7}YOjTo!?In`&{uZvkv77x@xc=Kj28jLNtC12C0Q2%2F^Qxk(x;n=*ys~=O`(Kv z;{Dvt4kAt&*puQqaKDpexBXU`BHA#`TN;K5?N=iMVn5r4h5|giUN+>jkv)~HC-$| zb*oSnUxS4bYRM|tS}6;M2T;JGhGCnsYDE=IXp^lZSkNHvT0U1&fqJ>{rJW)0TjakJCL~|)S;ImyK z#6%pE3g&TCYO?gCGlb}YPW_}B)-|poNsq(P$7i>7Zu8)E;|Fd>B+-pvP?iC>v(hM0 zz`DjFF)9);GGEsHjh1`g6=+pBYtK#V5Es_VQ$B3 z@*deSUa;a8?59M(m)s4Q?0r_mYs;J|KSx^C3Wt3X4_f9gGvFVkFPwxU86q78gEI@>lAe*w%N3=sK;&%+@NED?bQ1BHkU zE$g%W=LqU14*%%!RPDC$zrzKAAJC`v$l40jUecKJ&8}rg@yB8tpN2oFs zntbLque(QcjFMg^-5*{N3%FMLPKW#^ zSz_gaC*yKHb14qQ=}T=lr_^(##MBk#6>?sT?E8{SM>#=kn1IsE%MtzskkLFjUj>?m zAf7av(f%3}p-{_y#w$KSq(Y=8x^==;3?91{FRMS)tZelge2woCwCV~ z;D!}+32ZX9x2_El^xb2BR6mO0*$69yHC5$}l1C@}Qj)bKmV)S-1TAfp&Qsg=V#eLj zAxX0-ACGPK0Xxp;%PWj=pOeQdqZXrV+X%&%R$B7*gXD)!XjfGRWBo*x18I;Oas|4tN>*5ke@6l-8hi9!-e8qg>KPT8JSXALljh)uYIHB)H& zG9r!3MXvH8xR!G6VR7nKJqzUA!mr4xMW-VxtJ;?yf`^RQO@DbmS1rV!}K;*2o)E+mRxB53m620N;H(i3mIr=ekd%@ zF0*)3)+mWR6E~ykG1VM?^vMb(gT~+^@dE+@zx?8bi-ZMfDgJlyzQj*Pb0ExBG}+p( zgplDmhoi_AEpGvxf+;UFn>t)xcy!^ErnTrdf;clsUD@l_EfOkSm+`#$vC35 zL3%@;&`{7{cq{@R(^1oSpHh!j`emybuI)I9H)?-$;b08a6ovELUY;@E0+R9JNnlTN zi1UUgMqv!$@o7-c;@VoY2sp!uUuONzQJD_y4F8GgYn0FI8)?9RAH?SZ_KnwZAp6FN z;Y4z6n9_?OF!AU*Ji&MviPSmR#y1IsZosSKpgP!uIJo3}YWa~z7S z)p{P$e*yE}8DASt*mpJQsWd>mpkp05u$XW-h&<{KLbyx8oHrIeSYIjP9Ta?Xc01$gc4Mgrg9HFRP+LS z>R2Il=ps0&78fOwC6V?qYxQb@FxD;bk&o8;4WATeutUze6F#(C;1^F+PJP#k(YZD;g5GKToXw=OxNv=saWge(Z}wx 
zHHETTq(>lWpdT2s*PE%w+=;GwkS=0sfyR3iZb3np+NOEC(`ab5Uw7f_#KExdrD_f(<^2fiRh?YwW7yB({m^R1VQ!d$%TDU z-!5Rt7M$0*uh+!#CG5r#^Z|QHJJB@`c>~C5L!#iW@HGom_-8A;2mKNeV*F%28DS3G%?GNq6+ zZTB(tneL5?JQ!lQ}4^5IRz zDq$2_;|}zE@P3AVgf8uiCT9mcvW+)wN660TbF_{e-YP=b6h@cDjJMZ1E_dn@Fep|8 z5W@RC%A)sWO`WC>Yj4oFcQJb>quJi)QA&P2Oz{`YHk6oJMW&Alf>$?JPQq=E4PR8$ za4Wo@J1xD|^S5Rk0SuSPFAdi@(tW480u{XfqcXdE&uwW6!v&)a0$zc5C8B8pD8?bN z71xvXgRDG8JIDzf&R*ZziOpaC22N7h=6uO!RSdA&|FoX%r+KT}SDj?MM+b2|zYF&C9FN zMG%3;jkgn!S(Jeim?-o6{P$E}XcHuJfIq*Xyvi#Ni}^}4Vki@?OmSFd@Oo-Vb(1T_ zLyBU-EblS($C1S$6s`yLEJOI>Vd`IQq6^8?k1=1W>RzQJe~&=w^Gtta5uR!D?sHxp ztcjld2CrcmwHLBXvBB+Dp`t4utAy&)3Vj_{B5YuNZleGZ8r zC=2s_25Pgk-JNPRm_%d8UuLqNwkiYL9QHhnT zk*w?{aPn#P@$YeopY<}(t3xpzQo?5ECkNfaN7GrpRf$VuvDeVJxrVOmYwM27Hc{9y7{Fsr!dcHTO;O?$Cpm1 z6NfNv|i|g$$VwDA(~7b>@lq?ELKgP61~@#>K=iPE!(r5nC@4P-vrFhX-6OC!$bGE-|7#= zxm?x1@i`!XQ>VU4b3{f$FuIBQd&9HAsb=h&T~F)^7#_={%+RbFL!@tpXIwq@j^F4` zc|Q-g$CrJbLzA+OQNAD!Am&!vh~uHNLA=!yjmrpbvE|k1jbUy<#PiR!ye-hPB&l71 zn&Lilz8W_oNhs&3G*bW>J+5N6_q zVa|J0c|EncL~tE=JXMh&l-t-c-ZSJAI>Wgs#+a^BOMY2ifuV{_z8v9ofCQp^yRuTa z<{<<&cBqd&@VFQb0v+3464f5VxaU-NJosq_QxjPoufdV9dHUX4$>5!waz05>02xrB zPq-@O80Q_ovwgt!fUFLaOJ?NT=#K5^Q}c6;1rhnGYzn&VGQ|8yT53|HkBnlJ#}CH` zlDr*&HJsorgsnS?F1JOKCg1c_lsVus+A!R@nDKS*X7DbOW%;-2bB+dcza8H5NW5Hz)X&L$s)d`bSzM`+F@yzMJc|_$OLKDUB>D7ZRB^LQp*n9(-{8^vN>zEd0+HI%2`s7ieP$Xw>(6b%y7tOGF}uhJ18736 z6u>b-Dp@XYOt3W4_wa>8A_w@@2;j~XH9zP)zGET{56X$l%j6SkHVAQ4fKxOKl0hge zM7wUprHHPHFe={)gvL8TxfrAfbJSuJ<_Xu#+n>=?no1Vg^3vbh^To#$NvG@$3}V#I zQ;fRMmwQJ~{X!_x%1Z|%BR)mIp-C?R4(+&R0JpYm-)NZ}9jI5S5@Tzkdrb(wXO!G~ zPy!qL+QeWlgYk=@nrIu=r_`3aXmb?XXz1f>SK9R7=TZ3$9jeckS-30q7scNjwH_-g zx31eb$yfs-qQIkRu;OukeZ>y9!29*!6K&MiE>C(=xQ&2uFTskx!(tQf_ zn%sYoGc(^eY>Hh)d0Y9O2B~p8&$jPnDSzx%A5a7&0Z0-S`9>?y*EiPLC@FSyt96UN z_?Bu`Uy~b-GzYw*ZiW=W7U0y{zdE&}M%_dU4@1dCI@E#{mV;w3BQJ(70xzsd!j+(I z0H(T6QnvX}h?2a?rWtSAd7As8HD}^Y_}3T1FPhU(KH#v9koE>Kc4lE9I4w^OQ&1<} zXcG|ZDp%l|$pt003Vu>T@GEf+z&G#8T3X%p&~Ts4g}bLd?3;Q(NXu}tTk>XJEsl(? 
zldgB1GrPt>`FlQ+*}TYs@ep<4(f)8fRSQintGp!xd;N76CWxXFWU1N4z)G4%fnMHw^2n zYJ|fET2Bv^95W2HmH;IR{G}3|&#(Q1WpLIa6@h_-OPL~mm3}uXq&{Ic_g1QyNO%OO z_H~+5;)OCGpe|qmEK(NR)~h+0VxCir6a};|XPc)<#W!pnPRyfBx&wagINPafw6+5Q zNDX#(ewL!h%nGkRt{&uON z=51^ZH|ckuZAx(rXag5|Kd

`>&W-pQO2^>uKkCtg=$XbN~_c>jn7}JcbUuPC zpMzMtY~FqR{{}`}Ai)c6wiEs%thsU@c}R>l$u%nZ3#Z zc6+?wKGPU$+4 z=2*J-X;uqxW}$o#!_)sRV*D+jGd{o6ne!PQKxd%39TTBY)R{FOtF~w02096w8>#l) zWO%b(5J5bVOC$93@8WS8IJ;L}cVSxRC3)vN4;rnr+6Z%j?gTp=X$n})IEfFOnU?WH z5tiHUJ}=2`(T#JyJ?gmDl&nsCps`skbt#d)3@+KL?@1vj0wbk#qd~8b1OtV!OjTas z`r3qR3)*AB^!jW3n0Ok+9rvDS8blC73J)4!P!m6M`mrD(rOI6ybWFJ+r{K^NPvBVO zEugA}Zvc|YYEAX#A|cPNi&QB~A)jub$mEO8LG4vcqIp0Ru_W9Q5#2ne_!=$w0UOW^ zjX+w;St>1MuTxK9e$ z5)Zr$y=CIvx~CUGfBfX zG%5L)nnZc7sQMf1-9P3B0>f#e)`97$HbH@k8emckIRWbBycr0<-i5CM=!q}c9+pJ| zN05sU99NTLD_&fO3rj3-z`fxVM%ree5_84lk}r%0O+>u4o@^&_pv~1@oB9Sxq+GWS zmCl5XfGcJf@qm+T@-hU)5nZ{5O^C#okPB69TO3L3_5>1u?YzKq0ETlk@D8)qeG>qN zvumC?1uis^jx(|`0!bN9?4TF|!|6)2PF!R0OhIPExG%%aB_F>+F|ZiXZPo6Y@B*Qh z!m)kjR0m^p^Jc4FnK~BZbZ*dsL!}yDzW%QJ#(v)972o2KYP(n9i4Fj$b2@0*mYD`^ zY#XS@D|4UlzUtdBHEeew1Z`O;#og}ByzzUlL#ha1lZFOt(mJ`{gF44$%DDtBD_HU% zd$UbEf)X^>iC`au(ZN1_QM@lf15dHB=Q!IY?bBW^hWFp!G^A~(b?V9Gykd4iYF1z~ zcRtq^KbwWdlr=+R=H10cx=#nBUBKo{{MjDy$?Kzfz1&*VJ)`&O0466&1v%Tu13@0s zmvkf#*0K1~jI`!b1{^9>mi=QlUoA7pRa>D6R^K`VKsl8OHAy2GENSu`&2c}l=9x() z=&%W~VK#eySgNtr7K}a7u%WfM6YQuZMCpEncxsZG+dMaZt4Qa_jR5gAM3H`DlPdnQ zN$Pv2Ht80$O#(sLq`9*;DOUZ%oy@i7y>_<4{q=*vBgrzEpyUJZeMOYpp0Zj|6e(9* zRA#PTdBYHejdz;c#kr1INURrxO=>si?%E5~5BM|hGm72RyRP#2O(9UGqW^bp_d!<4 z;>=wzvhE=X7+&HM0=_`p<03}rS$p>5sC>qA(UCOW2LJV;rT5ZrYjVN#so`d;7CIyrRls@r1Zc%%Zl zr~x&lp35~{o;Vayz8EgUX>u?B0Xz+xl%b5~oq`ZmQt^Bo45CF(Yov=n-BD; z_wmY}Xxj8xAfi6*^~QSO0T0n9C&VjO5PeGjrC~Y;`W~Q9r(Jk|BfIuPkX_vXWLKpc zV9PHUI_K{?1Z?@&Fb5Bx0mUda)qI$Ys2f@ zpJl}JWL9Z2s|seo6l-9$2*`p?9Z3bN14ZyV-h4wNE zeqo$m%h0;vm3E`xp+#=3zGIjSni|@d7{+=dt+>pv!)l(En~a%k{AsON@0L%V-t1+I zf9w_+V)v5#p??FOhE>XtaHIWFkK%(#{M^}kDke~9h4z@ILRMnwGxwmJ6r6TBU41wdn@`MO4nSMF62 z9}mHZKZsab2=2qsqguaoaTPb-$o6IltCkZM7g_38`}EQMw7v(5uT$G6lL+M0;*r)x9@kz@L+0&d`JaLDkRp9&b z1%VMJ{$qy_jrV58ByEk0SHg)!86sNO;RHFFS zkw~{xz@qJ1c`$Rm!NXG|WsxXF+n?~^8+^2H)m3|1ETE+AbDyXdw<=o30nzcmn5TXk z(yy}PoDtmI5i^A#5<1e*Uu(WPL(x0(aS0T=Yr__PSLjo4ccq-Eb_MP1!BAsgRlr94 z(^!k*Cb^E|;>O%koDx~1lc 
z5&7ApfT9;FDLDcPET*hMZ{`cgI_szlbwT>^5Lo=nDB{Db8De5HvCdz*s+m05M+0x1 zYL`(z_%H3Mn(#<;OGO6;n8bUka%W5|fy9q8V8d6#cPL5i8paUa;DCHC&IAHfWs{BI zrMIrrlS~f4NdPJ@I_c9wY%2HnW2GR>4v5L32TWF?j74|*!#3=uZQ*}w*LFfQ#QTv} zG<4kY--N%fduU!dzDd0h`fA4^GB29@pzvsu$F*TD8LaeSU`6kREs9L+Y!X7nL-24W z2q@8XEIm!?nwUQnm6Ncfb_GR72OwA*1D)@BQ6eGv3ba=r+~d(@@;_PJ+ATVwk()-)YT)DzW+KP9= zFyoosJ1*_1Q>!LdA36h&80HpAElKrXogCJ5vN-+UobDwlbnOx}m!EBXznA0k`fEKV9trUel_TcK` z=VcW4L!hdBdk_SiDjC?$_M(P?;;FAu0EL)e11Z*tF8{taPVeM%9fkN?7O5nbdp<8d zEu7oHHCaDxJ%`i=Sm>l(VU1h2_C>^JbmXjL5kWOG;26CcN=CO}I~7tF|- zIj5-A;PbUM0I*YgR!`oCHk&pUgKI(2@xo;ZTl4nEs~j40Te93Qli`gZ?ZBCw<$wz$ zRMWoUrDom%l*+{vvh0R4$&p9!tCoO&O=~bm00)T<)kEE2~^pq%h&R}YVOQKA0*Eu{x z+<_!Ly>Ni|;;t!Ui?EfI*RkntNxYF%?RAKi^&YVw z`8mhCE4NxviT9;Z6qgkC;ti-pTDD2(T@vUM-1y$rf2M`=Z%ZuXxF(^gI60C>M`@mi zuv!ep9Ha@ZI4n8=SC<{u@Sae$gWpEtIV2-6a37ua!#q{+!&?WmD(jb8b&jIAf3ylX zG5D51f!LVEk+V#CkJsXyF#oJ0*@+!R>&)O-_Ygkhi-e^C~FlSF5+ zdnOL#mUPV<~ zJ7Q#@=<-L=6mWoeiC-KczoZB?-3@&y#zQloja&9$(4kaqF$bYHiY~O>^y$X@=h+R7 z5d0n#Q2Q)=N_BULf{y9E_ZVd% z@owJJH=bRC&xct^xzyIMULULE4sjI>Y~9mKDa#xjW)B0RsvrTN0+Rt-OPgWLt0-UQ zb$g(QTEDOgc-skxstSN@UZ4LR@&&c=W?Sk?{A!TzVugD z7;XV7mGn!MItP3{Frdl6PQkzyFjltz23)cSfOi0tO+Uy)Kjx`hckZGK%D+^NL&K#n z;a?(jVAPK<^rH+XZnqwM@SsCx1nwu7`ajn^hb>G=6imr#wfU3omO(k;MQ#BX z%YorPvh5FUKA9t@$>7vTMZ(R{qa7t5xQl%qsnJBz6mO{g2 z;^DIW=yi_)S0Qs(WKLzSYtKq9nzxWqJGX?BQF9nq zCS4}ZeN()8;zaD$_k_YpAcyq9Q!%7C&$m~l;%U8dI86POzPNs1vKTqpJ&?yAen^>_ z8aCH+XcCV^uv79joAAJ<`m}x1NvK&i0+4jNagkWvo8>Iu7p1E8TI0TTRSX86VeQ5sWpH9SuS$W(2m#N%I_6B; zI61iat4Wl`_-gS=jN7tOG4Gk)BuphC<(6mBE6u#|Z$JE`4(6m44)7UGYrfqu8*tNC zS5lX`!A^*B_LV-RX-~eh6;DC^woe9ItpSTgopI6RXMgwYw83Z>n;Y3GH|0l;Qpm@{ zRjcma2Rh7cH&Cwe*>3c&uYzC}haQ4X-ofLfZus)oyvcoEbens2^JB7hk~2a{Vo{6l z=Jb~bPC>^C%dRF0qF`UY!Y0k_t^1*OVx4=oJorcFPn$)Uq{AYk01asWr3P^RUIT3H zEUg@M?DUL(`0<~Sz5p6&6Y5(KAb$K0H*7GAxXpr$NlDs2f`Fo+U5ki;-Df^dTS>2D z`@2^=>x*u+=vU`xH;&fznM~u2-di7YQ*-y7kX@U8>o(?IYnJrEHA)s%?Mua%-JK`( z68=bC8cMX63e6m(IF^b#u*$F$o?Y2PuhVYhxR1ghebq5$LiZi-Zfd6{=F2kW!aP=+ 
zZgH>8&4e^N3`K-eMvPaShc$(&u5+79!f+!Ze56Ww&B(hQcP?bMg1_+J8Pv&19hy2+ ztv}4DX~?NT9MKu*_r)Ym-caHnY$Fj9W5&a zC20;lfrB+2x?JItxAW5j=}*WnUD6A9wY)@1D9yN~u;jw;`E~rxV^5}va6s~9zm)tr z=79Y~@~0m@VE6wL{SlJEC$rNJzX^ zJD?t|pIVR+2FKy)1cxXnaU8d05{Dd=-5u57mz0x!)3ZFU$h;KeqxXe2wqA~+PF?Dh zD9&dud?rkMN(Wir(qI;_R#2pZP@@VW?%XV}2yR~$9zxc@6Iu#JAWBNfq9+reaCLwt4mMzA#j}Y9vc@v0bqN) zrukeA^BZfbaa%X;V^8hYA-+X_By%X1z3RnJR%i?Rc}66sCo!E~A=;M&K;M*4C1u!? zm0Q!}<;OO9K6^nqxN58i0NeZ2zRSn1&1)=rnK`vPZc3~Pp0@TA!lH8Y-32UW5v&u! z&v5ZOe3~9ncYsO^DO{ULHNL#9XK6{JT%)!w1gELWxbgz!NI?2^zm)zt{Jx!znSr8( zj+L#krQMG{{eClGm(l`EVka7M4W)g^8?5k>S{lWtAK?SUb zR!K2Uuukz>Fmsn4eT66_S$OHql9#D$7FAg`eE@meZw=)$QHgCRcv2#}9P|MPS&~iew3RD>CRI!ze9jP34LX3M9>~yA z?JPUpwfy1Gz;X;$aHt2TU{+Gw`1+{n1opbpTN>J#!@XXjd$k>&Y{OfNQNZV4=B$2F zQ>#;OWfpc2KQcrBtlSPRrOg)7u_Zx3w*Cxr`4D`Rab=F(CMdfH=aT^0n<0@63rN&Q z8l7;VjZBu~MY}Hb%+tEd7Ty!4hwSEq$!c&aNNgfXrS))S9YPmIE;Cft1XneyBaE}p z3BYznm!&MhsRluW4^!Ew2dhjfNnk6Ykxjs>zM!`@(7NSizqtGv4n#nYKYBlKvbB<; z&~a6p9+}V8d!2V86}O}h^=aTXMQmJLk>n?Hq+CD+mZgc*N>>?{Z4o~Y2d7DVxbS9soK)X7}yCw@L1@62+m@lZ(?U@Lj!CJ>>$5QBWMqJ3UjFIPZ2qv z$eK0W1sazTY8%=2Rw~#i#09rR!*Eb<6LLpp;xU(e>i9IYT+SvBN0=~peae+j3pW|v z(@J+q!|dje>7%;ER@S_6;fidW`WtiQ?UL->BPOLZHqSMfVh97JH!G!J;Zw0C8>U;X z8Ym+=4+h$=*GKDjx!_b@%43#)El^j}wh{UKBd3Athm`!>3He(gSzNOZuPoE$xu86z zne){W^eiXxIKb4w12?5C8`C z=V7$+zh9=Dje(&7FyU6u;E&!R)Mb!&*d0-C3XCBFj)GnbcJTyE1?C1;;UG7=sG$Ael=3Qmg{omjU;~o2P)Iax+jxPc>6sAF%$shTGX!YGuVp;@Ub`g?RUG9 zmi3SJ!?p*?op)cWUy;>RK*2(?us(c*(*qI=ArwMcRayqI&x|#$I8I78zNk6}T zb2w&IXdxl8HT#VUzZgf&$peMw&%N0lwOESkMU(Xc9O(J*=q1-lwbS^*Wyd>KR2%&r zFxUp7+!hn~(Is+o3%qW@O>hjNb|4!C=fcyQ)9g32T_LY;zAQ+Cf|Vu@GYwZWmXND% zf{#m9)6Jb|IM^){2Q#tCj40q21@{f zboymyn9je2X=mVMN9ROis^g$T2PL6D`8c31I~A;wH00v|=H(KN)R5(r-;`${5(pZT z8xX{xFk@_4He_4Lmp^eQYw?jo&&V*A>FjddntF{o7lM{S%wnua&hd<=T=G(zl7U|r z(l$>lyR03vo7+0)JF_siCX9!<1rl75kFbq>y-7A+@wua2w8xEqxbsl5Q8mW*Hm8ob z+e2Z5mQ*}aJ9G14BT()1E6kN5ol1!%>f%@(nl10;ZQfV3WyX4ouaI&vkGR8Bh}s{G za&xHAKkIJCzQ36N5_}g`;x=7jz3|h{Yb%D7A!rmzIPe$i)2rACP)TE?rj@vbnJMx5 
zSo(^4#B&yPkpy2dp*L7FCTZ08H0v%GmU*x?$vipMr(47~!`XI!;|l+3FuMQk$Q#U{ z2@?{X>V~#YYz;ZQ6Vz9erbn)ey~dhBiL`)Mj%tf1*R(OSraDS1R=E+4&Q3)dhdrMz zaZwA;3YmgG%lo<;D{96IjB%;Vl_e%t^R`+dui8F!gM0f-{H?Cr0V<${ z@T>2c|C7t#qO-KJ1EyEo(tWjwv{p8jRt7e9CI+@!bmZ+_y;KhE zQeEw`qmI(;)a`Twdu?4*6eHy{;(6r#FZ;SBs3qqYHd{0M*b2!f_&6#{o|CiCabRnr z0LBLCA}RA@CCCo_KJe$~#s>rVse#4ma13!f19Mwi6AMF2+f#O%bnRj^vv8-xD)yp1 z(12gP{7YiUz)yAUO+fmmRHUECL*{Y&O~W6c?^*lM-nM_68HV)n0ygX8;RDPtyGxh_ zl?!+BJf~HzxUoOBopG~z!dpJoqJ;jgvpL0#=e@#DLe0UQ~VnWLhdP(vz98}|AYgRRsfS}`=y>;03vU#1MrwE7%K zJmPB04P83MPupVnsM?>l0ME91`t-1HSTI;H7cX9fkruO*d`ey5FAC@c1i%9w`1#qT zXK7(*Vnj!4p{rwOXJacTdG=z_sDYjs;NoZ?+J@x0zb#+jZ~wYj_|#w8((zkZSla18 z^hL?p3UCYp04SE3)!)$N-;=I~73_eJhM?y|(w^TIWUlJDNkb(;zvob+93mS3Lk`x! z_i=s>-%4V?=RiGr^4u2SAPzhO1myYKvOxMR2SFVWNYTXZbinxcgmnvM?I1|<8318O z#_rqVy!3m*{~9OpJ$d?+djUBBc?fJZB$xATaijbp`Cq*MExczTsQ{P!e{qpU{ZlSK zmjU$j_p^GW45<5Wr2N-dL}4c@;2_Wz&|HZ3`u=S}4<0}Jwom};^rsyDo+oi!q9q~|C9 ztJecPIO?nf{K$UG;qO5~zxJrmbBWG=ZjM0d{%`Emzga+4KYlN3=*c$_6vJ=J6>x*+ zDw-^YJq^NWXq9^-M=%!7f3;@@!l;pmPZITPrK5ocdr5TMup zh0E!I)%OyDo*r;EGI<1`0se-~-xC9VbpOzR@sNc5xAhKC>vIzRMbH3^91ek<|F%Q{ zq5O{iw~CV=-8(c`^x1DP2}l&3pY|_q9~w3o&>EL2LP;NW;Kr z^Zl0i50ARe;1GY$4jRSutf=__?!S2!C}fboadE$=4-H;<_ESg{{8ReB5D7GbAp{BY z+d>riGd4eQ9?-aTXVuDB?9Uhg31qAQp+v-sH zEr*}#4m9Y&S(CJ@dOoVZkSTOW{8{2G8s{Yb^Q|3pzci%z@Y}N2JP+}oMDWlpvu7jK zk_YD``-?XM-Aj3v`bFLIQvdlYf$q9H`%0ws&qMtupAK{j*4dkWXn0Ptzlf~*q55(l zZCu}0r}4R{|3r|`{TXLJcr3H?(*4D4LznEI-H+*;pNBkD%g6Wj9J*HftSu?9`~%Xz zP($cK-m~up)8y;p<-rv@-%m0Jb4}*W`3K>WR6x4G6Ew7(c#r)`*pldPC9$6iF{8v6d zd?4tw|5-Whd;ONt&r|(BdM4=n@>x0UdjCII{qV7%)2~`kLV|(W_x%?ysQmN)B7gn8 zMS{*ADxbL*;Ya_vm+#U>|DJF1;}km7Wj}iL(Ag`UGq-^0|F2v4-zP1 Date: Wed, 15 Nov 2017 11:55:52 -0500 Subject: [PATCH 45/45] Rename DataSource => AutoIngestDataSource for clarity --- .../experimental/autoingest/AddArchiveTask.java | 4 ++-- .../autoingest/AddDataSourceCallback.java | 4 ++-- .../{DataSource.java => AutoIngestDataSource.java} | 4 ++-- 
.../experimental/autoingest/AutoIngestManager.java | 14 +++++++------- 4 files changed, 13 insertions(+), 13 deletions(-) rename Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/{DataSource.java => AutoIngestDataSource.java} (96%) diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java index be6a047bc3..ffe121cb7d 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java @@ -172,7 +172,7 @@ class AddArchiveTask implements Runnable { try { UUID taskId = UUID.randomUUID(); currentCase.notifyAddingDataSource(taskId); - DataSource internalDataSource = new DataSource(deviceId, newFilePath); + AutoIngestDataSource internalDataSource = new AutoIngestDataSource(deviceId, newFilePath); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); selectedProcessor.process(deviceId, newFilePath, progressMonitor, internalArchiveDspCallBack); archiveDspLock.wait(); @@ -224,7 +224,7 @@ class AddArchiveTask implements Runnable { synchronized (archiveDspLock) { UUID taskId = UUID.randomUUID(); currentCase.notifyAddingDataSource(taskId); - DataSource internalDataSource = new DataSource(deviceId, destinationFolder); + AutoIngestDataSource internalDataSource = new AutoIngestDataSource(deviceId, destinationFolder); DataSourceProcessorCallback internalArchiveDspCallBack = new AddDataSourceCallback(currentCase, internalDataSource, taskId, archiveDspLock); // folder where archive was extracted to diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java index db19fc2fbc..fc00149249 100755 --- 
a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddDataSourceCallback.java @@ -35,7 +35,7 @@ import org.sleuthkit.datamodel.Content; class AddDataSourceCallback extends DataSourceProcessorCallback { private final Case caseForJob; - private final DataSource dataSourceInfo; + private final AutoIngestDataSource dataSourceInfo; private final UUID taskId; private final Object lock; @@ -48,7 +48,7 @@ class AddDataSourceCallback extends DataSourceProcessorCallback { * @param dataSourceInfo The data source * @param taskId The task id to associate with ingest job events. */ - AddDataSourceCallback(Case caseForJob, DataSource dataSourceInfo, UUID taskId, Object lock) { + AddDataSourceCallback(Case caseForJob, AutoIngestDataSource dataSourceInfo, UUID taskId, Object lock) { this.caseForJob = caseForJob; this.dataSourceInfo = dataSourceInfo; this.taskId = taskId; diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSource.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDataSource.java similarity index 96% rename from Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSource.java rename to Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDataSource.java index db9d3d5ad9..89a0d0ad5f 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/DataSource.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestDataSource.java @@ -26,7 +26,7 @@ import org.sleuthkit.autopsy.corecomponentinterfaces.DataSourceProcessorCallback import org.sleuthkit.datamodel.Content; @ThreadSafe -class DataSource { +class AutoIngestDataSource { private final String deviceId; private final Path path; @@ -34,7 +34,7 @@ class DataSource { private List errorMessages; private List content; - DataSource(String deviceId, Path path) { + 
AutoIngestDataSource(String deviceId, Path path) { this.deviceId = deviceId; this.path = path; } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java index de2b65a966..35b563b961 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestManager.java @@ -2229,7 +2229,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen return; } - DataSource dataSource = identifyDataSource(); + AutoIngestDataSource dataSource = identifyDataSource(); if (null == dataSource) { currentJob.setProcessingStage(AutoIngestJob.Stage.COMPLETED, Date.from(Instant.now())); return; @@ -2280,7 +2280,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * interrupted while blocked, i.e., * if auto ingest is shutting down. */ - private DataSource identifyDataSource() throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { + private AutoIngestDataSource identifyDataSource() throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Identifying data source for {0} ", manifestPath); @@ -2297,7 +2297,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen return null; } String deviceId = manifest.getDeviceId(); - return new DataSource(deviceId, dataSourcePath); + return new AutoIngestDataSource(deviceId, dataSourcePath); } /** @@ -2314,7 +2314,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void runDataSourceProcessor(Case caseForJob, DataSource dataSource) throws InterruptedException, AutoIngestJobLoggerException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException, CoordinationServiceException { + private void runDataSourceProcessor(Case caseForJob, AutoIngestDataSource dataSource) throws InterruptedException, AutoIngestJobLoggerException, AutoIngestDataSourceProcessor.AutoIngestDataSourceProcessorException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Adding data source for {0} ", manifestPath); @@ -2393,7 +2393,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * while blocked, i.e., if auto * ingest is shutting down. */ - private void logDataSourceProcessorResult(DataSource dataSource) throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { + private void logDataSourceProcessorResult(AutoIngestDataSource dataSource) throws AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); Path caseDirectoryPath = currentJob.getCaseDirectoryPath(); @@ -2463,7 +2463,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * while blocked, i.e., if auto * ingest is shutting down. 
*/ - private void analyze(DataSource dataSource) throws AnalysisStartupException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { + private void analyze(AutoIngestDataSource dataSource) throws AnalysisStartupException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Starting ingest modules analysis for {0} ", manifestPath); @@ -2560,7 +2560,7 @@ final class AutoIngestManager extends Observable implements PropertyChangeListen * while blocked, i.e., if auto * ingest is shutting down. */ - private void exportFiles(DataSource dataSource) throws FileExportException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { + private void exportFiles(AutoIngestDataSource dataSource) throws FileExportException, AutoIngestJobLoggerException, InterruptedException, CaseNodeData.InvalidDataException, CoordinationServiceException { Manifest manifest = currentJob.getManifest(); Path manifestPath = manifest.getFilePath(); SYS_LOGGER.log(Level.INFO, "Exporting files for {0}", manifestPath);