\n"); //NON-NLS
@@ -1254,17 +1274,31 @@ class ReportHTML implements TableReportModule {
final String align = agencyLogoSet ? "right" : "left"; //NON-NLS NON-NLS
summary.append("
\n"); //NON-NLS
summary.append("
\n"); //NON-NLS
- summary.append("").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.caseName")) //NON-NLS
- .append(" | ").append(caseName).append(" |
\n"); //NON-NLS NON-NLS
- summary.append("").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.caseNum")) //NON-NLS
- .append(" | ").append(!caseNumber.isEmpty() ? caseNumber : NbBundle //NON-NLS
- .getMessage(this.getClass(), "ReportHTML.writeSum.noCaseNum")).append(" |
\n"); //NON-NLS
- summary.append("").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.examiner")).append(" | ") //NON-NLS
- .append(!examiner.isEmpty() ? examiner : NbBundle
- .getMessage(this.getClass(), "ReportHTML.writeSum.noExaminer"))
- .append(" |
\n"); //NON-NLS
- summary.append("").append(NbBundle.getMessage(this.getClass(), "ReportHTML.writeSum.numImages")) //NON-NLS
- .append(" | ").append(imagecount).append(" |
\n"); //NON-NLS
+
+ // Case details
+ summary.append("").append(Bundle.ReportHTML_writeSum_case()).append(" | ") //NON-NLS
+ .append(formatHtmlString(caseName)).append(" |
\n"); //NON-NLS
+
+ if (!caseNumber.isEmpty()) {
+ summary.append("").append(Bundle.ReportHTML_writeSum_caseNumber()).append(" | ") //NON-NLS
+ .append(formatHtmlString(caseNumber)).append(" |
\n"); //NON-NLS
+ }
+
+ summary.append("").append(Bundle.ReportHTML_writeSum_caseNumImages()).append(" | ") //NON-NLS
+ .append(imagecount).append(" |
\n"); //NON-NLS
+
+ if (!caseNotes.isEmpty()) {
+ summary.append("").append(Bundle.ReportHTML_writeSum_caseNotes()).append(" | ") //NON-NLS
+ .append(formatHtmlString(caseNotes)).append(" |
\n"); //NON-NLS
+ }
+
+ // Examiner details
+ if (!examinerName.isEmpty()) {
+ summary.append("").append(Bundle.ReportHTML_writeSum_examiner()).append(" | ") //NON-NLS
+ .append(formatHtmlString(examinerName)).append(" |
\n"); //NON-NLS
+ }
+
+ // End the layout.
summary.append("
\n"); //NON-NLS
summary.append("
\n"); //NON-NLS
summary.append("
\n"); //NON-NLS
@@ -1414,4 +1448,17 @@ class ReportHTML implements TableReportModule {
+ thumbFile.getName();
}
+ /**
+ * Apply escape sequence to special characters. Line feed and carriage
+ * return character combinations will be converted to HTML line breaks.
+ *
+ * @param text The text to format.
+ *
+ * @return The formatted text.
+ */
+ private String formatHtmlString(String text) {
+ String formattedString = StringEscapeUtils.escapeHtml4(text);
+ return formattedString.replaceAll("(\r\n|\r|\n|\n\r)", "
");
+ }
+
}
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java
index 9286d935cb..b11352175c 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IngestedWithHashAndFileTypeInterCaseTests.java
@@ -1,241 +1,251 @@
-///*
-// *
-// * Autopsy Forensic Browser
-// *
-// * Copyright 2018 Basis Technology Corp.
-// * Contact: carrier
sleuthkit org
-// *
-// * Licensed under the Apache License, Version 2.0 (the "License");
-// * you may not use this file except in compliance with the License.
-// * You may obtain a copy of the License at
-// *
-// * http://www.apache.org/licenses/LICENSE-2.0
-// *
-// * Unless required by applicable law or agreed to in writing, software
-// * distributed under the License is distributed on an "AS IS" BASIS,
-// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// * See the License for the specific language governing permissions and
-// * limitations under the License.
-// */
-//package org.sleuthkit.autopsy.commonfilessearch;
-//
-//import java.nio.file.Path;
-//import java.sql.SQLException;
-//import junit.framework.Test;
-//import org.netbeans.junit.NbModuleSuite;
-//import org.netbeans.junit.NbTestCase;
-//import org.openide.util.Exceptions;
-//import junit.framework.Assert;
-//import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-//import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
-//import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
-//import org.sleuthkit.autopsy.commonfilesearch.AbstractCommonAttributeSearcher;
-//import org.sleuthkit.autopsy.commonfilesearch.AllInterCaseCommonAttributeSearcher;
-//import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
-//import org.sleuthkit.autopsy.commonfilesearch.SingleInterCaseCommonAttributeSearcher;
-//import static org.sleuthkit.autopsy.commonfilessearch.InterCaseTestUtils.*;
-//import org.sleuthkit.datamodel.TskCoreException;
-//
-///**
-// * Tests with case 3 as the current case.
-// *
-// * If I use the search all cases option: One node for Hash A (1_1_A.jpg,
-// * 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case 1: One node for
-// * Hash A (1_1_A.jpg, 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case
-// * 2: No matches If I only search in the current case (existing mode), allowing
-// * all data sources: One node for Hash C (3_1_C.jpg, 3_2_C.jpg)
-// *
-// */
-//public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
-//
-// private final InterCaseTestUtils utils;
-//
-// public static Test suite() {
-// NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(IngestedWithHashAndFileTypeInterCaseTests.class).
-// clusters(".*").
-// enableModules(".*");
-// return conf.suite();
-// }
-//
-// public IngestedWithHashAndFileTypeInterCaseTests(String name) {
-// super(name);
-// this.utils = new InterCaseTestUtils(this);
-// }
-//
-// @Override
-// public void setUp() {
-// this.utils.clearTestDir();
-// try {
-// this.utils.enableCentralRepo();
-//
-// String[] cases = new String[]{
-// CASE1,
-// CASE2,
-// CASE3};
-//
-// Path[][] paths = {
-// {this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
-// {this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
-// {this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};
-//
-// this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
-// } catch (TskCoreException | EamDbException ex) {
-// Exceptions.printStackTrace(ex);
-// Assert.fail(ex.getMessage());
-// }
-// }
-//
-// @Override
-// public void tearDown() {
-// this.utils.clearTestDir();
-// this.utils.tearDown();
-// }
-//
-// /**
-// * Search All cases with no file type filtering.
-// */
-// public void testOne() {
-// try {
-// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
-// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
-//
-// assertTrue("Results should not be empty", metadata.size() != 0);
-//
-// //case 1 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
-//
-// //case 1 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
-//
-// //case 2 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
-//
-// //case 2 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
-//
-// //case 3 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
-//
-// //case 3 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
-//
-// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
-// Exceptions.printStackTrace(ex);
-// Assert.fail(ex.getMessage());
-// }
-// }
-//
-// /**
-// * Search All cases with no file type filtering.
-// */
-// public void testTwo() {
-// try {
-// int matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(CASE2);
-// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
-// AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
-//
-// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
-//
-// assertTrue("Results should not be empty", metadata.size() != 0);
-//
-// //case 1 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
-//
-// //case 1 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
-//
-// //case 2 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
-//
-// //case 2 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
-//
-// //case 3 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
-//
-// //case 3 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
-//
-// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
-// Exceptions.printStackTrace(ex);
-// Assert.fail(ex.getMessage());
-// }
-// }
-//
-// /**
-// * We should be able to observe that certain files are no longer returned in
-// * the result set since they exist too frequently
-// */
-// public void testThree() {
-// try {
-//
-// CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
-// AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
-//
-// CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
-// metadata.filterMetadata();
-// assertTrue("Results should not be empty", metadata.size() != 0);
-//
-// //case 1 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
-//
-// //case 1 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
-//
-// //case 2 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
-//
-// //case 2 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
-//
-// //case 3 data set 1
-// assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
-//
-// //case 3 data set 2
-// assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
-// assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
-//
-// } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
-// Exceptions.printStackTrace(ex);
-// Assert.fail(ex.getMessage());
-// }
-// }
-//}
+/*
+ *
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.commonfilessearch;
+
+import java.nio.file.Path;
+import java.sql.SQLException;
+import java.util.Set;
+import junit.framework.Test;
+import org.netbeans.junit.NbModuleSuite;
+import org.netbeans.junit.NbTestCase;
+import org.openide.util.Exceptions;
+import junit.framework.Assert;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
+import org.sleuthkit.autopsy.commonfilesearch.AbstractCommonAttributeSearcher;
+import org.sleuthkit.autopsy.commonfilesearch.AllInterCaseCommonAttributeSearcher;
+import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
+import org.sleuthkit.autopsy.commonfilesearch.SingleInterCaseCommonAttributeSearcher;
+import static org.sleuthkit.autopsy.commonfilessearch.InterCaseTestUtils.*;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Tests with case 3 as the current case.
+ *
+ * If I use the search all cases option: One node for Hash A (1_1_A.jpg,
+ * 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case 1: One node for
+ * Hash A (1_1_A.jpg, 1_2_A.jpg, 3_1_A.jpg) If I search for matches only in Case
+ * 2: No matches If I only search in the current case (existing mode), allowing
+ * all data sources: One node for Hash C (3_1_C.jpg, 3_2_C.jpg)
+ *
+ */
+public class IngestedWithHashAndFileTypeInterCaseTests extends NbTestCase {
+
+ private final InterCaseTestUtils utils;
+
+ public static Test suite() {
+ NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(IngestedWithHashAndFileTypeInterCaseTests.class).
+ clusters(".*").
+ enableModules(".*");
+ return conf.suite();
+ }
+
+ public IngestedWithHashAndFileTypeInterCaseTests(String name) {
+ super(name);
+ this.utils = new InterCaseTestUtils(this);
+ }
+
+ @Override
+ public void setUp() {
+ this.utils.clearTestDir();
+ try {
+ this.utils.enableCentralRepo();
+
+ String[] cases = new String[]{
+ CASE1,
+ CASE2,
+ CASE3};
+
+ Path[][] paths = {
+ {this.utils.case1DataSet1Path, this.utils.case1DataSet2Path},
+ {this.utils.case2DataSet1Path, this.utils.case2DataSet2Path},
+ {this.utils.case3DataSet1Path, this.utils.case3DataSet2Path}};
+
+ this.utils.createCases(cases, paths, this.utils.getIngestSettingsForHashAndFileType(), InterCaseTestUtils.CASE3);
+ } catch (TskCoreException | EamDbException ex) {
+ Exceptions.printStackTrace(ex);
+ Assert.fail(ex.getMessage());
+ }
+ }
+
+ @Override
+ public void tearDown() {
+ this.utils.clearTestDir();
+ this.utils.tearDown();
+ }
+
+ /**
+ * Search All cases with no file type filtering.
+ */
+ public void testOne() {
+ try {
+ AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, this.utils.FILE_TYPE, 0);
+ CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
+
+ assertTrue("Results should not be empty", metadata.size() != 0);
+
+ //case 1 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
+
+ //case 1 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
+
+ //case 2 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
+
+ //case 2 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
+
+ //case 3 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
+
+ //case 3 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
+
+ } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
+ Exceptions.printStackTrace(ex);
+ Assert.fail(ex.getMessage());
+ }
+ }
+
+ /**
+ * Search All cases with no file type filtering.
+ */
+ public void testTwo() {
+ try {
+ int matchesMustAlsoBeFoundInThisCase = 0;
+
+ // Filter out the time stamp to get the correct case name.
+ Set caseNames = this.utils.getCaseMap().keySet();
+ for (String caseName : caseNames) {
+ if (caseName.substring(0, caseName.length() - 20).equalsIgnoreCase(CASE2)) {
+ // Case match found. Get the number of matches.
+ matchesMustAlsoBeFoundInThisCase = this.utils.getCaseMap().get(caseName);
+ }
+ }
+ CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
+ AbstractCommonAttributeSearcher builder = new SingleInterCaseCommonAttributeSearcher(matchesMustAlsoBeFoundInThisCase, false, false, fileType, 0);
+
+ CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
+
+ assertTrue("Results should not be empty", metadata.size() != 0);
+
+ //case 1 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
+
+ //case 1 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 1, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
+
+ //case 2 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
+
+ //case 2 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
+
+ //case 3 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 1, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
+
+ //case 3 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
+
+ } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
+ Exceptions.printStackTrace(ex);
+ Assert.fail(ex.getMessage());
+ }
+ }
+
+ /**
+ * We should be able to observe that certain files are no longer returned in
+ * the result set since they exist too frequently
+ */
+ public void testThree() {
+ try {
+
+ CorrelationAttributeInstance.Type fileType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
+ AbstractCommonAttributeSearcher builder = new AllInterCaseCommonAttributeSearcher(false, false, fileType, 50);
+
+ CommonAttributeCountSearchResults metadata = builder.findMatchesByCount();
+ metadata.filterMetadata();
+ assertTrue("Results should not be empty", metadata.size() != 0);
+
+ //case 1 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_1, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_1, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_1, CASE1));
+
+ //case 1 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_0_DAT, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_0_DAT, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_PDF, CASE1_DATASET_2, CASE1));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE1_DATASET_2, CASE1", 0, getInstanceCount(metadata, HASH_A_JPG, CASE1_DATASET_2, CASE1));
+
+ //case 2 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_B_PDF, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_PDF, CASE2_DATASET_1, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_B_JPG, CASE2_DATASET_1, CASE2", 0, getInstanceCount(metadata, HASH_B_JPG, CASE2_DATASET_1, CASE2));
+
+ //case 2 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_PDF, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE2_DATASET_2, CASE2", 0, getInstanceCount(metadata, HASH_A_JPG, CASE2_DATASET_2, CASE2));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE2_DATASET_2, CASE2", 1, getInstanceCount(metadata, HASH_D_DOC, CASE2_DATASET_2, CASE2));
+
+ //case 3 data set 1
+ assertEquals("Verify Existence or Count failed for HASH_A_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_A_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_A_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_1, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_JPG, CASE3_DATASET_1, CASE3", 0, getInstanceCount(metadata, HASH_D_JPG, CASE3_DATASET_1, CASE3));
+
+ //case 3 data set 2
+ assertEquals("Verify Existence or Count failed for HASH_C_JPG, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_JPG, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_C_PDF, CASE3_DATASET_2, CASE3", 0, getInstanceCount(metadata, HASH_C_PDF, CASE3_DATASET_2, CASE3));
+ assertEquals("Verify Existence or Count failed for HASH_D_DOC, CASE3_DATASET_2, CASE3", 1, getInstanceCount(metadata, HASH_D_DOC, CASE3_DATASET_2, CASE3));
+
+ } catch (TskCoreException | NoCurrentCaseException | SQLException | EamDbException ex) {
+ Exceptions.printStackTrace(ex);
+ Assert.fail(ex.getMessage());
+ }
+ }
+}
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
index 01cf1f7bcc..eb95ff61f0 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
@@ -59,6 +59,7 @@ import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeCountSearchResults;
import org.sleuthkit.autopsy.datamodel.utils.DataSourceLoader;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValue;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValueList;
+import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
import org.sleuthkit.autopsy.modules.dataSourceIntegrity.DataSourceIntegrityModuleFactory;
import org.sleuthkit.autopsy.modules.embeddedfileextractor.EmbeddedFileExtractorModuleFactory;
@@ -426,7 +427,7 @@ class InterCaseTestUtils {
boolean sameFileName = testFileName.equalsIgnoreCase(fileName);
boolean sameDataSource = testDataSource.equalsIgnoreCase(dataSource);
- boolean sameCrCase = testCaseName.equalsIgnoreCase(crCase);
+ boolean sameCrCase = TimeStampUtils.removeTimeStamp(testCaseName).equalsIgnoreCase(crCase);
if (sameFileName && sameDataSource && sameCrCase) {
tally++;
@@ -443,7 +444,7 @@ class InterCaseTestUtils {
final String testDataSource = node.getDataSource();
boolean sameFileName = testFileName.equalsIgnoreCase(fileName);
- boolean sameCaseName = testCaseName.equalsIgnoreCase(crCase);
+ boolean sameCaseName = TimeStampUtils.removeTimeStamp(testCaseName).equalsIgnoreCase(crCase);
boolean sameDataSource = testDataSource.equalsIgnoreCase(dataSource);
if (sameFileName && sameDataSource && sameCaseName) {
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/ingest/EmbeddedFileTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/ingest/EmbeddedFileTest.java
index af9266b36f..25d691ddbc 100755
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/ingest/EmbeddedFileTest.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/ingest/EmbeddedFileTest.java
@@ -90,51 +90,51 @@ public class EmbeddedFileTest extends NbTestCase {
CaseUtils.closeCurrentCase();
}
-// public void testEncryptionAndZipBomb() {
-// try {
-// List results = openCase.getSleuthkitCase().findAllFilesWhere("name LIKE '%%'");
-// final String zipBombSetName = "Possible Zip Bomb";
-// final String protectedName1 = "password_protected.zip";
-// final String protectedName2 = "level1_protected.zip";
-// final String protectedName3 = "42.zip";
-// final String depthZipBomb = "DepthTriggerZipBomb.zip";
-// final String ratioZipBomb = "RatioTriggerZipBomb.zip";
-// int zipBombs = 0;
-// assertEquals("The number of files in the test image has changed", 2221, results.size());
-// int passwdProtectedZips = 0;
-// for (AbstractFile file : results) {
-// //.zip file has artifact TSK_ENCRYPTION_DETECTED
-// if (file.getName().equalsIgnoreCase(protectedName1) || file.getName().equalsIgnoreCase(protectedName2) || file.getName().equalsIgnoreCase(protectedName3)) {
-// ArrayList artifacts = file.getAllArtifacts();
-// assertEquals("Password protected zip file " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size());
-// for (BlackboardArtifact artifact : artifacts) {
-// assertEquals("Artifact for password protected zip file " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID());
-// passwdProtectedZips++;
-// }
-// } else if (file.getName().equalsIgnoreCase(depthZipBomb) || file.getName().equalsIgnoreCase(ratioZipBomb)) {
-// ArrayList artifacts = file.getAllArtifacts();
-// assertEquals("Zip bomb " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size());
-// for (BlackboardArtifact artifact : artifacts) {
-// assertEquals("Artifact for Zip bomb " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID());
-// BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
-// assertNotNull("No attribute found for artifact on zip bomb " + file.getName(), attribute);
-// assertEquals("Interesting artifact on file, " + file.getName() + ", does not reflect it being a zip bomb", zipBombSetName, attribute.getDisplayString());
-// zipBombs++;
-// }
-// } else {//No other files have artifact defined
-// assertEquals("Unexpected file, " + file.getName() + ", has artifacts", 0, file.getAllArtifacts().size());
-// }
-//
-// }
-// //Make sure 3 password protected zip files have been tested: password_protected.zip, level1_protected.zip and 42.zip that we download for bomb testing.
-// assertEquals("Unexpected number of artifacts reflecting password protected zip files found", 3, passwdProtectedZips);
-// //Make sure 2 zip bomb files have been tested: DepthTriggerZipBomb.zip and RatioTriggerZipBomb.zip.
-// assertEquals("Unexpected number of artifacts reflecting zip bombs found", 2, zipBombs);
-// } catch (TskCoreException ex) {
-// Exceptions.printStackTrace(ex);
-// Assert.fail(ex.getMessage());
-// }
-// }
+ public void testEncryptionAndZipBomb() {
+ try {
+ List results = openCase.getSleuthkitCase().findAllFilesWhere("name LIKE '%%'");
+ final String zipBombSetName = "Possible Zip Bomb";
+ final String protectedName1 = "password_protected.zip";
+ final String protectedName2 = "level1_protected.zip";
+ final String protectedName3 = "42.zip";
+ final String depthZipBomb = "DepthTriggerZipBomb.zip";
+ final String ratioZipBomb = "RatioTriggerZipBomb.zip";
+ int zipBombs = 0;
+ assertEquals("The number of files in the test image has changed", 2221, results.size());
+ int passwdProtectedZips = 0;
+ for (AbstractFile file : results) {
+ //.zip file has artifact TSK_ENCRYPTION_DETECTED
+ if (file.getName().equalsIgnoreCase(protectedName1) || file.getName().equalsIgnoreCase(protectedName2) || file.getName().equalsIgnoreCase(protectedName3)) {
+ ArrayList artifacts = file.getAllArtifacts();
+ assertEquals("Password protected zip file " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size());
+ for (BlackboardArtifact artifact : artifacts) {
+ assertEquals("Artifact for password protected zip file " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID());
+ passwdProtectedZips++;
+ }
+ } else if (file.getName().equalsIgnoreCase(depthZipBomb) || file.getName().equalsIgnoreCase(ratioZipBomb)) {
+ ArrayList artifacts = file.getAllArtifacts();
+ assertEquals("Zip bomb " + file.getName() + " has incorrect number of artifacts", 1, artifacts.size());
+ for (BlackboardArtifact artifact : artifacts) {
+ assertEquals("Artifact for Zip bomb " + file.getName() + " has incorrect type ID", artifact.getArtifactTypeID(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID());
+ BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
+ assertNotNull("No attribute found for artifact on zip bomb " + file.getName(), attribute);
+ assertEquals("Interesting artifact on file, " + file.getName() + ", does not reflect it being a zip bomb", zipBombSetName, attribute.getDisplayString());
+ zipBombs++;
+ }
+ } else {//No other files have artifact defined
+ assertEquals("Unexpected file, " + file.getName() + ", has artifacts", 0, file.getAllArtifacts().size());
+ }
+
+ }
+ //Make sure 3 password protected zip files have been tested: password_protected.zip, level1_protected.zip and 42.zip that we download for bomb testing.
+ assertEquals("Unexpected number of artifacts reflecting password protected zip files found", 3, passwdProtectedZips);
+ //Make sure 2 zip bomb files have been tested: DepthTriggerZipBomb.zip and RatioTriggerZipBomb.zip.
+ assertEquals("Unexpected number of artifacts reflecting zip bombs found", 2, zipBombs);
+ } catch (TskCoreException ex) {
+ Exceptions.printStackTrace(ex);
+ Assert.fail(ex.getMessage());
+ }
+ }
public void testBigFolder() {
final int numOfFilesToTest = 1000;
diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanelTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanelTest.java
new file mode 100755
index 0000000000..cfbc5e0cf6
--- /dev/null
+++ b/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsPanelTest.java
@@ -0,0 +1,107 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.directorytree;
+
+import junit.framework.Assert;
+import org.junit.Test;
+import org.sleuthkit.autopsy.directorytree.ExternalViewerRule.RuleType;
+
+/**
+ * 72% code coverage of ExternalViewerGlobalSettingsPanel
+ */
+public class ExternalViewerGlobalSettingsPanelTest {
+
+ static final String[] testColumnNames = {"A", "B"};
+
+ /**
+ * Default constructor for JUnit
+ */
+ public ExternalViewerGlobalSettingsPanelTest(){
+ //Codacy complains if there is no comment here
+ }
+
+ @Test
+ public void testEnableButtons() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ ExternalViewerGlobalSettingsPanel panel = new ExternalViewerGlobalSettingsPanel(testModel);
+ Assert.assertFalse(panel.enableButtons());
+
+ testModel.addRule(new ExternalViewerRule("image/png", "fake.exe", RuleType.MIME));
+
+ Assert.assertFalse(panel.enableButtons());
+ panel.setSelectionInterval(0, 0);
+ Assert.assertTrue(panel.enableButtons());
+ }
+
+ @Test
+ public void testDisableButtons() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ ExternalViewerGlobalSettingsPanel panel = new ExternalViewerGlobalSettingsPanel(testModel);
+
+ testModel.addRule(new ExternalViewerRule("image/png", "fake.exe", RuleType.MIME));
+ Assert.assertFalse(panel.enableButtons());
+ panel.setSelectionInterval(0, 0);
+ Assert.assertTrue(panel.enableButtons());
+
+ testModel.removeRule(0);
+ Assert.assertFalse(panel.enableButtons());
+ }
+
+ @Test
+ public void testDeleteRuleButtonClick() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ ExternalViewerGlobalSettingsPanel testPanel = new ExternalViewerGlobalSettingsPanel(testModel);
+ Assert.assertFalse(testPanel.enableButtons());
+
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+ testPanel.setSelectionInterval(0, 0);
+ Assert.assertTrue(testPanel.enableButtons());
+ Assert.assertEquals(1, testModel.getRowCount());
+
+ testPanel.deleteRuleButtonClick(0);
+
+ Assert.assertFalse(testPanel.enableButtons());
+ Assert.assertEquals(0, testModel.getRowCount());
+ }
+
+ @Test(expected = IndexOutOfBoundsException.class)
+ public void testDeleteButtonClickFail() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ ExternalViewerGlobalSettingsPanel testPanel = new ExternalViewerGlobalSettingsPanel(testModel);
+
+ testPanel.deleteRuleButtonClick(-1);
+ }
+
+ @Test
+ public void testSingleSelection() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ ExternalViewerGlobalSettingsPanel testPanel = new ExternalViewerGlobalSettingsPanel(testModel);
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+ testModel.addRule(new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT));
+
+ testPanel.setSelectionInterval(0, 2);
+
+ Assert.assertFalse(testPanel.isSelected(0));
+ Assert.assertFalse(testPanel.isSelected(1));
+ Assert.assertTrue(testPanel.isSelected(2));
+ }
+}
diff --git a/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsTableModelTest.java b/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsTableModelTest.java
new file mode 100755
index 0000000000..72118c7e3e
--- /dev/null
+++ b/Core/test/unit/src/org/sleuthkit/autopsy/directorytree/ExternalViewerGlobalSettingsTableModelTest.java
@@ -0,0 +1,217 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.directorytree;
+
+import junit.framework.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.sleuthkit.autopsy.directorytree.ExternalViewerRule.RuleType;
+
+/**
+ * 100% code coverage of ExternalViewerGlobalSettingsTableModel
+ */
+public class ExternalViewerGlobalSettingsTableModelTest {
+
+ static final String[] testColumnNames = {"A", "B"};
+ private ExternalViewerRule pngMime;
+ private ExternalViewerRule txtExt;
+ private ExternalViewerRule wavExt;
+
+ /**
+ * Initialize JUnit test
+ */
+ public ExternalViewerGlobalSettingsTableModelTest() {
+ //Empty constructor
+ }
+
+ @Before
+ public void setUp() {
+ pngMime = new ExternalViewerRule("image/png", "test.exe", RuleType.MIME);
+ txtExt = new ExternalViewerRule(".txt", "notepad.exe", RuleType.EXT);
+ wavExt = new ExternalViewerRule(".wav", "video.exe", RuleType.EXT);
+ }
+
+ /**
+ * Test of addRule method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testAddRule() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+
+ Assert.assertEquals(1, testModel.getRowCount());
+
+ ExternalViewerRule rule = testModel.getRuleAt(0);
+ Assert.assertEquals("image/png", rule.getName());
+ Assert.assertEquals("test.exe", rule.getExePath());
+ Assert.assertEquals(RuleType.MIME, rule.getRuleType());
+ }
+
+ /**
+ * Test of getRowCount method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetRowCount() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ Assert.assertEquals(0, testModel.getRowCount());
+
+ testModel.addRule(pngMime);
+ testModel.addRule(txtExt);
+ testModel.addRule(wavExt);
+
+ Assert.assertEquals(3, testModel.getRowCount());
+ }
+
+ /**
+ * Test of getColumnName method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetColumnName() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ Assert.assertEquals("A", testModel.getColumnName(0));
+ Assert.assertEquals("B", testModel.getColumnName(1));
+ }
+
+ @Test(expected = ArrayIndexOutOfBoundsException.class)
+ public void testColumnNameOutOfBounds() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.getColumnName(2);
+ }
+
+ /**
+ * Test of getColumnClass method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetColumnClass() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ Assert.assertEquals(String.class, testModel.getColumnClass(0));
+ }
+
+ /**
+ * Test of getColumnCount method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetColumnCount() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ Assert.assertEquals(2, testModel.getColumnCount());
+ ExternalViewerGlobalSettingsTableModel testModelTwo = new ExternalViewerGlobalSettingsTableModel(new String[] {"A", "B", "C", "D", "E"});
+ Assert.assertEquals(5, testModelTwo.getColumnCount());
+ }
+
+ /**
+ * Test of getValueAt method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetValueAt() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ testModel.addRule(txtExt);
+ testModel.addRule(wavExt);
+
+ Assert.assertEquals(".txt", testModel.getValueAt(1,0));
+ Assert.assertEquals("notepad.exe", testModel.getValueAt(1,1));
+ Assert.assertEquals("image/png", testModel.getValueAt(0,0));
+ Assert.assertEquals("test.exe", testModel.getValueAt(0,1));
+ }
+
+ /**
+ * Test of getRuleAt method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testGetRuleAt() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ testModel.addRule(txtExt);
+ testModel.addRule(wavExt);
+
+ ExternalViewerRule rule = testModel.getRuleAt(1);
+ Assert.assertEquals(".txt", rule.getName());
+ Assert.assertEquals("notepad.exe", rule.getExePath());
+ Assert.assertEquals(RuleType.EXT, rule.getRuleType());
+
+ ExternalViewerRule ruleTwo = testModel.getRuleAt(0);
+ Assert.assertEquals("image/png", ruleTwo.getName());
+ Assert.assertEquals("test.exe", ruleTwo.getExePath());
+ Assert.assertEquals(RuleType.MIME, ruleTwo.getRuleType());
+ }
+
+ /**
+ * Test of setRule method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testSetRule() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ testModel.addRule(txtExt);
+ testModel.addRule(wavExt);
+
+ testModel.setRule(0, txtExt);
+ ExternalViewerRule rule = testModel.getRuleAt(1);
+ Assert.assertEquals(".txt", rule.getName());
+ Assert.assertEquals("notepad.exe", rule.getExePath());
+ Assert.assertEquals(RuleType.EXT, rule.getRuleType());
+
+ testModel.setRule(2, pngMime);
+ ExternalViewerRule ruleTwo = testModel.getRuleAt(2);
+ Assert.assertEquals("image/png", ruleTwo.getName());
+ Assert.assertEquals("test.exe", ruleTwo.getExePath());
+ Assert.assertEquals(RuleType.MIME, ruleTwo.getRuleType());
+ }
+
+ /**
+ * Test of removeRule method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testRemoveRule() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ Assert.assertEquals(1, testModel.getRowCount());
+
+ testModel.removeRule(0);
+ Assert.assertEquals(0, testModel.getRowCount());
+ Assert.assertFalse(testModel.containsRule(pngMime));
+ }
+
+ /**
+ * Test of isCellEditable method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testIsCellEditable() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ Assert.assertFalse(testModel.isCellEditable(0, 0));
+ }
+
+ /**
+ * Test of containsRule method, of class ExternalViewerGlobalSettingsTableModel.
+ */
+ @Test
+ public void testContainsRule() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ Assert.assertTrue(testModel.containsRule(pngMime));
+ }
+
+ @Test
+ public void testNotContains() {
+ ExternalViewerGlobalSettingsTableModel testModel = new ExternalViewerGlobalSettingsTableModel(testColumnNames);
+ testModel.addRule(pngMime);
+ Assert.assertFalse(testModel.containsRule(new ExternalViewerRule("not", "a rule", RuleType.EXT)));
+ Assert.assertFalse(testModel.containsRule(null));
+ }
+}
\ No newline at end of file
diff --git a/ImageGallery/build.xml b/ImageGallery/build.xml
index 725c11d0e1..92d809a96d 100644
--- a/ImageGallery/build.xml
+++ b/ImageGallery/build.xml
@@ -15,11 +15,4 @@
out-of-date/unneeded stuff in the installer-->
-
-
-
-
-
-
-
diff --git a/ImageGallery/nbproject/project.xml b/ImageGallery/nbproject/project.xml
index 16e4146740..183a947954 100644
--- a/ImageGallery/nbproject/project.xml
+++ b/ImageGallery/nbproject/project.xml
@@ -142,8 +142,8 @@
- ext/sqlite-jdbc-3.7.8-SNAPSHOT.jar
- release/modules/ext/sqlite-jdbc-3.7.8-SNAPSHOT.jar
+ ext/sqlite-jdbc-3.25.2.jar
+ release/modules/ext/sqlite-jdbc-3.25.2.jar
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
index 3c72e9a27a..52295254c8 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
@@ -698,7 +698,8 @@ public final class ImageGalleryController {
//grab files with supported mime-types
+ MIMETYPE_CLAUSE //NON-NLS
//grab files with image or video mime-types even if we don't officially support them
- + " OR mime_type LIKE 'video/%' OR mime_type LIKE 'image/%' )"; //NON-NLS
+ + " OR mime_type LIKE 'video/%' OR mime_type LIKE 'image/%' )" //NON-NLS
+ + " ORDER BY parent_path ";
}
/**
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
index 8ab2668531..1c68d6c49d 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
@@ -393,6 +393,7 @@ public class ImageGalleryModule {
* database.
*/
if (controller.isListeningEnabled()) {
+ controller.getGroupManager().resetCurrentPathGroup();
DrawableDB drawableDb = controller.getDatabase();
if (drawableDb.getDataSourceDbBuildStatus(dataSourceObjId) == DrawableDB.DrawableDbBuildStatusEnum.IN_PROGRESS) {
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java
index e95a48c313..6b508c2a4c 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/DrawableDB.java
@@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
- * Copyright 2013-2018 Basis Technology Corp.
+ * Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier sleuthkit org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -36,6 +36,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import static java.util.Objects.isNull;
@@ -77,6 +78,7 @@ import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.DbType;
import org.sleuthkit.datamodel.TskDataException;
+import org.sleuthkit.datamodel.VersionNumber;
import org.sqlite.SQLiteJDBCLoader;
/**
@@ -97,6 +99,16 @@ public final class DrawableDB {
private static final String GROUPS_TABLENAME = "image_gallery_groups"; //NON-NLS
private static final String GROUPS_SEEN_TABLENAME = "image_gallery_groups_seen"; //NON-NLS
+ private static final String IG_DB_INFO_TABLE = "image_gallery_db_info";
+
+ private static final String IG_SCHEMA_MAJOR_VERSION_KEY = "IG_SCHEMA_MAJOR_VERSION";
+ private static final String IG_SCHEMA_MINOR_VERSION_KEY = "IG_SCHEMA_MINOR_VERSION";
+ private static final String IG_CREATION_SCHEMA_MAJOR_VERSION_KEY = "IG_CREATION_SCHEMA_MAJOR_VERSION";
+ private static final String IG_CREATION_SCHEMA_MINOR_VERSION_KEY = "IG_CREATION_SCHEMA_MINOR_VERSION";
+
+ private static final VersionNumber IG_STARTING_SCHEMA_VERSION = new VersionNumber(1, 0, 0); // IG Schema Starting version
+ private static final VersionNumber IG_SCHEMA_VERSION = new VersionNumber(1, 1, 0); // IG Schema Current version
+
private PreparedStatement insertHashSetStmt;
private List preparedStatements = new ArrayList<>();
@@ -216,7 +228,7 @@ public final class DrawableDB {
dbWriteLock();
try {
con = DriverManager.getConnection("jdbc:sqlite:" + dbPath.toString()); //NON-NLS
- if (!initializeDBSchema() || !prepareStatements() || !initializeStandardGroups() || !initializeImageList()) {
+ if (!initializeDBSchema() || !upgradeDBSchema() || !prepareStatements() || !initializeStandardGroups() || !initializeImageList()) {
close();
throw new TskCoreException("Failed to initialize drawables database for Image Gallery use"); //NON-NLS
}
@@ -390,6 +402,34 @@ public final class DrawableDB {
}
}
+ /**
+ * Checks if the specified table exists in Drawable DB
+ *
+ * @param tableName table to check
+ * @return true if the table exists in the database
+ *
+ * @throws SQLException
+ */
+ private boolean doesTableExist(String tableName) throws SQLException {
+ ResultSet tableQueryResults = null;
+ boolean tableExists = false;
+ try (Statement stmt = con.createStatement()) {
+ tableQueryResults = stmt.executeQuery("SELECT name FROM sqlite_master WHERE type='table'"); //NON-NLS
+ while (tableQueryResults.next()) {
+ if (tableQueryResults.getString("name").equalsIgnoreCase(tableName)) {
+ tableExists = true;
+ break;
+ }
+ }
+ }
+ finally {
+ if (tableQueryResults != null) {
+ tableQueryResults.close();
+ }
+ }
+ return tableExists;
+ }
+
private static void deleteDatabaseIfOlderVersion(Path dbPath) throws SQLException, IOException {
if (Files.exists(dbPath)) {
boolean hasDrawableFilesTable = false;
@@ -474,6 +514,8 @@ public final class DrawableDB {
private boolean initializeDBSchema() {
dbWriteLock();
try {
+ boolean existingDB = true;
+
if (isClosed()) {
logger.log(Level.SEVERE, "The drawables database is closed"); //NON-NLS
return false;
@@ -490,6 +532,31 @@ public final class DrawableDB {
* Create tables in the drawables database.
*/
try (Statement stmt = con.createStatement()) {
+
+ // Check if the database is a new or existing database
+ existingDB = doesTableExist("datasources");
+ if (false == doesTableExist(IG_DB_INFO_TABLE)) {
+ try {
+ VersionNumber ig_creation_schema_version = existingDB
+ ? IG_STARTING_SCHEMA_VERSION
+ : IG_SCHEMA_VERSION;
+
+ stmt.execute("CREATE TABLE IF NOT EXISTS " + IG_DB_INFO_TABLE + " (name TEXT PRIMARY KEY, value TEXT NOT NULL)");
+
+ // backfill creation schema ver
+ stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_CREATION_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor() ));
+ stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_CREATION_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor() ));
+
+ // set current schema ver: at DB initialization - current version is same as starting version
+ stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor() ));
+ stmt.execute(String.format("INSERT INTO %s (name, value) VALUES ('%s', '%s')", IG_DB_INFO_TABLE, IG_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor() ));
+
+ } catch (SQLException ex) {
+ logger.log(Level.SEVERE, "Failed to create ig_db_info table", ex); //NON-NLS
+ return false;
+ }
+ }
+
try {
String sql = "CREATE TABLE IF NOT EXISTS datasources " //NON-NLS
+ "( id INTEGER PRIMARY KEY, " //NON-NLS
@@ -582,12 +649,51 @@ public final class DrawableDB {
* Create tables in the case database.
*/
String autogenKeyType = (DbType.POSTGRESQL == tskCase.getDatabaseType()) ? "BIGSERIAL" : "INTEGER";
+
+ try {
+ VersionNumber ig_creation_schema_version = existingDB
+ ? IG_STARTING_SCHEMA_VERSION
+ : IG_SCHEMA_VERSION;
+
+ String tableSchema = "( id " + autogenKeyType + " PRIMARY KEY, "
+ + " name TEXT UNIQUE NOT NULL,"
+ + " value TEXT NOT NULL )";
+ tskCase.getCaseDbAccessManager().createTable(IG_DB_INFO_TABLE, tableSchema);
+
+ // backfill creation version
+ String creationMajorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_CREATION_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor());
+ String creationMinorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_CREATION_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor());
+
+ // set current version - at the onset, current version is same as creation version
+ String currentMajorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_SCHEMA_MAJOR_VERSION_KEY, ig_creation_schema_version.getMajor());
+ String currentMinorVerSQL = String.format(" (name, value) VALUES ('%s', '%s')", IG_SCHEMA_MINOR_VERSION_KEY, ig_creation_schema_version.getMinor());
+
+ if (DbType.POSTGRESQL == tskCase.getDatabaseType()) {
+ creationMajorVerSQL += " ON CONFLICT DO NOTHING ";
+ creationMinorVerSQL += " ON CONFLICT DO NOTHING ";
+
+ currentMajorVerSQL += " ON CONFLICT DO NOTHING ";
+ currentMinorVerSQL += " ON CONFLICT DO NOTHING ";
+ }
+
+ tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, creationMajorVerSQL);
+ tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, creationMinorVerSQL);
+
+ tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, currentMajorVerSQL);
+ tskCase.getCaseDbAccessManager().insert(IG_DB_INFO_TABLE, currentMinorVerSQL);
+
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Failed to create ig_db_info table in Case database", ex); //NON-NLS
+ return false;
+ }
+
try {
String tableSchema
= "( group_id " + autogenKeyType + " PRIMARY KEY, " //NON-NLS
+ " data_source_obj_id integer DEFAULT 0, "
+ " value VARCHAR(255) not null, " //NON-NLS
+ " attribute VARCHAR(255) not null, " //NON-NLS
+ + " is_analyzed integer DEFAULT 0, "
+ " UNIQUE(data_source_obj_id, value, attribute) )"; //NON-NLS
tskCase.getCaseDbAccessManager().createTable(GROUPS_TABLENAME, tableSchema);
@@ -620,6 +726,250 @@ public final class DrawableDB {
}
}
+ /**
+ * Gets the Schema version from DrawableDB
+ *
+ * @return image gallery schema version in DrawableDB
+ * @throws SQLException
+ * @throws TskCoreException
+ */
+ private VersionNumber getDrawableDbIgSchemaVersion() throws SQLException, TskCoreException {
+
+ Statement statement = con.createStatement();
+ ResultSet resultSet = null;
+
+ try {
+ int majorVersion = -1;
+ String majorVersionStr = null;
+ resultSet = statement.executeQuery(String.format("SELECT value FROM %s WHERE name='%s'", IG_DB_INFO_TABLE, IG_SCHEMA_MAJOR_VERSION_KEY));
+ if (resultSet.next()) {
+ majorVersionStr = resultSet.getString("value");
+ try {
+ majorVersion = Integer.parseInt(majorVersionStr);
+ } catch (NumberFormatException ex) {
+ throw new TskCoreException("Bad value for schema major version = " + majorVersionStr, ex);
+ }
+ } else {
+ throw new TskCoreException("Failed to read schema major version from ig_db_info table");
+ }
+
+ int minorVersion = -1;
+ String minorVersionStr = null;
+ resultSet = statement.executeQuery(String.format("SELECT value FROM %s WHERE name='%s'", IG_DB_INFO_TABLE, IG_SCHEMA_MINOR_VERSION_KEY));
+ if (resultSet.next()) {
+ minorVersionStr = resultSet.getString("value");
+ try {
+ minorVersion = Integer.parseInt(minorVersionStr);
+ } catch (NumberFormatException ex) {
+ throw new TskCoreException("Bad value for schema minor version = " + minorVersionStr, ex);
+ }
+ } else {
+ throw new TskCoreException("Failed to read schema minor version from ig_db_info table");
+ }
+
+ return new VersionNumber(majorVersion, minorVersion, 0 );
+ }
+ finally {
+ if (resultSet != null) {
+ resultSet.close();
+ }
+ if (statement != null) {
+ statement.close();
+ }
+ }
+ }
+
+ /**
+ * Gets the ImageGallery schema version from CaseDB
+ *
+ * @return image gallery schema version in CaseDB
+ * @throws SQLException
+ * @throws TskCoreException
+ */
+ private VersionNumber getCaseDbIgSchemaVersion() throws TskCoreException {
+
+ // Callback to process result of get version query
+ class GetSchemaVersionQueryResultProcessor implements CaseDbAccessQueryCallback {
+
+ private int version = -1;
+
+ int getVersion() {
+ return version;
+ }
+
+ @Override
+ public void process(ResultSet resultSet) {
+ try {
+ if (resultSet.next()) {
+ String versionStr = resultSet.getString("value");
+ try {
+ version = Integer.parseInt(versionStr);
+ } catch (NumberFormatException ex) {
+ logger.log(Level.SEVERE, "Bad value for version = " + versionStr, ex);
+ }
+ } else {
+ logger.log(Level.SEVERE, "Failed to get version");
+ }
+ }
+ catch (SQLException ex) {
+ logger.log(Level.SEVERE, "Failed to get version", ex); //NON-NLS
+ }
+ }
+ }
+
+ GetSchemaVersionQueryResultProcessor majorVersionResultProcessor = new GetSchemaVersionQueryResultProcessor();
+ GetSchemaVersionQueryResultProcessor minorVersionResultProcessor = new GetSchemaVersionQueryResultProcessor();
+
+ String versionQueryTemplate = "value FROM %s WHERE name = \'%s\' ";
+ tskCase.getCaseDbAccessManager().select(String.format(versionQueryTemplate, IG_DB_INFO_TABLE, IG_SCHEMA_MAJOR_VERSION_KEY), majorVersionResultProcessor);
+ tskCase.getCaseDbAccessManager().select(String.format(versionQueryTemplate, IG_DB_INFO_TABLE, IG_SCHEMA_MINOR_VERSION_KEY), minorVersionResultProcessor);
+
+ return new VersionNumber(majorVersionResultProcessor.getVersion(), minorVersionResultProcessor.getVersion(), 0);
+ }
+
+ /**
+ * Updates the IG schema version in the Drawable DB
+ *
+ * @param version new version number
+ * @param transaction transaction under which the update happens
+ *
+ * @throws SQLException
+ */
+ private void updateDrawableDbIgSchemaVersion(VersionNumber version, DrawableTransaction transaction) throws SQLException, TskCoreException {
+
+ if (transaction == null) {
+ throw new TskCoreException("Schema version update must be done in a transaction");
+ }
+
+ dbWriteLock();
+ try {
+ Statement statement = con.createStatement();
+
+ // update schema version
+ statement.execute(String.format("UPDATE %s SET value = '%s' WHERE name = '%s'", IG_DB_INFO_TABLE, version.getMajor(), IG_SCHEMA_MAJOR_VERSION_KEY ));
+ statement.execute(String.format("UPDATE %s SET value = '%s' WHERE name = '%s'", IG_DB_INFO_TABLE, version.getMinor(), IG_SCHEMA_MINOR_VERSION_KEY ));
+
+ statement.close();
+ }
+ finally {
+ dbWriteUnlock();
+ }
+ }
+
+ /**
+ * Updates the IG schema version in CaseDB
+ *
+ * @param version new version number
+ * @param caseDbTransaction transaction to use to update the CaseDB
+ *
+ * @throws SQLException
+ */
+ private void updateCaseDbIgSchemaVersion(VersionNumber version, CaseDbTransaction caseDbTransaction) throws TskCoreException {
+
+ String updateSQLTemplate = " SET value = %s WHERE name = '%s' ";
+ tskCase.getCaseDbAccessManager().update(IG_DB_INFO_TABLE, String.format(updateSQLTemplate, version.getMajor(), IG_SCHEMA_MAJOR_VERSION_KEY), caseDbTransaction);
+ tskCase.getCaseDbAccessManager().update(IG_DB_INFO_TABLE, String.format(updateSQLTemplate, version.getMinor(), IG_SCHEMA_MINOR_VERSION_KEY), caseDbTransaction);
+ }
+
+
+ /**
+ * Upgrades the DB schema.
+ *
+ * @return true if the upgrade is successful
+ *
+ * @throws SQLException
+ *
+ */
+ private boolean upgradeDBSchema() throws TskCoreException, SQLException {
+
+ // Read current version from the DBs
+ VersionNumber drawableDbIgSchemaVersion = getDrawableDbIgSchemaVersion();
+ VersionNumber caseDbIgSchemaVersion = getCaseDbIgSchemaVersion();
+
+ // Upgrade Schema in both DrawableDB and CaseDB
+ CaseDbTransaction caseDbTransaction = tskCase.beginTransaction();
+ DrawableTransaction transaction = beginTransaction();
+
+ try {
+ caseDbIgSchemaVersion = upgradeCaseDbIgSchema1dot0TO1dot1(caseDbIgSchemaVersion, caseDbTransaction);
+ drawableDbIgSchemaVersion = upgradeDrawableDbIgSchema1dot0TO1dot1(drawableDbIgSchemaVersion, transaction);
+
+ // update the versions in the tables
+ updateCaseDbIgSchemaVersion(caseDbIgSchemaVersion, caseDbTransaction );
+ updateDrawableDbIgSchemaVersion(drawableDbIgSchemaVersion, transaction);
+
+ caseDbTransaction.commit();
+ caseDbTransaction = null;
+ commitTransaction(transaction, false);
+ transaction = null;
+ }
+ catch (TskCoreException | SQLException ex) {
+ if (null != caseDbTransaction) {
+ try {
+ caseDbTransaction.rollback();
+ } catch (TskCoreException ex2) {
+ logger.log(Level.SEVERE, String.format("Failed to roll back case db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
+ }
+ }
+ if (null != transaction) {
+ try {
+ rollbackTransaction(transaction);
+ } catch (SQLException ex2) {
+ logger.log(Level.SEVERE, String.format("Failed to roll back drawables db transaction after error: %s", ex.getMessage()), ex2); //NON-NLS
+ }
+ }
+ throw ex;
+ }
+ return true;
+ }
+
+ /**
+ * Upgrades IG tables in CaseDB from 1.0 to 1.1
+ * Does nothing if the incoming version is not 1.0
+ *
+ * @param currVersion version to upgrade from
+ * @param caseDbTransaction transaction to use for all updates
+ *
+ * @return new version number
+ * @throws TskCoreException
+ */
+ private VersionNumber upgradeCaseDbIgSchema1dot0TO1dot1(VersionNumber currVersion, CaseDbTransaction caseDbTransaction ) throws TskCoreException {
+
+ if (currVersion.getMajor() != 1 ||
+ currVersion.getMinor() != 0) {
+ return currVersion;
+ }
+
+ // 1.0 -> 1.1 upgrade
+ // Add a 'isAnalyzed' column to groups table in CaseDB
+ String alterSQL = " ADD COLUMN is_analyzed integer DEFAULT 1 "; //NON-NLS
+ if (false == tskCase.getCaseDbAccessManager().columnExists(GROUPS_TABLENAME, "is_analyzed", caseDbTransaction )) {
+ tskCase.getCaseDbAccessManager().alterTable(GROUPS_TABLENAME, alterSQL, caseDbTransaction);
+ }
+ return new VersionNumber(1,1,0);
+ }
+
+ /**
+ * Upgrades IG tables in DrawableDB from 1.0 to 1.1
+ * Does nothing if the incoming version is not 1.0
+ *
+ * @param currVersion version to upgrade from
+ * @param transaction transaction to use for all updates
+ *
+ * @return new version number
+ * @throws TskCoreException
+ */
+ private VersionNumber upgradeDrawableDbIgSchema1dot0TO1dot1(VersionNumber currVersion, DrawableTransaction transaction ) throws TskCoreException {
+
+ if (currVersion.getMajor() != 1 ||
+ currVersion.getMinor() != 0) {
+ return currVersion;
+ }
+
+ // There are no changes in DrawableDB schema in 1.0 -> 1.1
+ return new VersionNumber(1,1,0);
+ }
+
@Override
protected void finalize() throws Throwable {
/*
@@ -829,6 +1179,26 @@ public final class DrawableDB {
}
+ /**
+     * Sets the is_analyzed flag in the groups table for the given group to true.
+ *
+ * @param groupKey group key.
+ *
+ * @throws TskCoreException
+ */
+ public void markGroupAnalyzed(GroupKey> groupKey) throws TskCoreException {
+
+
+ String updateSQL = String.format(" SET is_analyzed = %d "
+ + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d ",
+ 1,
+ SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()),
+ SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()),
+ groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0);
+
+ tskCase.getCaseDbAccessManager().update(GROUPS_TABLENAME, updateSQL);
+ }
+
/**
* Removes a file from the drawables databse.
*
@@ -857,6 +1227,14 @@ public final class DrawableDB {
}
}
+ /**
+ * Updates the image file.
+ *
+ * @param f file to update.
+ *
+ * @throws TskCoreException
+ * @throws SQLException
+ */
public void updateFile(DrawableFile f) throws TskCoreException, SQLException {
DrawableTransaction trans = null;
CaseDbTransaction caseDbTransaction = null;
@@ -885,25 +1263,14 @@ public final class DrawableDB {
}
}
- /**
- * Insert basic file data (no groups) into the DB during pre-population
- * phase
- *
- * @param f
- * @param tr
- * @param caseDbTransaction
- */
- public void insertBasicFileData(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) {
- insertOrUpdateFile(f, tr, caseDbTransaction, false);
- }
-
+
/**
* Update an existing entry (or make a new one) into the DB that includes
* group information. Called when a file has been analyzed or during a bulk
* rebuild
*
- * @param f
- * @param tr
+ * @param f file to update
+ * @param tr
* @param caseDbTransaction
*/
public void updateFile(DrawableFile f, DrawableTransaction tr, CaseDbTransaction caseDbTransaction) {
@@ -1285,29 +1652,53 @@ public final class DrawableDB {
}
}
- public Boolean isGroupAnalyzed(GroupKey> gk) throws SQLException, TskCoreException {
- dbWriteLock();
- try {
- if (isClosed()) {
- throw new SQLException("The drawables database is closed");
+ /**
+ * Returns whether or not the given group is analyzed and ready to be viewed.
+ *
+ * @param groupKey group key.
+ * @return true if the group is analyzed.
+ * @throws SQLException
+ * @throws TskCoreException
+ */
+ public Boolean isGroupAnalyzed(GroupKey> groupKey) throws SQLException, TskCoreException {
+
+ // Callback to process result of isAnalyzed query
+ class IsGroupAnalyzedQueryResultProcessor implements CaseDbAccessQueryCallback {
+
+ private boolean isAnalyzed = false;
+
+ boolean getIsAnalyzed() {
+ return isAnalyzed;
}
- try (Statement stmt = con.createStatement()) {
- // In testing, this method appears to be a lot faster than doing one large select statement
- Set fileIDsInGroup = getFileIDsInGroup(gk);
- for (Long fileID : fileIDsInGroup) {
- ResultSet analyzedQuery = stmt.executeQuery("SELECT analyzed FROM drawable_files WHERE obj_id = " + fileID); //NON-NLS
- while (analyzedQuery.next()) {
- if (analyzedQuery.getInt(ANALYZED) == 0) {
- return false;
- }
+
+ @Override
+ public void process(ResultSet resultSet) {
+ try {
+ if (resultSet.next()) {
+ isAnalyzed = resultSet.getInt("is_analyzed") == 1 ? true: false;
}
- return true; // THIS APPEARS TO BE A BUG (see JIRA-1130), THE FOR LOOP EXECUTES AT MOST ONCE
+ } catch (SQLException ex) {
+ logger.log(Level.SEVERE, "Failed to get group is_analyzed", ex); //NON-NLS
}
}
- return false;
- } finally {
- dbWriteUnlock();
}
+
+ IsGroupAnalyzedQueryResultProcessor queryResultProcessor = new IsGroupAnalyzedQueryResultProcessor();
+ try {
+ String groupAnalyzedQueryStmt = String.format("is_analyzed FROM " + GROUPS_TABLENAME
+ + " WHERE attribute = \'%s\' AND value = \'%s\' and data_source_obj_id = %d ",
+ SleuthkitCase.escapeSingleQuotes(groupKey.getAttribute().attrName.toString()),
+ SleuthkitCase.escapeSingleQuotes(groupKey.getValueDisplayName()),
+ groupKey.getAttribute() == DrawableAttribute.PATH ? groupKey.getDataSourceObjId() : 0);
+
+ tskCase.getCaseDbAccessManager().select(groupAnalyzedQueryStmt, queryResultProcessor);
+ return queryResultProcessor.getIsAnalyzed();
+ } catch ( TskCoreException ex) {
+ String msg = String.format("Failed to get group is_analyzed for group key %s", groupKey.getValueDisplayName()); //NON-NLS
+ logger.log(Level.SEVERE, msg, ex);
+ }
+
+ return false;
}
/**
@@ -1497,8 +1888,9 @@ public final class DrawableDB {
return;
}
- String insertSQL = String.format(" (data_source_obj_id, value, attribute) VALUES (%d, \'%s\', \'%s\')",
- ds_obj_id, SleuthkitCase.escapeSingleQuotes(value), SleuthkitCase.escapeSingleQuotes(groupBy.attrName.toString()));
+ int isAnalyzed = (groupBy == DrawableAttribute.PATH) ? 0 : 1;
+ String insertSQL = String.format(" (data_source_obj_id, value, attribute, is_analyzed) VALUES (%d, \'%s\', \'%s\', %d)",
+ ds_obj_id, SleuthkitCase.escapeSingleQuotes(value), SleuthkitCase.escapeSingleQuotes(groupBy.attrName.toString()), isAnalyzed);
if (DbType.POSTGRESQL == tskCase.getDatabaseType()) {
insertSQL += " ON CONFLICT DO NOTHING";
}
@@ -1775,8 +2167,13 @@ public final class DrawableDB {
*/
public class DrawableTransaction {
- private final Set updatedFiles = new HashSet<>();
- private final Set removedFiles = new HashSet<>();
+ // The files are processed ORDERED BY parent path
+ // We want to preserve that order here, so that we can detect a
+ // change in path, and thus mark the path group as analyzed
+ // Hence we use a LinkedHashSet here.
+ private final Set updatedFiles = new LinkedHashSet<>();
+ private final Set removedFiles = new LinkedHashSet<>();
+
private boolean completed;
private DrawableTransaction() throws TskCoreException, SQLException {
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
index 82fc6f7469..c0c87c821f 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
@@ -107,6 +107,12 @@ public class GroupManager {
private final ImageGalleryController controller;
+ /**
+ * Keeps track of the current path group
+ * - a change in path indicates the current path group is analyzed
+ */
+ @GuardedBy("this") //NOPMD
+ private GroupKey> currentPathGroup = null;
/**
* list of all analyzed groups
*/
@@ -238,7 +244,7 @@ public class GroupManager {
setGroupBy(DrawableAttribute.PATH);
setSortOrder(SortOrder.ASCENDING);
setDataSource(null);
-
+
unSeenGroups.forEach(controller.getCategoryManager()::unregisterListener);
unSeenGroups.clear();
analyzedGroups.forEach(controller.getCategoryManager()::unregisterListener);
@@ -618,6 +624,8 @@ public class GroupManager {
for (GroupKey> gk : groupsForFile) {
// see if a group has been created yet for the key
DrawableGroup g = getGroupForKey(gk);
+
+ updateCurrentPathGroup(gk);
addFileToGroup(g, gk, fileId);
}
}
@@ -625,7 +633,58 @@ public class GroupManager {
//we fire this event for all files so that the category counts get updated during initial db population
controller.getCategoryManager().fireChange(updatedFileIDs, null);
}
+
+ /**
+ * Checks if the given path is different from the current path group.
+ * If so, updates the current path group as analyzed, and sets current path
+ * group to the given path.
+ *
+ * The idea is that when the path of the files being processed changes,
+ * we have moved from one folder to the next, and the group for the
+ * previous PATH can be considered as analyzed and can be displayed.
+ *
+     * NOTE: this is a close approximation for when all files in a folder have been processed,
+     * but there's some room for error - files may go down the ingest pipeline
+ * out of order or the events may not always arrive in the same order
+ *
+ * @param groupKey
+ */
+ synchronized private void updateCurrentPathGroup(GroupKey> groupKey) {
+ try {
+ if (groupKey.getAttribute() == DrawableAttribute.PATH) {
+
+ if (this.currentPathGroup == null) {
+ currentPathGroup = groupKey;
+ }
+ else if (groupKey.getValue().toString().equalsIgnoreCase(this.currentPathGroup.getValue().toString()) == false) {
+ // mark the last path group as analyzed
+ getDrawableDB().markGroupAnalyzed(currentPathGroup);
+ popuplateIfAnalyzed(currentPathGroup, null);
+
+ currentPathGroup = groupKey;
+ }
+ }
+ }
+ catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, String.format("Error setting is_analyzed status for group: %s", groupKey.getValue().toString()), ex); //NON-NLS
+ }
+ }
+ /**
+ * Resets current path group, after marking the current path group as analyzed.
+ */
+ synchronized public void resetCurrentPathGroup() {
+ try {
+ if (currentPathGroup != null) {
+ getDrawableDB().markGroupAnalyzed(currentPathGroup);
+ popuplateIfAnalyzed(currentPathGroup, null);
+ currentPathGroup = null;
+ }
+ }
+ catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, String.format("Error resetting last path group: %s", currentPathGroup.getValue().toString()), ex); //NON-NLS
+ }
+ }
/**
* If the group is analyzed (or other criteria based on grouping) and should
* be shown to the user, then add it to the appropriate data structures so
diff --git a/KeywordSearch/nbproject/project.xml b/KeywordSearch/nbproject/project.xml
index 47be03672b..c66ad369eb 100644
--- a/KeywordSearch/nbproject/project.xml
+++ b/KeywordSearch/nbproject/project.xml
@@ -206,8 +206,8 @@
release/modules/ext/quartz-2.2.0.jar
- ext/sqlite-jdbc-3.19.3.jar
- release/modules/ext/sqlite-jdbc-3.19.3.jar
+ ext/sqlite-jdbc-3.25.2.jar
+ release/modules/ext/sqlite-jdbc-3.25.2.jar
ext/guava-17.0.jar
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
index a7732205a3..f100c2f138 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2012-2018 Basis Technology Corp.
+ * Copyright 2012-2019 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
*
@@ -97,7 +97,6 @@ final class Chrome extends Extract {
this.getHistory();
this.getBookmark();
this.getCookie();
- this.getLogin();
this.getDownload();
}
@@ -591,7 +590,7 @@ final class Chrome extends Extract {
List> tempList = this.dbConnect(temps, LOGIN_QUERY);
logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2}artifacts identified.", new Object[]{getModuleName(), temps, tempList.size()}); //NON-NLS
for (HashMap result : tempList) {
-
+
Collection bbattributes = Arrays.asList(
new BlackboardAttribute(
TSK_URL, PARENT_MODULE_NAME,
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
index c4b4d47e6d..6d3aa18512 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2011-2018 Basis Technology Corp.
+ * Copyright 2011-2019 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad 42six com
@@ -530,6 +530,7 @@ class ExtractIE extends Extract {
logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); //NON-NLS
return bbartifacts;
}
+
}
private Optional parseLine(AbstractFile origFile, String line) {
@@ -581,22 +582,6 @@ class ExtractIE extends Extract {
return Optional.of(new URLVisit(user, realurl, domain, ftime));
}
- @Immutable
- private static class URLVisit {
-
- private final String user;
- private final String url;
- private final String domain;
- private final Long time;
-
- URLVisit(String user, String url, String domain, Long ftime) {
- this.user = user;
- this.url = url;
- this.domain = domain;
- this.time = ftime;
- }
- }
-
/**
*
* Determine if the URL should be ignored.
@@ -612,4 +597,21 @@ class ExtractIE extends Extract {
return StringUtils.isBlank(url)
|| url.toLowerCase().startsWith(RESOURCE_URL_PREFIX);
}
+
+ @Immutable
+ private static class URLVisit {
+
+ private final String user;
+ private final String url;
+ private final String domain;
+ private final Long time;
+
+ URLVisit(String user, String url, String domain, Long ftime) {
+ this.user = user;
+ this.url = url;
+ this.domain = domain;
+ this.time = ftime;
+ }
+
+ }
}
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
index 9b8c4c89c7..c378c73534 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
@@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
- * Copyright 2012-2018 Basis Technology Corp.
+ * Copyright 2012-2019 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad 42six com
@@ -37,6 +37,7 @@ import java.text.SimpleDateFormat;
import java.util.*;
import static java.util.Collections.emptySet;
import static java.util.Collections.singleton;
+import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.logging.Level;
import javax.xml.parsers.DocumentBuilder;
@@ -45,6 +46,7 @@ import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.collections4.CollectionUtils;
import static org.apache.commons.lang.StringUtils.isNotEmpty;
import org.openide.modules.InstalledFileLocator;
+import org.openide.util.Exceptions;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
@@ -579,25 +581,66 @@ class ExtractRegistry extends Extract {
return Optional.empty();
}
- private Optional processProfileList(String homeDir, Element artnode, AbstractFile regAbstractFile) throws IllegalArgumentException {
- try {
- List bbattributes = Arrays.asList(
- new BlackboardAttribute(
- TSK_USER_NAME, PARENT_MODULE_NAME,
- artnode.getAttribute("username")), //NON-NLS
- new BlackboardAttribute(
- TSK_USER_ID, PARENT_MODULE_NAME,
- artnode.getAttribute("sid")),//NON-NLS
- new BlackboardAttribute(
- TSK_PATH, PARENT_MODULE_NAME,
- homeDir));
+ private Optional processProfileList(String homeDir, Element artnode, AbstractFile regFile) throws IllegalArgumentException {
+ String sid = artnode.getAttribute("sid"); //NON-NLS
+ String username = artnode.getAttribute("username"); //NON-NLS
+ BlackboardArtifact bbart = null;
- BlackboardArtifact bbart = regAbstractFile.newArtifact(TSK_OS_ACCOUNT);
- bbart.addAttributes(bbattributes);
- return Optional.of(bbart);
+ try {
+ BlackboardAttribute.Type userAttrType = new BlackboardAttribute.Type(TSK_USER_ID);
+
+ //check if any of the existing artifacts match this username
+ ArrayList existingArtifacts = currentCase.getSleuthkitCase().getBlackboardArtifacts(TSK_OS_ACCOUNT);
+
+ for (BlackboardArtifact artifact : existingArtifacts) {
+ if (artifact.getDataSource().getId() == regFile.getDataSourceObjectId()) {
+ BlackboardAttribute attribute = artifact.getAttribute(userAttrType);
+ if (attribute != null && attribute.getValueString().equals(sid)) {
+ bbart = artifact;
+ break;
+ }
+ }
+ }
} catch (TskCoreException ex) {
- logger.log(Level.SEVERE, "Error adding account artifact to blackboard."); //NON-NLS
+ logger.log(Level.WARNING, "Error getting existing os account artifact", ex);
}
+ try {
+ Collection bbattributes = new ArrayList<>();
+ if (bbart == null) {
+ //create new artifact
+ bbart = regFile.newArtifact(TSK_OS_ACCOUNT);
+ bbattributes.addAll(Arrays.asList(
+ new BlackboardAttribute(
+ TSK_USER_NAME, MODULE_NAME,
+ username),
+ new BlackboardAttribute(
+ TSK_USER_ID, MODULE_NAME,
+ sid),
+ new BlackboardAttribute(
+ TSK_PATH, MODULE_NAME,
+ homeDir)
+ ));
+ } else {
+ //add attributes to existing artifact
+ if (bbart.getAttribute(new BlackboardAttribute.Type(TSK_USER_NAME)) == null) {
+ bbattributes.add(new BlackboardAttribute(
+ TSK_USER_NAME, MODULE_NAME,
+ username));
+ }
+ if (bbart.getAttribute(new BlackboardAttribute.Type(TSK_PATH)) == null) {
+ bbattributes.add(new BlackboardAttribute(
+ TSK_PATH, MODULE_NAME,
+ homeDir));
+ }
+ }
+ bbart.addAttributes(bbattributes);
+ // index the artifact for keyword search
+ blackboard.postArtifact(bbart, MODULE_NAME);
+ return Optional.of(bbart);
+ } catch (TskCoreException | Blackboard.BlackboardException ex2) {
+ logger.log(Level.SEVERE, "Error adding account artifact to blackboard.", ex2); //NON-NLS
+ }
+ //NON-NLS
return Optional.empty();
}
@@ -667,6 +710,7 @@ class ExtractRegistry extends Extract {
model = info.getProduct();
}
}
+
List bbattributes = Lists.newArrayList(
new BlackboardAttribute(
TSK_DATETIME, PARENT_MODULE_NAME,
@@ -706,6 +750,7 @@ class ExtractRegistry extends Extract {
} else if (name.equals("Domain")) { // NON-NLS
domain = value;
}
+
}
}
try {
diff --git a/docs/doxygen-user/main.dox b/docs/doxygen-user/main.dox
index 7a06942ab2..e22d5fb20e 100644
--- a/docs/doxygen-user/main.dox
+++ b/docs/doxygen-user/main.dox
@@ -4,9 +4,7 @@
Overview
-----
-This is the User's Guide for the open source Autopsy platform. Autopsy allows you to examine a hard drive or mobile device and recover evidence from it. This guide should help you with using Autopsy. The developer's guide will help you develop your own Autopsy modules.
-
-Autopsy 4 (and 3) are a complete rewrite from Autopsy 2, and none of this document is relevant to Autopsy 2.
+This is the User's Guide for the open source Autopsy platform. Autopsy allows you to examine a hard drive or mobile device and recover evidence from it. This guide should help you with using Autopsy. The developer's guide will help you develop your own Autopsy modules.
Help Topics
-------
diff --git a/docs/doxygen-user/quick_start_guide.dox b/docs/doxygen-user/quick_start_guide.dox
index 1b5cf41234..0a279244cf 100644
--- a/docs/doxygen-user/quick_start_guide.dox
+++ b/docs/doxygen-user/quick_start_guide.dox
@@ -1,28 +1,36 @@
/*! \page quick_start_guide Quick Start Guide
-\section s1 Adding a Data Source (image, local disk, logical files)
+\section s1 Cases and Data Sources
-Data sources are added to a case. A case can have a single data source or it can have multiple data sources. Currently, a single report is generated for an entire case, so if you need to report on individual data sources, then you should use one data source per case. If there are many drives/phones/other data sources for one investigation, then your case should have multiple data sources.
+Autopsy organizes data by case. Each case can have one or more data sources, which can be a disk image, a set of logical files, a USB-connected device, etc.
-\subsection s2 Creating a Case
+Cases can either be single-user or multi-user. Multi-user cases allow several examiners to review the data at the same time and collaborate, but require some additional open source servers to be configured.
+
+When you have several data sources and are deciding about creating a case, consider:
+- You can have only one case open at a time
+- Reports are generated at a case-level
+- The application can slow down when there are many large data sources in the same case
+
+\subsection s1a Creating a Case
To create a case, use either the "Create New Case" option on the Welcome screen or from the "Case" menu. This will start the New Case Wizard. You will need to supply it with the name of the case and a directory to store the case results into. You can optionally provide case numbers and reviewer names.
-\subsection s3 Adding a Data Source
+\subsection s1b Adding a Data Source
The next step is to add an input data source to the case. The Add Data Source Wizard will start automatically after the case is created or you can manually start it from the "Case" menu or toolbar. You will need to choose the type of input data source to add (image, local disk, or logical files and folders). Next, supply it with the location of the source to add.
- For a disk image, browse to the first file in the set (Autopsy will find the rest of the files). Autopsy currently supports E01 and raw (dd) files.
-- For local disk, select one of the detected disks. Autopsy will add the current view of the disk to the case (i.e. snapshot of the meta-data). However, the individual file content (not meta-data) does get updated with the changes made to the disk. Note, you may need run Autopsy as an Administrator to detect all disks.
+- For local disk, select one of the detected disks. Autopsy will add the current view of the disk to the case (i.e. snapshot of the meta-data). However, the individual file content (not meta-data) does get updated with the changes made to the disk. You can optionally create a copy of all data read from the local disk to a VHD file, which can be useful for triage situations. Note, you may need to run Autopsy as an Administrator to detect all disks.
- For logical files (a single file or folder of files), use the "Add" button to add one or more files or folders on your system to the case. Folders will be recursively added to the case.
-There are a couple of options in the wizard that will allow you to make the ingest process faster. These typically deal with deleted files. It will take longer if unallocated space is analyzed and the entire drive is searched for deleted files. In some scenarios, these recovery steps must be performed and in other scenarios these steps are not needed and instead fast results on the allocated files are needed. Use these options to control how long the analysis will take.
-Autopsy will start to analyze these data sources and add them to the case and the internal database. While it is doing that, it will prompt you to configure the Ingest Modules.
+After supplying the needed data, Autopsy will quickly review the data sources and add minimal metadata to the case databases so that it can schedule the files for analysis. While it is doing that, it will prompt you to configure the Ingest Modules.
-\subsection s4 Ingest Modules
+\subsection s1c Ingest Modules
-You will next be prompted to configure the Ingest Modules. Ingest modules will run in the background and perform specific tasks. The Ingest Modules analyze files in a prioritized order so that files in a user's directory are analyzed before files in other folders. Ingest modules can be developed by third-parties. The standard ingest modules included with Autopsy are:
+Ingest modules are responsible for analyzing the data source contents and will run in the background. The Ingest Modules analyze files in a prioritized order so that files in a user's directory are analyzed before files in other folders. Ingest modules can be developed by third-parties.
+
+The standard ingest modules included with Autopsy are:
- \subpage recent_activity_page extracts user activity as saved by web browsers and the OS. Also runs Regripper on the registry hive.
- \subpage hash_db_page uses hash sets to ignore known files from the NIST NSRL and flag known bad files. Use the "Advanced" button to add and configure the hash sets to use during this process. You will get updates on known bad file hits as the ingest occurs. You can later add hash sets via the Tools -> Options menu in the main UI. You can download an index of the NIST NSRL from http://sourceforge.net/projects/autopsy/files/NSRL/
@@ -36,7 +44,7 @@ You will next be prompted to configure the Ingest Modules. Ingest modules will r
- \subpage android_analyzer_page allows you to parse common items from Android devices. Places artifacts into the BlackBoard.
- \subpage interesting_files_identifier_page searches for files and directories based on user-specified rules in Tools, Options, Interesting Files. It works as a "File Alerting Module". It generates messages in the inbox when specified files are found.
- \subpage photorec_carver_page carves files from unallocated space and sends them through the file processing chain.
-- \subpage cr_ingest_module adds file hashes and other extracted properties to a central repository.
+- \subpage cr_ingest_module adds file hashes and other extracted properties to a central repository for future correlation and to flag previously notable files.
- \subpage encryption_page looks for encrypted files.
- \subpage vm_extractor_page extracts data from virtual machine files.
@@ -44,7 +52,9 @@ When you select a module, you will have the option to change its settings. For
While ingest modules are running in the background, you will see a progress bar in the lower right. You can use the GUI to review incoming results and perform other tasks while ingesting at the same time.
-\section s1a Analysis Basics
+\section s2 Analysis Basics
+
+After the ingest modules start to analyze the data source, you'll see the main analysis interface. You can choose to search for specific items, browse to specific folders, or review ingest module results.
\image html screenshot.PNG
@@ -53,7 +63,7 @@ You will start all of your analysis techniques from the tree on the left.
- The Data Sources root node shows all data in the case.
- The individual image nodes show the file system structure of the disk images or local disks in the case.
- The LogicalFileSet nodes show the logical files in the case.
-- The Views node shows the same data from a file type or timeline perspective.
+- The Views node shows the same data from a different perspective, such as organized by file type.
- The Results node shows the output from the ingest modules.
When you select a node from the tree on the left, a list of files will be shown in the upper right. You can use the Thumbnail view in the upper right to view the pictures. When you select a file from the upper right, its contents will be shown in the lower right. You can use the tabs in the lower right to view the text of the file, an image, or the hex data.
@@ -66,23 +76,23 @@ The tree on the left as well as the table on the right have a \ref ui_quick_sear
You can tag (bookmark) arbitrary files so that you can more quickly find them later or so that you can include them specifically in a report.
-\subsection s2a Ingest Inbox
-As you are going through the results in the tree, the ingest modules are running in the background.
-The results are shown in the tree as soon as the ingest modules find them and report them.
+\section s3 Other Analysis Interfaces
-The Ingest Inbox receives messages from the ingest modules as they find results.
-You can open the inbox to see what has been recently found.
-It keeps track of what messages you have read.
+In addition to the 3-panel UI with the tree on the left, there are other interfaces that are more specialized.
-The intended use of this inbox is that you can focus on some data for a while and then check back on the inbox at a time that is convenient for them.
-You can then see what else was found while you were focused on the previous task.
-You may learn that a known bad file was found or that a file was found with a relevant keyword and then decide to focus on that for a while.
+\subsection s3a Timeline
-When you select a message, you can then jump to the Results tree where more details can be found or jump to the file's location in the filesystem.
+The timeline feature can be opened from the "Tools" menu or the toolbar. This will show you file system and other events organized by time using various display techniques. See the \subpage timeline_page section for more details.
-\subsection s2b Timeline
-There is a basic timeline view that you can access via the "Tools", "Make Timeline" feature. This will take a few minutes to create the timeline for analysis. Its features are still in development.
+
+\subsection s3b Image Gallery
+
+The Image Gallery focuses on showing the pictures and videos from the data source organized by folder. It will show you files as soon as they have been hashed and EXIF data extracted. You can open it from the "Tools" menu. See the \subpage image_gallery_page section for more details.
+
+\subsection s3c Communications
+
+The Communications interface focuses on showing which accounts were communicated with the most and what messages were sent. It allows you to focus on certain relationships or communications within a certain date range. You can open it from the "Tools" menu. See the \subpage communications_page section for more details.
\section s5 Example Use Cases
@@ -110,16 +120,14 @@ If you want to see all images and video on the disk image, then go to the "
Select either "Images" or "Videos".
You can use the thumbnail option in the upper right to view thumbnails of all images.
-Note: We are working on making this more efficient when there are lots of images. We are also working on the feature to display video thumbnails.
You can select an image or video from the upper right and view the video or image in the lower right. Video will be played with sound.
\section s6 Reporting
-A final report can be generated that will include all analysis results.
-Use the "Generate Report" button to create this.
-It will create an HTML or XLS report in the Reports folder of the case folder.
-If you forgot the location of your case folder, you can determine it using the "Case Properties" option in the "Case" menu.
-There is also an option to export report files to a separate folder outside of the case folder.
+A final report can be generated that will include all analysis results using the "Generate Report" toolbar button. Reports can be generated in HTML, XLS, KML, and other formats.
+
+You can later find your generated reports by going to the tree and opening the Reports node at the bottom.
+
*/
\ No newline at end of file