ingestJobs = skCase.getIngestJobs();
diff --git a/Core/src/org/sleuthkit/autopsy/commonfilesearch/InterCasePanel.java b/Core/src/org/sleuthkit/autopsy/commonfilesearch/InterCasePanel.java
index 22454979fb..2cb94571ae 100644
--- a/Core/src/org/sleuthkit/autopsy/commonfilesearch/InterCasePanel.java
+++ b/Core/src/org/sleuthkit/autopsy/commonfilesearch/InterCasePanel.java
@@ -19,6 +19,7 @@
*/
package org.sleuthkit.autopsy.commonfilesearch;
+import org.sleuthkit.autopsy.guiutils.DataSourceComboBoxModel;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
diff --git a/Core/src/org/sleuthkit/autopsy/commonfilesearch/IntraCasePanel.java b/Core/src/org/sleuthkit/autopsy/commonfilesearch/IntraCasePanel.java
index 4841d39e84..fd8f9becac 100644
--- a/Core/src/org/sleuthkit/autopsy/commonfilesearch/IntraCasePanel.java
+++ b/Core/src/org/sleuthkit/autopsy/commonfilesearch/IntraCasePanel.java
@@ -19,6 +19,7 @@
*/
package org.sleuthkit.autopsy.commonfilesearch;
+import org.sleuthkit.autopsy.guiutils.DataSourceComboBoxModel;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java b/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java
index 03a84d4181..06a442eb2a 100644
--- a/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/Metadata.java
@@ -26,6 +26,8 @@ import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContentViewer;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@@ -123,11 +125,18 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer {
@Messages({
"Metadata.tableRowTitle.mimeType=MIME Type",
- "Metadata.nodeText.truncated=(results truncated)"})
+ "Metadata.nodeText.truncated=(results truncated)",
+ "Metadata.tableRowTitle.sha1=SHA1",
+ "Metadata.tableRowTitle.sha256=SHA256",
+ "Metadata.tableRowTitle.imageType=Type",
+ "Metadata.tableRowTitle.sectorSize=Sector Size",
+ "Metadata.tableRowTitle.timezone=Time Zone",
+ "Metadata.tableRowTitle.deviceId=Device ID"})
@Override
public void setNode(Node node) {
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
- if (file == null) {
+ Image image = node.getLookup().lookup(Image.class);
+ if (file == null && image == null) {
setText(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.nonFilePassedIn"));
return;
}
@@ -135,64 +144,100 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer {
StringBuilder sb = new StringBuilder();
startTable(sb);
- try {
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getUniquePath());
- } catch (TskCoreException ex) {
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getParentPath() + "/" + file.getName());
- }
-
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.type"), file.getType().getName());
- addRow(sb, Bundle.Metadata_tableRowTitle_mimeType(), file.getMIMEType());
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(file.getSize()));
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.fileNameAlloc"), file.getDirFlagAsString());
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.metadataAlloc"), file.getMetaFlagsAsString());
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.modified"), ContentUtils.getStringTime(file.getMtime(), file));
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.accessed"), ContentUtils.getStringTime(file.getAtime(), file));
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.created"), ContentUtils.getStringTime(file.getCrtime(), file));
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.changed"), ContentUtils.getStringTime(file.getCtime(), file));
-
-
- String md5 = file.getMd5Hash();
- if (md5 == null) {
- md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
- }
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.hashLookupResults"), file.getKnown().toString());
-
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(file.getId()));
- if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
- addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), file.getLocalAbsPath());
- }
-
- endTable(sb);
-
- /*
- * If we have a file system file, grab the more detailed metadata text
- * too
- */
- try {
- if (file instanceof FsContent) {
- FsContent fsFile = (FsContent) file;
-
- sb.append("
\n"); //NON-NLS
- sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.text"));
- sb.append("
"); // NON-NLS
- for (String str : fsFile.getMetaDataText()) {
- sb.append(str).append("
"); //NON-NLS
-
- /*
- * Very long results can cause the UI to hang before displaying,
- * so truncate the results if necessary.
- */
- if(sb.length() > 50000){
- sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.truncated"));
- break;
- }
- }
- sb.append("
\n"); //NON-NLS
+ if (file != null) {
+ try {
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getUniquePath());
+ } catch (TskCoreException ex) {
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), file.getParentPath() + "/" + file.getName());
}
- } catch (TskCoreException ex) {
- sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
+
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.type"), file.getType().getName());
+ addRow(sb, Bundle.Metadata_tableRowTitle_mimeType(), file.getMIMEType());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(file.getSize()));
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.fileNameAlloc"), file.getDirFlagAsString());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.metadataAlloc"), file.getMetaFlagsAsString());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.modified"), ContentUtils.getStringTime(file.getMtime(), file));
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.accessed"), ContentUtils.getStringTime(file.getAtime(), file));
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.created"), ContentUtils.getStringTime(file.getCrtime(), file));
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.changed"), ContentUtils.getStringTime(file.getCtime(), file));
+
+
+ String md5 = file.getMd5Hash();
+ if (md5 == null) {
+ md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.hashLookupResults"), file.getKnown().toString());
+
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(file.getId()));
+ if (file.getType().compareTo(TSK_DB_FILES_TYPE_ENUM.LOCAL) == 0) {
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.localPath"), file.getLocalAbsPath());
+ }
+
+ endTable(sb);
+
+ /*
+ * If we have a file system file, grab the more detailed metadata text
+ * too
+ */
+ try {
+ if (file instanceof FsContent) {
+ FsContent fsFile = (FsContent) file;
+
+ sb.append("
\n"); //NON-NLS
+ sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.text"));
+ sb.append("
"); // NON-NLS
+ for (String str : fsFile.getMetaDataText()) {
+ sb.append(str).append("
"); //NON-NLS
+
+ /*
+ * Very long results can cause the UI to hang before displaying,
+ * so truncate the results if necessary.
+ */
+ if(sb.length() > 50000){
+ sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.truncated"));
+ break;
+ }
+ }
+ sb.append("
\n"); //NON-NLS
+ }
+ } catch (TskCoreException ex) {
+ sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
+ }
+ } else {
+ try {
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getUniquePath());
+ } catch (TskCoreException ex) {
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.name"), image.getName());
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.imageType"), image.getType().getName());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.size"), Long.toString(image.getSize()));
+
+ try {
+ String md5 = image.getMd5();
+ if (md5 == null || md5.isEmpty()) {
+ md5 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.md5"), md5);
+
+ String sha1 = image.getSha1();
+ if (sha1 == null || sha1.isEmpty()) {
+ sha1 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha1"), sha1);
+
+ String sha256 = image.getSha256();
+ if (sha256 == null || sha256.isEmpty()) {
+ sha256 = NbBundle.getMessage(this.getClass(), "Metadata.tableRowContent.md5notCalc");
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sha256"), sha256);
+ } catch (TskCoreException ex) {
+ sb.append(NbBundle.getMessage(this.getClass(), "Metadata.nodeText.exceptionNotice.text")).append(ex.getLocalizedMessage());
+ }
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.sectorSize"), Long.toString(image.getSsize()));
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.timezone"), image.getTimeZone());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.deviceId"), image.getDeviceId());
+ addRow(sb, NbBundle.getMessage(this.getClass(), "Metadata.tableRowTitle.internalid"), Long.toString(image.getId()));
}
setText(sb.toString());
@@ -227,8 +272,9 @@ public class Metadata extends javax.swing.JPanel implements DataContentViewer {
@Override
public boolean isSupported(Node node) {
+ Image image = node.getLookup().lookup(Image.class);
AbstractFile file = node.getLookup().lookup(AbstractFile.class);
- return file != null;
+ return (file != null) || (image != null);
}
@Override
diff --git a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java
index 2be9a9b447..e10a650974 100644
--- a/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java
+++ b/Core/src/org/sleuthkit/autopsy/core/UserPreferences.java
@@ -75,6 +75,8 @@ public final class UserPreferences {
public static final String SHOW_ONLY_CURRENT_USER_TAGS = "ShowOnlyCurrentUserTags";
public static final String HIDE_CENTRAL_REPO_COMMENTS_AND_OCCURRENCES = "HideCentralRepoCommentsAndOccurrences";
public static final String DISPLAY_TRANSLATED_NAMES = "DisplayTranslatedNames";
+ public static final String MAXIMUM_NUMBER_OF_RESULTS = "MaximumNumberOfResults";
+ private static final int DEFAULT_MAX_RESULTS = 20000;
// Prevent instantiation.
private UserPreferences() {
@@ -471,4 +473,20 @@ public final class UserPreferences {
public static void setLogFileCount(int count) {
preferences.putInt(MAX_NUM_OF_LOG_FILE, count);
}
+
+ /**
+ * Set the maximum number of result rows to show in data result tables.
+ * @param max the maximum number of result rows to display
+ */
+ public static void setMaximumNumberOfResults(int max) {
+ preferences.putInt(MAXIMUM_NUMBER_OF_RESULTS, max);
+ }
+
+ /**
+ * Get the maximum number of result rows to show in data result tables.
+ * @return the maximum number of result rows to display
+ */
+ public static int getMaximumNumberOfResults() {
+ return preferences.getInt(MAXIMUM_NUMBER_OF_RESULTS, DEFAULT_MAX_RESULTS);
+ }
}
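The two new accessors above are thin wrappers around the NetBeans preferences store. A minimal sketch of how a caller might use them (the ResultRowLimitSketch class name is made up; only the UserPreferences calls come from this patch):

import org.sleuthkit.autopsy.core.UserPreferences;

public class ResultRowLimitSketch {
    public static void main(String[] args) {
        // Persist a custom cap, then read it back; getMaximumNumberOfResults()
        // falls back to the 20,000-row default when the key has never been set.
        UserPreferences.setMaximumNumberOfResults(5000);
        int maxRows = UserPreferences.getMaximumNumberOfResults();
        System.out.println("Result tables will be capped at " + maxRows + " rows");
    }
}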
diff --git a/Core/src/org/sleuthkit/autopsy/core/layer.xml b/Core/src/org/sleuthkit/autopsy/core/layer.xml
index 8bdde0f317..3c87a3df9f 100644
--- a/Core/src/org/sleuthkit/autopsy/core/layer.xml
+++ b/Core/src/org/sleuthkit/autopsy/core/layer.xml
@@ -337,6 +337,11 @@
+
+
+
+
+
- org.sleuthkit.autopsy.modules.e01verify.E01VerifierModuleFactory
+ org.sleuthkit.autopsy.modules.dataSourceIntegrity.DataSourceIntegrityModuleFactory
diff --git a/Core/src/org/sleuthkit/autopsy/modules/case_uco/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/case_uco/Bundle.properties
new file mode 100755
index 0000000000..8c17dbd39f
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/case_uco/Bundle.properties
@@ -0,0 +1,4 @@
+OpenIDE-Module-Name=CaseUcoModule
+ReportCaseUco.getName.text=CASE-UCO
+ReportCaseUco.getDesc.text=CASE-UCO format report with basic property fields for every file.
+ReportCaseUcoConfigPanel.jLabelSelectDataSource.text=Select a data source for the CASE-UCO report
diff --git a/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUco.java b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUco.java
new file mode 100755
index 0000000000..bde76ecb70
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUco.java
@@ -0,0 +1,458 @@
+ /*
+ *
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2012-2018 Basis Technology Corp.
+ * Project Contact/Architect: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.case_uco;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.logging.Level;
+import javax.swing.JPanel;
+import com.fasterxml.jackson.core.JsonEncoding;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.util.DefaultIndenter;
+import com.fasterxml.jackson.core.util.DefaultPrettyPrinter;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.SimpleTimeZone;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+import org.sleuthkit.autopsy.datamodel.ContentUtils;
+import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.report.GeneralReportModule;
+import org.sleuthkit.autopsy.report.ReportProgressPanel;
+import org.sleuthkit.autopsy.report.ReportProgressPanel.ReportStatus;
+import org.sleuthkit.datamodel.*;
+
+/**
+ * ReportCaseUco generates a report in the CASE-UCO format. It saves basic
+ * file info like full path, name, MIME type, times, and hash.
+ */
+class ReportCaseUco implements GeneralReportModule {
+
+ private static final Logger logger = Logger.getLogger(ReportCaseUco.class.getName());
+ private static ReportCaseUco instance = null;
+ private ReportCaseUcoConfigPanel configPanel;
+
+ private static final String REPORT_FILE_NAME = "CASE_UCO_output.json-ld";
+
+ // Hidden constructor for the report
+ private ReportCaseUco() {
+ }
+
+ // Get the default implementation of this report
+ public static synchronized ReportCaseUco getDefault() {
+ if (instance == null) {
+ instance = new ReportCaseUco();
+ }
+ return instance;
+ }
+
+ /**
+ * Generates a CASE-UCO format report.
+ *
+ * @param baseReportDir path to save the report
+ * @param progressPanel panel to update the report's progress
+ */
+ @NbBundle.Messages({
+ "ReportCaseUco.notInitialized=CASE-UCO settings panel has not been initialized",
+ "ReportCaseUco.noDataSourceSelected=No data source selected for CASE-UCO report",
+ "ReportCaseUco.noCaseOpen=Unable to open currect case",
+ "ReportCaseUco.unableToCreateDirectories=Unable to create directory for CASE-UCO report",
+ "ReportCaseUco.initializing=Creating directories...",
+ "ReportCaseUco.querying=Querying files...",
+ "ReportCaseUco.ingestWarning=Warning, this report will be created before ingest services completed",
+ "ReportCaseUco.processing=Saving files in CASE-UCO format...",
+ "ReportCaseUco.srcModuleName.text=CASE-UCO Report"
+ })
+ @Override
+ @SuppressWarnings("deprecation")
+ public void generateReport(String baseReportDir, ReportProgressPanel progressPanel) {
+
+ if (configPanel == null) {
+ logger.log(Level.SEVERE, "CASE-UCO settings panel has not been initialized"); //NON-NLS
+ MessageNotifyUtil.Message.error(Bundle.ReportCaseUco_notInitialized());
+ progressPanel.complete(ReportStatus.ERROR);
+ return;
+ }
+
+ Long selectedDataSourceId = configPanel.getSelectedDataSourceId();
+ if (selectedDataSourceId == ReportCaseUcoConfigPanel.NO_DATA_SOURCE_SELECTED) {
+ logger.log(Level.SEVERE, "No data source selected for CASE-UCO report"); //NON-NLS
+ MessageNotifyUtil.Message.error(Bundle.ReportCaseUco_noDataSourceSelected());
+ progressPanel.complete(ReportStatus.ERROR);
+ return;
+ }
+
+ // Start the progress bar and setup the report
+ progressPanel.setIndeterminate(false);
+ progressPanel.start();
+ progressPanel.updateStatusLabel(Bundle.ReportCaseUco_initializing());
+
+ // Create the JSON generator
+ JsonFactory jsonGeneratorFactory = new JsonFactory();
+ String reportPath = baseReportDir + getRelativeFilePath();
+ java.io.File reportFile = Paths.get(reportPath).toFile();
+ try {
+ Files.createDirectories(Paths.get(reportFile.getParent()));
+ } catch (IOException ex) {
+ logger.log(Level.SEVERE, "Unable to create directory for CASE-UCO report", ex); //NON-NLS
+ MessageNotifyUtil.Message.error(Bundle.ReportCaseUco_unableToCreateDirectories());
+ progressPanel.complete(ReportStatus.ERROR);
+ return;
+ }
+
+ // Check if ingest has finished
+ if (IngestManager.getInstance().isIngestRunning()) {
+ MessageNotifyUtil.Message.warn(Bundle.ReportCaseUco_ingestWarning());
+ }
+
+
+ JsonGenerator jsonGenerator = null;
+ SimpleTimeZone timeZone = new SimpleTimeZone(0, "GMT");
+ try {
+ jsonGenerator = jsonGeneratorFactory.createGenerator(reportFile, JsonEncoding.UTF8);
+ // insert \n after each field for more readable formatting
+ jsonGenerator.setPrettyPrinter(new DefaultPrettyPrinter().withObjectIndenter(new DefaultIndenter(" ", "\n")));
+
+ SleuthkitCase skCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+
+ progressPanel.updateStatusLabel(Bundle.ReportCaseUco_querying());
+
+ // create the required CASE-UCO entries at the beginning of the output file
+ initializeJsonOutputFile(jsonGenerator);
+
+ // create CASE-UCO entry for the Autopsy case
+ String caseTraceId = saveCaseInfo(skCase, jsonGenerator);
+
+ // create CASE-UCO data source entry
+ String dataSourceTraceId = saveDataSourceInfo(selectedDataSourceId, caseTraceId, skCase, jsonGenerator);
+
+ // Run getAllFilesQuery to get all files, exclude directories
+ final String getAllFilesQuery = "select obj_id, name, size, crtime, atime, mtime, md5, parent_path, mime_type, extension from tsk_files where "
+ + "data_source_obj_id = " + Long.toString(selectedDataSourceId)
+ + " AND ((meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF.getValue()
+ + ") OR (meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()
+ + ") OR (meta_type = " + TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT.getValue() + "))"; //NON-NLS
+
+ try (SleuthkitCase.CaseDbQuery queryResult = skCase.executeQuery(getAllFilesQuery)) {
+ ResultSet resultSet = queryResult.getResultSet();
+
+ progressPanel.updateStatusLabel(Bundle.ReportCaseUco_processing());
+
+ // Loop files and write info to CASE-UCO report
+ while (resultSet.next()) {
+
+ if (progressPanel.getStatus() == ReportStatus.CANCELED) {
+ break;
+ }
+
+ Long objectId = resultSet.getLong(1);
+ String fileName = resultSet.getString(2);
+ long size = resultSet.getLong("size");
+ String crtime = ContentUtils.getStringTimeISO8601(resultSet.getLong("crtime"), timeZone);
+ String atime = ContentUtils.getStringTimeISO8601(resultSet.getLong("atime"), timeZone);
+ String mtime = ContentUtils.getStringTimeISO8601(resultSet.getLong("mtime"), timeZone);
+ String md5Hash = resultSet.getString("md5");
+ String parent_path = resultSet.getString("parent_path");
+ String mime_type = resultSet.getString("mime_type");
+ String extension = resultSet.getString("extension");
+
+ saveFileInCaseUcoFormat(objectId, fileName, parent_path, md5Hash, mime_type, size, crtime, atime, mtime, extension, jsonGenerator, dataSourceTraceId);
+ }
+ }
+
+ // create the required CASE-UCO entries at the end of the output file
+ finalizeJsonOutputFile(jsonGenerator);
+
+ Case.getCurrentCaseThrows().addReport(reportPath, Bundle.ReportCaseUco_srcModuleName_text(), "");
+
+ progressPanel.complete(ReportStatus.COMPLETE);
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Failed to get list of files from case database", ex); //NON-NLS
+ progressPanel.complete(ReportStatus.ERROR);
+ } catch (IOException ex) {
+ logger.log(Level.SEVERE, "Failed to create JSON output for the CASE-UCO report", ex); //NON-NLS
+ progressPanel.complete(ReportStatus.ERROR);
+ } catch (SQLException ex) {
+ logger.log(Level.WARNING, "Unable to read result set", ex); //NON-NLS
+ progressPanel.complete(ReportStatus.ERROR);
+ } catch (NoCurrentCaseException ex) {
+ logger.log(Level.SEVERE, "No current case open", ex); //NON-NLS
+ progressPanel.complete(ReportStatus.ERROR);
+ } finally {
+ if (jsonGenerator != null) {
+ try {
+ jsonGenerator.close();
+ } catch (IOException ex) {
+ logger.log(Level.WARNING, "Failed to close JSON output file", ex); //NON-NLS
+ }
+ }
+ }
+ }
+
+ private void initializeJsonOutputFile(JsonGenerator catalog) throws IOException {
+ catalog.writeStartObject();
+ catalog.writeFieldName("@graph");
+ catalog.writeStartArray();
+ }
+
+ private void finalizeJsonOutputFile(JsonGenerator catalog) throws IOException {
+ catalog.writeEndArray();
+ catalog.writeEndObject();
+ }
+
+ private String saveCaseInfo(SleuthkitCase skCase, JsonGenerator catalog) throws TskCoreException, SQLException, IOException, NoCurrentCaseException {
+
+ // create a "trace" entry for the Autopsy case iteself
+ String uniqueCaseName;
+ String dbFileName;
+ TskData.DbType dbType = skCase.getDatabaseType();
+ if (dbType == TskData.DbType.SQLITE) {
+ uniqueCaseName = Case.getCurrentCaseThrows().getName();
+ dbFileName = skCase.getDatabaseName();
+ } else {
+ uniqueCaseName = skCase.getDatabaseName();
+ dbFileName = "";
+ }
+
+ String caseDirPath = skCase.getDbDirPath();
+ String caseTraceId = "case-" + uniqueCaseName;
+ catalog.writeStartObject();
+ catalog.writeStringField("@id", caseTraceId);
+ catalog.writeStringField("@type", "Trace");
+
+ catalog.writeFieldName("propertyBundle");
+ catalog.writeStartArray();
+
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "File");
+ if (dbType == TskData.DbType.SQLITE) {
+ catalog.writeStringField("filePath", caseDirPath + java.io.File.separator + dbFileName);
+ catalog.writeBooleanField("isDirectory", false);
+ } else {
+ catalog.writeStringField("filePath", caseDirPath);
+ catalog.writeBooleanField("isDirectory", true);
+ }
+ catalog.writeEndObject();
+
+ catalog.writeEndArray();
+ catalog.writeEndObject();
+
+ return caseTraceId;
+ }
+
+ private String saveDataSourceInfo(Long selectedDataSourceId, String caseTraceId, SleuthkitCase skCase, JsonGenerator jsonGenerator) throws TskCoreException, SQLException, IOException {
+
+ Long imageSize = (long) 0;
+ String imageName = "";
+ boolean isImageDataSource = false;
+ String getImageDataSourceQuery = "select size from tsk_image_info where obj_id = " + selectedDataSourceId;
+ try (SleuthkitCase.CaseDbQuery queryResult = skCase.executeQuery(getImageDataSourceQuery)) {
+ ResultSet resultSet = queryResult.getResultSet();
+ // check if we got a result
+ while (resultSet.next()) {
+ // we got a result so the data source was an image data source
+ imageSize = resultSet.getLong(1);
+ isImageDataSource = true;
+ break;
+ }
+ }
+
+ if (isImageDataSource) {
+ // get path to image file
+ String getPathToDataSourceQuery = "select name from tsk_image_names where obj_id = " + selectedDataSourceId;
+ try (SleuthkitCase.CaseDbQuery queryResult = skCase.executeQuery(getPathToDataSourceQuery)) {
+ ResultSet resultSet = queryResult.getResultSet();
+ while (resultSet.next()) {
+ imageName = resultSet.getString(1);
+ break;
+ }
+ }
+ } else {
+ // logical file data source
+ String getLogicalDataSourceQuery = "select name from tsk_files where obj_id = " + selectedDataSourceId;
+ try (SleuthkitCase.CaseDbQuery queryResult = skCase.executeQuery(getLogicalDataSourceQuery)) {
+ ResultSet resultSet = queryResult.getResultSet();
+ while (resultSet.next()) {
+ imageName = resultSet.getString(1);
+ break;
+ }
+ }
+ }
+
+ return saveDataSourceInCaseUcoFormat(jsonGenerator, imageName, imageSize, selectedDataSourceId, caseTraceId);
+ }
+
+ private String saveDataSourceInCaseUcoFormat(JsonGenerator catalog, String imageName, Long imageSize, Long selectedDataSourceId, String caseTraceId) throws IOException {
+
+ // create a "trace" entry for the data source
+ String dataSourceTraceId = "data-source-"+selectedDataSourceId;
+ catalog.writeStartObject();
+ catalog.writeStringField("@id", dataSourceTraceId);
+ catalog.writeStringField("@type", "Trace");
+
+ catalog.writeFieldName("propertyBundle");
+ catalog.writeStartArray();
+
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "File");
+ catalog.writeStringField("filePath", imageName);
+ catalog.writeEndObject();
+
+ if (imageSize > 0) {
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "ContentData");
+ catalog.writeStringField("sizeInBytes", Long.toString(imageSize));
+ catalog.writeEndObject();
+ }
+
+ catalog.writeEndArray();
+ catalog.writeEndObject();
+
+ // create a "relationship" entry between the case and the data source
+ catalog.writeStartObject();
+ catalog.writeStringField("@id", "relationship-" + caseTraceId);
+ catalog.writeStringField("@type", "Relationship");
+ catalog.writeStringField("source", dataSourceTraceId);
+ catalog.writeStringField("target", caseTraceId);
+ catalog.writeStringField("kindOfRelationship", "contained-within");
+ catalog.writeBooleanField("isDirectional", true);
+
+ catalog.writeFieldName("propertyBundle");
+ catalog.writeStartArray();
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "PathRelation");
+ catalog.writeStringField("path", imageName);
+ catalog.writeEndObject();
+ catalog.writeEndArray();
+
+ catalog.writeEndObject();
+
+ return dataSourceTraceId;
+ }
+
+ private void saveFileInCaseUcoFormat(Long objectId, String fileName, String parent_path, String md5Hash, String mime_type, long size, String ctime,
+ String atime, String mtime, String extension, JsonGenerator catalog, String dataSourceTraceId) throws IOException {
+
+ String fileTraceId = "file-" + objectId;
+
+ // create a "trace" entry for the file
+ catalog.writeStartObject();
+ catalog.writeStringField("@id", fileTraceId);
+ catalog.writeStringField("@type", "Trace");
+
+ catalog.writeFieldName("propertyBundle");
+ catalog.writeStartArray();
+
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "File");
+ catalog.writeStringField("createdTime", ctime);
+ catalog.writeStringField("accessedTime", atime);
+ catalog.writeStringField("modifiedTime", mtime);
+ if (extension != null) {
+ catalog.writeStringField("extension", extension);
+ }
+ catalog.writeStringField("fileName", fileName);
+ if (parent_path != null) {
+ catalog.writeStringField("filePath", parent_path + fileName);
+ }
+ catalog.writeBooleanField("isDirectory", false);
+ catalog.writeStringField("sizeInBytes", Long.toString(size));
+ catalog.writeEndObject();
+
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "ContentData");
+ if (mime_type != null) {
+ catalog.writeStringField("mimeType", mime_type);
+ }
+ if (md5Hash != null) {
+ catalog.writeFieldName("hash");
+ catalog.writeStartArray();
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "Hash");
+ catalog.writeStringField("hashMethod", "SHA256");
+ catalog.writeStringField("hashValue", md5Hash);
+ catalog.writeEndObject();
+ catalog.writeEndArray();
+ }
+ catalog.writeStringField("sizeInBytes", Long.toString(size));
+
+ catalog.writeEndObject();
+
+ catalog.writeEndArray();
+ catalog.writeEndObject();
+
+ // create a "relationship" entry between the file and the data source
+ catalog.writeStartObject();
+ catalog.writeStringField("@id", "relationship-" + objectId);
+ catalog.writeStringField("@type", "Relationship");
+ catalog.writeStringField("source", fileTraceId);
+ catalog.writeStringField("target", dataSourceTraceId);
+ catalog.writeStringField("kindOfRelationship", "contained-within");
+ catalog.writeBooleanField("isDirectional", true);
+
+ catalog.writeFieldName("propertyBundle");
+ catalog.writeStartArray();
+ catalog.writeStartObject();
+ catalog.writeStringField("@type", "PathRelation");
+ if (parent_path != null) {
+ catalog.writeStringField("path", parent_path + fileName);
+ } else {
+ catalog.writeStringField("path", fileName);
+ }
+ catalog.writeEndObject();
+ catalog.writeEndArray();
+
+ catalog.writeEndObject();
+ }
+
+ @Override
+ public String getName() {
+ String name = NbBundle.getMessage(this.getClass(), "ReportCaseUco.getName.text");
+ return name;
+ }
+
+ @Override
+ public String getRelativeFilePath() {
+ return REPORT_FILE_NAME;
+ }
+
+ @Override
+ public String getDescription() {
+ String desc = NbBundle.getMessage(this.getClass(), "ReportCaseUco.getDesc.text");
+ return desc;
+ }
+
+ @Override
+ public JPanel getConfigurationPanel() {
+ try {
+ configPanel = new ReportCaseUcoConfigPanel();
+ } catch (NoCurrentCaseException | TskCoreException | SQLException ex) {
+ logger.log(Level.SEVERE, "Failed to initialize CASE-UCO settings panel", ex); //NON-NLS
+ MessageNotifyUtil.Message.error(Bundle.ReportCaseUco_notInitialized());
+ configPanel = null;
+ }
+ return configPanel;
+ }
+}
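For orientation, the report wraps every Trace and Relationship entry in a single JSON-LD object with one "@graph" array, as initializeJsonOutputFile()/finalizeJsonOutputFile() show. A self-contained sketch of that envelope using the same Jackson calls (the CaseUcoEnvelopeSketch class and sample.json-ld path are illustrative only, not part of the module):

import java.io.File;
import java.io.IOException;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class CaseUcoEnvelopeSketch {
    public static void main(String[] args) throws IOException {
        // Writes {"@graph":[{"@id":"file-1","@type":"Trace"}]} to sample.json-ld
        try (JsonGenerator json = new JsonFactory()
                .createGenerator(new File("sample.json-ld"), JsonEncoding.UTF8)) {
            json.writeStartObject();        // top-level object
            json.writeFieldName("@graph");
            json.writeStartArray();         // every Trace/Relationship entry goes here
            json.writeStartObject();
            json.writeStringField("@id", "file-1");
            json.writeStringField("@type", "Trace");
            json.writeEndObject();
            json.writeEndArray();
            json.writeEndObject();
        }
    }
}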
diff --git a/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.form b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.form
new file mode 100755
index 0000000000..9bec50a37e
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.form
@@ -0,0 +1,67 @@
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.java b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.java
new file mode 100755
index 0000000000..4ed5a8f624
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/case_uco/ReportCaseUcoConfigPanel.java
@@ -0,0 +1,133 @@
+/*
+ *
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.case_uco;
+
+import java.sql.SQLException;
+import java.util.Map;
+import java.util.Map.Entry;
+import javax.swing.ComboBoxModel;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.guiutils.DataSourceLoader;
+import org.sleuthkit.autopsy.guiutils.DataSourceComboBoxModel;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * UI controls for CASE-UCO report. It is a panel which provides the
+ * ability to select a single datasource from a dropdown list
+ * of sources in the current case.
+ */
+final class ReportCaseUcoConfigPanel extends javax.swing.JPanel {
+
+ private static final long serialVersionUID = 1L;
+ static final long NO_DATA_SOURCE_SELECTED = -1;
+ private ComboBoxModel<String> dataSourcesList = new DataSourceComboBoxModel();
+ private final Map<Long, String> dataSourceMap;
+ private final DataSourceLoader dataSourceLoader;
+
+ /**
+ * Creates new form ReportCaseUcoConfigPanel
+ */
+ ReportCaseUcoConfigPanel() throws NoCurrentCaseException, TskCoreException, SQLException {
+ initComponents();
+ this.dataSourceLoader = new DataSourceLoader();
+ this.dataSourceMap = dataSourceLoader.getDataSourceMap();
+
+ String[] dataSourcesNames = new String[dataSourceMap.size()];
+ if (dataSourcesNames.length > 0) {
+ dataSourcesNames = dataSourceMap.values().toArray(dataSourcesNames);
+ setDatasourceComboboxModel(new DataSourceComboBoxModel(dataSourcesNames));
+
+ selectDataSourceComboBox.setEnabled(true);
+ selectDataSourceComboBox.setSelectedIndex(0);
+ }
+ }
+
+ /**
+ * Get the ID for the datasource selected in the combo box.
+ *
+ * @return the selected datasource ID or
+ * ReportCaseUcoConfigPanel.NO_DATA_SOURCE_SELECTED if none is selected
+ */
+ Long getSelectedDataSourceId() {
+ for (Entry<Long, String> entry : this.dataSourceMap.entrySet()) {
+ if (entry.getValue().equals(this.selectDataSourceComboBox.getSelectedItem())) {
+ return entry.getKey();
+ }
+ }
+ return ReportCaseUcoConfigPanel.NO_DATA_SOURCE_SELECTED;
+ }
+
+ /**
+ * This method is called from within the constructor to initialize the form.
+ * WARNING: Do NOT modify this code. The content of this method is always
+ * regenerated by the Form Editor.
+ */
+ @SuppressWarnings("unchecked")
+ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
+ private void initComponents() {
+
+ selectDataSourceComboBox = new javax.swing.JComboBox<>();
+ jLabelSelectDataSource = new javax.swing.JLabel();
+
+ selectDataSourceComboBox.setModel(dataSourcesList);
+ selectDataSourceComboBox.setEnabled(false);
+
+ jLabelSelectDataSource.setText(org.openide.util.NbBundle.getMessage(ReportCaseUcoConfigPanel.class, "ReportCaseUcoConfigPanel.jLabelSelectDataSource.text")); // NOI18N
+
+ javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
+ this.setLayout(layout);
+ layout.setHorizontalGroup(
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addGap(21, 21, 21)
+ .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addComponent(jLabelSelectDataSource)
+ .addGap(0, 0, Short.MAX_VALUE))
+ .addComponent(selectDataSourceComboBox, 0, 348, Short.MAX_VALUE))
+ .addContainerGap())
+ );
+ layout.setVerticalGroup(
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addGap(11, 11, 11)
+ .addComponent(jLabelSelectDataSource)
+ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+ .addComponent(selectDataSourceComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
+ .addContainerGap(130, Short.MAX_VALUE))
+ );
+ }// </editor-fold>//GEN-END:initComponents
+
+ // Variables declaration - do not modify//GEN-BEGIN:variables
+ private javax.swing.JLabel jLabelSelectDataSource;
+ private javax.swing.JComboBox<String> selectDataSourceComboBox;
+ // End of variables declaration//GEN-END:variables
+
+ /**
+ * Set the datamodel for the combo box which displays the data sources in
+ * the current case
+ *
+ * @param dataSourceComboBoxModel the DataSourceComboBoxModel to use
+ */
+ void setDatasourceComboboxModel(DataSourceComboBoxModel dataSourceComboBoxModel) {
+ this.dataSourcesList = dataSourceComboBoxModel;
+ this.selectDataSourceComboBox.setModel(dataSourcesList);
+ }
+}
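A short sketch of how the selection is consumed; this mirrors the checks in ReportCaseUco.generateReport() above (the ConfigPanelUsageSketch class is hypothetical and sits in the same package because the panel is package-private):

package org.sleuthkit.autopsy.modules.case_uco;

import java.sql.SQLException;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.datamodel.TskCoreException;

class ConfigPanelUsageSketch {
    static Long pickDataSource() throws NoCurrentCaseException, TskCoreException, SQLException {
        ReportCaseUcoConfigPanel panel = new ReportCaseUcoConfigPanel();
        Long selectedDataSourceId = panel.getSelectedDataSourceId();
        if (selectedDataSourceId == ReportCaseUcoConfigPanel.NO_DATA_SOURCE_SELECTED) {
            return null; // nothing selected, so there is nothing to report on
        }
        return selectedDataSourceId; // object ID used in the tsk_files query
    }
}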
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle.properties
new file mode 100644
index 0000000000..30624f41bb
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle.properties
@@ -0,0 +1,19 @@
+OpenIDE-Module-Name=ewfVerify
+DataSourceIntegrityModuleFactory.moduleName.text=Data Source Integrity
+DataSourceIntegrityModuleFactory.moduleDesc.text=Calculates and validates hashes of data sources.
+DataSourceIntegrityIngestModule.process.errProcImg=Error processing {0}
+DataSourceIntegrityIngestModule.process.skipNonEwf=Skipping non-disk image data source {0}
+DataSourceIntegrityIngestModule.process.noStoredHash=Image {0} does not have stored hash.
+DataSourceIntegrityIngestModule.process.startingImg=Starting {0}
+DataSourceIntegrityIngestModule.process.errGetSizeOfImg=Error getting size of {0}. Image will not be processed.
+DataSourceIntegrityIngestModule.process.errReadImgAtChunk=Error reading {0} at chunk {1}
+DataSourceIntegrityIngestModule.shutDown.verified=\ verified
+DataSourceIntegrityIngestModule.shutDown.notVerified=\ not verified
+DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader=Data Source Verification Results for {0}
+DataSourceIntegrityIngestModule.shutDown.resultLi=Result\:{0}
+DataSourceIntegrityIngestModule.shutDown.calcHashLi=Calculated hash\: {0}
+DataSourceIntegrityIngestModule.shutDown.storedHashLi=Stored hash\: {0}
+DataSourceIntegrityIngestSettingsPanel.computeHashesCheckbox.text=Calculate data source hashes if none are present
+DataSourceIntegrityIngestSettingsPanel.jLabel1.text=Note that this module will not run on logical files
+DataSourceIntegrityIngestSettingsPanel.jLabel3.text=Ingest Settings
+DataSourceIntegrityIngestSettingsPanel.verifyHashesCheckbox.text=Verify existing data source hashes
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle_ja.properties
new file mode 100644
index 0000000000..e0028e2703
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/Bundle_ja.properties
@@ -0,0 +1,15 @@
+OpenIDE-Module-Name=EWFVerify
+DataSourceIntegrityIngestModule.process.errProcImg={0}\u3092\u51e6\u7406\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f
+DataSourceIntegrityModuleFactory.moduleName.text=E01\u8a8d\u8a3c\u30c4\u30fc\u30eb
+DataSourceIntegrityModuleFactory.moduleDesc.text=E01\u30d5\u30a1\u30a4\u30eb\u306e\u6574\u5408\u6027\u3092\u8a8d\u8a3c\u3057\u307e\u3059\u3002
+DataSourceIntegrityIngestModule.process.skipNonEwf=E01\u30a4\u30e1\u30fc\u30b8\u3067\u306f\u306a\u3044{0}\u3092\u30b9\u30ad\u30c3\u30d7\u3057\u3066\u3044\u307e\u3059
+DataSourceIntegrityIngestModule.process.noStoredHash=\u30a4\u30e1\u30fc\u30b8{0}\u306f\u4fdd\u5b58\u3055\u308c\u3066\u3044\u308b\u30cf\u30c3\u30b7\u30e5\u304c\u3042\u308a\u307e\u305b\u3093\u3002
+DataSourceIntegrityIngestModule.process.startingImg={0}\u3092\u958b\u59cb\u4e2d
+DataSourceIntegrityIngestModule.process.errGetSizeOfImg={0}\u306e\u30b5\u30a4\u30ba\u306e\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30a4\u30e1\u30fc\u30b8\u306f\u51e6\u7406\u3055\u308c\u307e\u305b\u3093\u3002
+DataSourceIntegrityIngestModule.process.errReadImgAtChunk={0}\u306e\u30c1\u30e3\u30f3\u30af{1}\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f
+DataSourceIntegrityIngestModule.shutDown.calcHashLi=\u8a08\u7b97\u3055\u308c\u305f\u30cf\u30c3\u30b7\u30e5\u5024\uff1a{0}
+DataSourceIntegrityIngestModule.shutDown.notVerified=\u8a8d\u8a3c\u3067\u304d\u307e\u305b\u3093\u3067\u3057\u305f
+DataSourceIntegrityIngestModule.shutDown.resultLi=\u7d50\u679c\uff1a{0}
+DataSourceIntegrityIngestModule.shutDown.storedHashLi=\u4fdd\u5b58\u3055\u308c\u305f\u30cf\u30c3\u30b7\u30e5\uff1a {0}
+DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader={0}\u306eEWF\u30d9\u30ea\u30d5\u30a3\u30b1\u30fc\u30b7\u30e7\u30f3\u7d50\u679c
+DataSourceIntegrityIngestModule.shutDown.verified=\u8a8d\u8a3c\u3055\u308c\u307e\u3057\u305f
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java
new file mode 100644
index 0000000000..21dcf95d65
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java
@@ -0,0 +1,362 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2013-2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.dataSourceIntegrity;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import javax.xml.bind.DatatypeConverter;
+import java.util.Arrays;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
+import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.openide.util.NbBundle;
+
+/**
+ * Data source ingest module that verifies the integrity of an Expert Witness
+ * Format (EWF) E01 image file by generating a hash of the file and comparing it
+ * to the value stored in the image. Will also generate hashes for any image-type
+ * data source that has none.
+ */
+public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
+
+ private static final Logger logger = Logger.getLogger(DataSourceIntegrityIngestModule.class.getName());
+ private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
+ private static final IngestServices services = IngestServices.getInstance();
+
+ private final boolean computeHashes;
+ private final boolean verifyHashes;
+
+ private final List<HashData> hashDataList = new ArrayList<>();
+
+ private IngestJobContext context;
+
+ DataSourceIntegrityIngestModule(DataSourceIntegrityIngestSettings settings) {
+ computeHashes = settings.shouldComputeHashes();
+ verifyHashes = settings.shouldVerifyHashes();
+ }
+
+ @NbBundle.Messages({
+ "DataSourceIntegrityIngestModule.startup.noCheckboxesSelected=At least one of the checkboxes must be selected"
+ })
+ @Override
+ public void startUp(IngestJobContext context) throws IngestModuleException {
+ this.context = context;
+
+ // It's an error if the module is run without either option selected
+ if (!(computeHashes || verifyHashes)) {
+ throw new IngestModuleException(Bundle.DataSourceIntegrityIngestModule_startup_noCheckboxesSelected());
+ }
+ }
+
+ @NbBundle.Messages({
+ "# {0} - imageName",
+ "DataSourceIntegrityIngestModule.process.skipCompute=Not computing new hashes for {0} since the option was disabled",
+ "# {0} - imageName",
+ "DataSourceIntegrityIngestModule.process.skipVerify=Not verifying existing hashes for {0} since the option was disabled",
+ "# {0} - hashName",
+ "DataSourceIntegrityIngestModule.process.hashAlgorithmError=Error creating message digest for {0} algorithm",
+ "# {0} - hashName",
+ "DataSourceIntegrityIngestModule.process.hashMatch={0} hash verified ",
+ "# {0} - hashName",
+ "DataSourceIntegrityIngestModule.process.hashNonMatch={0} hash not verified ",
+ "# {0} - calculatedHashValue",
+ "# {1} - storedHashValue",
+ "DataSourceIntegrityIngestModule.process.hashList=- Calculated hash: {0}
- Stored hash: {1}
",
+ "# {0} - hashName",
+ "# {1} - calculatedHashValue",
+ "DataSourceIntegrityIngestModule.process.calcHashWithType=Calculated {0} hash: {1} ",
+ "# {0} - imageName",
+ "DataSourceIntegrityIngestModule.process.calculateHashDone=Data Source Hash Calculation Results for {0}
",
+ "DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated",
+ "# {0} - imageName",
+ "DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database",
+ "# {0} - imageName",
+ "DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database",
+ })
+ @Override
+ public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
+ String imgName = dataSource.getName();
+
+ // Skip non-images
+ if (!(dataSource instanceof Image)) {
+ logger.log(Level.INFO, "Skipping non-image {0}", imgName); //NON-NLS
+ services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
+ NbBundle.getMessage(this.getClass(),
+ "DataSourceIntegrityIngestModule.process.skipNonEwf",
+ imgName)));
+ return ProcessResult.OK;
+ }
+ Image img = (Image) dataSource;
+
+ // Make sure the image size we have is not zero
+ long size = img.getSize();
+ if (size == 0) {
+ logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS
+ services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(),
+ NbBundle.getMessage(this.getClass(),
+ "DataSourceIntegrityIngestModule.process.errGetSizeOfImg",
+ imgName)));
+ }
+
+ // Determine which mode we're in.
+ // - If there are any preset hashes, then we'll verify them (assuming the verify checkbox is selected)
+ // - Otherwise we'll calculate and store all three hashes (assuming the compute checkbox is selected)
+
+ // First get a list of all stored hash types
+ try {
+ if (img.getMd5() != null && ! img.getMd5().isEmpty()) {
+ hashDataList.add(new HashData(HashType.MD5, img.getMd5()));
+ }
+ if (img.getSha1() != null && ! img.getSha1().isEmpty()) {
+ hashDataList.add(new HashData(HashType.SHA1, img.getSha1()));
+ }
+ if (img.getSha256() != null && ! img.getSha256().isEmpty()) {
+ hashDataList.add(new HashData(HashType.SHA256, img.getSha256()));
+ }
+ } catch (TskCoreException ex) {
+ String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName);
+ services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
+ logger.log(Level.SEVERE, msg, ex);
+ return ProcessResult.ERROR;
+ }
+
+ // Figure out which mode we should be in
+ Mode mode;
+ if (hashDataList.isEmpty()) {
+ mode = Mode.COMPUTE;
+ } else {
+ mode = Mode.VERIFY;
+ }
+
+ // If that mode was not enabled by the user, exit
+ if (mode.equals(Mode.COMPUTE) && ! this.computeHashes) {
+ logger.log(Level.INFO, "Not computing hashes for {0} since the option was disabled", imgName); //NON-NLS
+ services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
+ Bundle.DataSourceIntegrityIngestModule_process_skipCompute(imgName)));
+ return ProcessResult.OK;
+ } else if (mode.equals(Mode.VERIFY) && ! this.verifyHashes) {
+ logger.log(Level.INFO, "Not verifying hashes for {0} since the option was disabled", imgName); //NON-NLS
+ services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
+ Bundle.DataSourceIntegrityIngestModule_process_skipVerify(imgName)));
+ return ProcessResult.OK;
+ }
+
+ // If we're in compute mode (i.e., the hash list is empty), add all hash algorithms
+ // to the list.
+ if (mode.equals(Mode.COMPUTE)) {
+ for(HashType type : HashType.values()) {
+ hashDataList.add(new HashData(type, ""));
+ }
+ }
+
+ // Set up the digests
+ for (HashData hashData:hashDataList) {
+ try {
+ hashData.digest = MessageDigest.getInstance(hashData.type.getName());
+ } catch (NoSuchAlgorithmException ex) {
+ String msg = Bundle.DataSourceIntegrityIngestModule_process_hashAlgorithmError(hashData.type.getName());
+ services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
+ logger.log(Level.SEVERE, msg, ex);
+ return ProcessResult.ERROR;
+ }
+ }
+
+ // Libewf uses a chunk size of 64 times the sector size, which is the
+ // motivation for using it here. For other images it shouldn't matter,
+ // so they can use this chunk size as well.
+ long chunkSize = 64 * img.getSsize();
+ chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
+
+ // Casting to double to capture decimals
+ int totalChunks = (int) Math.ceil((double) size / (double) chunkSize);
+ logger.log(Level.INFO, "Total chunks = {0}", totalChunks); //NON-NLS
+
+ if (mode.equals(Mode.VERIFY)) {
+ logger.log(Level.INFO, "Starting hash verification of {0}", img.getName()); //NON-NLS
+ } else {
+ logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS
+ }
+ services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
+ NbBundle.getMessage(this.getClass(),
+ "DataSourceIntegrityIngestModule.process.startingImg",
+ imgName)));
+
+ // Set up the progress bar
+ statusHelper.switchToDeterminate(totalChunks);
+
+ // Read in byte size chunks and update the hash value with the data.
+ byte[] data = new byte[(int) chunkSize];
+ int read;
+ for (int i = 0; i < totalChunks; i++) {
+ if (context.dataSourceIngestIsCancelled()) {
+ return ProcessResult.OK;
+ }
+ try {
+ read = img.read(data, i * chunkSize, chunkSize);
+ } catch (TskCoreException ex) {
+ String msg = NbBundle.getMessage(this.getClass(),
+ "DataSourceIntegrityIngestModule.process.errReadImgAtChunk", imgName, i);
+ services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
+ logger.log(Level.SEVERE, msg, ex);
+ return ProcessResult.ERROR;
+ }
+
+ // Only update with the read bytes.
+ if (read == chunkSize) {
+ for (HashData struct:hashDataList) {
+ struct.digest.update(data);
+ }
+ } else {
+ byte[] subData = Arrays.copyOfRange(data, 0, read);
+ for (HashData struct:hashDataList) {
+ struct.digest.update(subData);
+ }
+ }
+ statusHelper.progress(i);
+ }
+
+ // Produce the final hashes
+ for(HashData hashData:hashDataList) {
+ hashData.calculatedHash = DatatypeConverter.printHexBinary(hashData.digest.digest()).toLowerCase();
+ logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, hashData.calculatedHash}); //NON-NLS
+ }
+
+ if (mode.equals(Mode.VERIFY)) {
+ // Check that each hash matches
+ boolean verified = true;
+ String detailedResults = NbBundle
+ .getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName);
+ String hashResults = "";
+
+ for (HashData hashData:hashDataList) {
+ if (hashData.storedHash.equals(hashData.calculatedHash)) {
+ hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashMatch(hashData.type.name);
+ } else {
+ verified = false;
+ hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashNonMatch(hashData.type.name);
+ }
+ hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashList(hashData.calculatedHash, hashData.storedHash);
+ }
+
+ String verificationResultStr;
+ MessageType messageType;
+ if (verified) {
+ messageType = MessageType.INFO;
+ verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verified");
+ } else {
+ messageType = MessageType.WARNING;
+ verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.notVerified");
+ }
+
+ detailedResults += NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.resultLi", verificationResultStr);
+ detailedResults += hashResults;
+
+ services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
+ imgName + verificationResultStr, detailedResults));
+
+ } else {
+ // Store the hashes in the database and update the image
+ try {
+ String results = Bundle.DataSourceIntegrityIngestModule_process_calculateHashDone(imgName);
+
+ for (HashData hashData:hashDataList) {
+ switch (hashData.type) {
+ case MD5:
+ img.setMD5(hashData.calculatedHash);
+ break;
+ case SHA1:
+ img.setSha1(hashData.calculatedHash);
+ break;
+ case SHA256:
+ img.setSha256(hashData.calculatedHash);
+ break;
+ default:
+ break;
+ }
+ results += Bundle.DataSourceIntegrityIngestModule_process_calcHashWithType(hashData.type.name, hashData.calculatedHash);
+ }
+
+ // Write the inbox message
+ services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
+ imgName + Bundle.DataSourceIntegrityIngestModule_process_hashesCalculated(), results));
+
+ } catch (TskCoreException ex) {
+ String msg = Bundle.DataSourceIntegrityIngestModule_process_errorSavingHashes(imgName);
+ services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
+ logger.log(Level.SEVERE, "Error saving hash for image " + imgName + " to database", ex);
+ return ProcessResult.ERROR;
+ }
+ }
+
+ return ProcessResult.OK;
+ }
+
+ /**
+ * Enum to track whether we're in compute or verify mode
+ */
+ private enum Mode {
+ COMPUTE,
+ VERIFY;
+ }
+
+ /**
+ * Enum to hold the type of hash.
+ * The value in the "name" field should be compatible with MessageDigest
+ */
+ private enum HashType {
+ MD5("MD5"),
+ SHA1("SHA-1"),
+ SHA256("SHA-256");
+
+ private final String name; // This should be the string expected by MessageDigest
+
+ HashType(String name) {
+ this.name = name;
+ }
+
+ String getName() {
+ return name;
+ }
+ }
+
+ /**
+ * Utility class to hold data for a specific hash algorithm.
+ */
+ private class HashData {
+ private HashType type;
+ private MessageDigest digest;
+ private String storedHash;
+ private String calculatedHash;
+
+ HashData(HashType type, String storedHash) {
+ this.type = type;
+ this.storedHash = storedHash;
+ }
+ }
+}
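The module's chunked-digest loop, reduced to a standalone sketch over a local file (the ChunkedHashSketch class and image.dd path are made up; only the MessageDigest and DatatypeConverter usage mirrors process() above):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import javax.xml.bind.DatatypeConverter;

public class ChunkedHashSketch {
    public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
        byte[] chunk = new byte[32 * 1024]; // same default chunk size as the module
        try (InputStream in = new FileInputStream("image.dd")) {
            int read;
            while ((read = in.read(chunk)) != -1) {
                // Only feed the bytes actually read, as the module does for the final chunk.
                byte[] data = (read == chunk.length) ? chunk : Arrays.copyOfRange(chunk, 0, read);
                md5.update(data);
                sha256.update(data);
            }
        }
        System.out.println("MD5:     " + DatatypeConverter.printHexBinary(md5.digest()).toLowerCase());
        System.out.println("SHA-256: " + DatatypeConverter.printHexBinary(sha256.digest()).toLowerCase());
    }
}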
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettings.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettings.java
new file mode 100644
index 0000000000..e5e4bf3d50
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettings.java
@@ -0,0 +1,96 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.dataSourceIntegrity;
+
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+
+/**
+ * Ingest job settings for the Data Source Integrity module.
+ */
+final class DataSourceIntegrityIngestSettings implements IngestModuleIngestJobSettings {
+
+ private static final long serialVersionUID = 1L;
+
+ static final boolean DEFAULT_COMPUTE_HASHES = true;
+ static final boolean DEFAULT_VERIFY_HASHES = true;
+
+ private boolean computeHashes;
+ private boolean verifyHashes;
+
+ /**
+ * Instantiate the ingest job settings with default values.
+ */
+ DataSourceIntegrityIngestSettings() {
+ this.computeHashes = DEFAULT_COMPUTE_HASHES;
+ this.verifyHashes = DEFAULT_VERIFY_HASHES;
+ }
+
+ /**
+ * Instantiate the ingest job settings.
+ *
+ * @param computeHashes Compute hashes if none are present
+ * @param verifyHashes Verify hashes if any are present
+ */
+ DataSourceIntegrityIngestSettings(boolean computeHashes, boolean verifyHashes) {
+ this.computeHashes = computeHashes;
+ this.verifyHashes = verifyHashes;
+ }
+
+ @Override
+ public long getVersionNumber() {
+ return serialVersionUID;
+ }
+
+ /**
+ * Should hashes be computed if none are present?
+ *
+ * @return true if hashes should be computed, false otherwise
+ */
+ boolean shouldComputeHashes() {
+ return computeHashes;
+ }
+
+ /**
+ * Set whether hashes should be computed.
+ *
+ * @param computeHashes true if hashes should be computed
+ */
+ void setComputeHashes(boolean computeHashes) {
+ this.computeHashes = computeHashes;
+ }
+
+
+ /**
+ * Should hashes be verified if at least one is present?
+ *
+ * @return true if hashes should be verified, false otherwise
+ */
+ boolean shouldVerifyHashes() {
+ return verifyHashes;
+ }
+
+ /**
+ * Set whether hashes should be verified.
+ *
+ * @param verifyHashes true if hashes should be verified
+ */
+ void setVerifyHashes(boolean verifyHashes) {
+ this.verifyHashes = verifyHashes;
+ }
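+
+    // Illustrative summary (an assumption drawn from the javadoc above, not additional API):
+    // the ingest module is expected to compute hashes when the data source has none stored and
+    // computeHashes is set, and to verify when stored hashes exist and verifyHashes is set
+    // (cf. the COMPUTE/VERIFY modes in DataSourceIntegrityIngestModule).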
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.form
new file mode 100644
index 0000000000..b9fc6f08b1
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.form
@@ -0,0 +1,83 @@
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.java
new file mode 100644
index 0000000000..30e3b11b05
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestSettingsPanel.java
@@ -0,0 +1,120 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.dataSourceIntegrity;
+
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
+
+/**
+ * Ingest job settings panel for the Data Source Integrity module.
+ */
+@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
+final class DataSourceIntegrityIngestSettingsPanel extends IngestModuleIngestJobSettingsPanel {
+
+ /**
+ * Creates new form DataSourceIntegrityIngestSettingsPanel
+ */
+ public DataSourceIntegrityIngestSettingsPanel(DataSourceIntegrityIngestSettings settings) {
+ initComponents();
+ customizeComponents(settings);
+ }
+
+ /**
+ * Update components with values from the ingest job settings.
+ *
+ * @param settings The ingest job settings.
+ */
+ private void customizeComponents(DataSourceIntegrityIngestSettings settings) {
+ computeHashesCheckbox.setSelected(settings.shouldComputeHashes());
+ verifyHashesCheckbox.setSelected(settings.shouldVerifyHashes());
+ }
+
+ @Override
+ public IngestModuleIngestJobSettings getSettings() {
+ return new DataSourceIntegrityIngestSettings(computeHashesCheckbox.isSelected(), verifyHashesCheckbox.isSelected());
+ }
+
+ /**
+ * This method is called from within the constructor to initialize the form.
+ * WARNING: Do NOT modify this code. The content of this method is always
+ * regenerated by the Form Editor.
+ */
+ @SuppressWarnings("unchecked")
+ // //GEN-BEGIN:initComponents
+ private void initComponents() {
+
+ computeHashesCheckbox = new javax.swing.JCheckBox();
+ verifyHashesCheckbox = new javax.swing.JCheckBox();
+ jLabel3 = new javax.swing.JLabel();
+ jLabel1 = new javax.swing.JLabel();
+
+ org.openide.awt.Mnemonics.setLocalizedText(computeHashesCheckbox, org.openide.util.NbBundle.getMessage(DataSourceIntegrityIngestSettingsPanel.class, "DataSourceIntegrityIngestSettingsPanel.computeHashesCheckbox.text")); // NOI18N
+ computeHashesCheckbox.addActionListener(new java.awt.event.ActionListener() {
+ public void actionPerformed(java.awt.event.ActionEvent evt) {
+ computeHashesCheckboxActionPerformed(evt);
+ }
+ });
+
+ org.openide.awt.Mnemonics.setLocalizedText(verifyHashesCheckbox, org.openide.util.NbBundle.getMessage(DataSourceIntegrityIngestSettingsPanel.class, "DataSourceIntegrityIngestSettingsPanel.verifyHashesCheckbox.text")); // NOI18N
+
+ jLabel3.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
+ org.openide.awt.Mnemonics.setLocalizedText(jLabel3, org.openide.util.NbBundle.getMessage(DataSourceIntegrityIngestSettingsPanel.class, "DataSourceIntegrityIngestSettingsPanel.jLabel3.text")); // NOI18N
+
+ org.openide.awt.Mnemonics.setLocalizedText(jLabel1, org.openide.util.NbBundle.getMessage(DataSourceIntegrityIngestSettingsPanel.class, "DataSourceIntegrityIngestSettingsPanel.jLabel1.text")); // NOI18N
+
+ javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
+ this.setLayout(layout);
+ layout.setHorizontalGroup(
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addContainerGap()
+ .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addComponent(jLabel1)
+ .addComponent(verifyHashesCheckbox)
+ .addComponent(computeHashesCheckbox)
+ .addComponent(jLabel3))
+ .addContainerGap(47, Short.MAX_VALUE))
+ );
+ layout.setVerticalGroup(
+ layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
+ .addGroup(layout.createSequentialGroup()
+ .addContainerGap()
+ .addComponent(jLabel3)
+ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
+ .addComponent(computeHashesCheckbox)
+ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
+ .addComponent(verifyHashesCheckbox)
+ .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
+ .addComponent(jLabel1)
+ .addContainerGap(53, Short.MAX_VALUE))
+ );
+ }// //GEN-END:initComponents
+
+ private void computeHashesCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_computeHashesCheckboxActionPerformed
+ // TODO add your handling code here:
+ }//GEN-LAST:event_computeHashesCheckboxActionPerformed
+
+ // Variables declaration - do not modify//GEN-BEGIN:variables
+ private javax.swing.JCheckBox computeHashesCheckbox;
+ private javax.swing.JLabel jLabel1;
+ private javax.swing.JLabel jLabel3;
+ private javax.swing.JCheckBox verifyHashesCheckbox;
+ // End of variables declaration//GEN-END:variables
+
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityModuleFactory.java
new file mode 100644
index 0000000000..0773672e2b
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityModuleFactory.java
@@ -0,0 +1,103 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014-2018 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.dataSourceIntegrity;
+
+import org.openide.util.NbBundle;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
+import org.sleuthkit.autopsy.ingest.NoIngestModuleIngestJobSettings;
+
+/**
+ * A factory that creates data source ingest modules that compute and verify
+ * the integrity hashes of data source images, e.g., Expert Witness Format (EWF) .e01 files.
+ */
+@ServiceProvider(service = IngestModuleFactory.class)
+public class DataSourceIntegrityModuleFactory extends IngestModuleFactoryAdapter {
+
+ static String getModuleName() {
+ return NbBundle.getMessage(DataSourceIntegrityIngestModule.class,
+ "DataSourceIntegrityModuleFactory.moduleName.text");
+ }
+
+ @Override
+ public String getModuleDisplayName() {
+ return getModuleName();
+ }
+
+ @Override
+ public String getModuleDescription() {
+ return NbBundle.getMessage(DataSourceIntegrityIngestModule.class,
+ "DataSourceIntegrityModuleFactory.moduleDesc.text");
+ }
+
+ @Override
+ public String getModuleVersionNumber() {
+ return Version.getVersion();
+ }
+
+ @Override
+ public boolean isDataSourceIngestModuleFactory() {
+ return true;
+ }
+
+ @Override
+ public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings settings) {
+ if (settings instanceof DataSourceIntegrityIngestSettings) {
+ return new DataSourceIntegrityIngestModule((DataSourceIntegrityIngestSettings) settings);
+ }
+ /*
+ * Compatibility check for older versions.
+ */
+ if (settings instanceof NoIngestModuleIngestJobSettings) {
+ return new DataSourceIntegrityIngestModule(new DataSourceIntegrityIngestSettings());
+ }
+
+ throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings");
+ }
+
+ @Override
+ public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
+ return new DataSourceIntegrityIngestSettings();
+ }
+
+ @Override
+ public boolean hasIngestJobSettingsPanel() {
+ return true;
+ }
+
+ @Override
+ public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
+ if (settings instanceof DataSourceIntegrityIngestSettings) {
+ return new DataSourceIntegrityIngestSettingsPanel((DataSourceIntegrityIngestSettings) settings);
+ }
+ /*
+ * Compatibility check for older versions.
+ */
+ if (settings instanceof NoIngestModuleIngestJobSettings) {
+ return new DataSourceIntegrityIngestSettingsPanel(new DataSourceIntegrityIngestSettings());
+ }
+
+ throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings");
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle.properties
deleted file mode 100644
index 04f02cd579..0000000000
--- a/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle.properties
+++ /dev/null
@@ -1,15 +0,0 @@
-OpenIDE-Module-Name=ewfVerify
-EwfVerifyIngestModule.moduleName.text=E01 Verifier
-EwfVerifyIngestModule.moduleDesc.text=Validates the integrity of E01 files.
-EwfVerifyIngestModule.process.errProcImg=Error processing {0}
-EwfVerifyIngestModule.process.skipNonEwf=Skipping non-E01 image {0}
-EwfVerifyIngestModule.process.noStoredHash=Image {0} does not have stored hash.
-EwfVerifyIngestModule.process.startingImg=Starting {0}
-EwfVerifyIngestModule.process.errGetSizeOfImg=Error getting size of {0}. Image will not be processed.
-EwfVerifyIngestModule.process.errReadImgAtChunk=Error reading {0} at chunk {1}
-EwfVerifyIngestModule.shutDown.verified=\ verified
-EwfVerifyIngestModule.shutDown.notVerified=\ not verified
-EwfVerifyIngestModule.shutDown.verifyResultsHeader=EWF Verification Results for {0}
-EwfVerifyIngestModule.shutDown.resultLi=Result\:{0}
-EwfVerifyIngestModule.shutDown.calcHashLi=Calculated hash\: {0}
-EwfVerifyIngestModule.shutDown.storedHashLi=Stored hash\: {0}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle_ja.properties
deleted file mode 100644
index 1bd226b8db..0000000000
--- a/Core/src/org/sleuthkit/autopsy/modules/e01verify/Bundle_ja.properties
+++ /dev/null
@@ -1,15 +0,0 @@
-OpenIDE-Module-Name=EWFVerify
-EwfVerifyIngestModule.process.errProcImg={0}\u3092\u51e6\u7406\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f
-EwfVerifyIngestModule.moduleName.text=E01\u8a8d\u8a3c\u30c4\u30fc\u30eb
-EwfVerifyIngestModule.moduleDesc.text=E01\u30d5\u30a1\u30a4\u30eb\u306e\u6574\u5408\u6027\u3092\u8a8d\u8a3c\u3057\u307e\u3059\u3002
-EwfVerifyIngestModule.process.skipNonEwf=E01\u30a4\u30e1\u30fc\u30b8\u3067\u306f\u306a\u3044{0}\u3092\u30b9\u30ad\u30c3\u30d7\u3057\u3066\u3044\u307e\u3059
-EwfVerifyIngestModule.process.noStoredHash=\u30a4\u30e1\u30fc\u30b8{0}\u306f\u4fdd\u5b58\u3055\u308c\u3066\u3044\u308b\u30cf\u30c3\u30b7\u30e5\u304c\u3042\u308a\u307e\u305b\u3093\u3002
-EwfVerifyIngestModule.process.startingImg={0}\u3092\u958b\u59cb\u4e2d
-EwfVerifyIngestModule.process.errGetSizeOfImg={0}\u306e\u30b5\u30a4\u30ba\u306e\u53d6\u5f97\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30a4\u30e1\u30fc\u30b8\u306f\u51e6\u7406\u3055\u308c\u307e\u305b\u3093\u3002
-EwfVerifyIngestModule.process.errReadImgAtChunk={0}\u306e\u30c1\u30e3\u30f3\u30af{1}\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f
-EwfVerifyIngestModule.shutDown.calcHashLi=\u8a08\u7b97\u3055\u308c\u305f\u30cf\u30c3\u30b7\u30e5\u5024\uff1a{0}
-EwfVerifyIngestModule.shutDown.notVerified=\u8a8d\u8a3c\u3067\u304d\u307e\u305b\u3093\u3067\u3057\u305f
-EwfVerifyIngestModule.shutDown.resultLi=\u7d50\u679c\uff1a{0}
-EwfVerifyIngestModule.shutDown.storedHashLi=\u4fdd\u5b58\u3055\u308c\u305f\u30cf\u30c3\u30b7\u30e5\uff1a {0}
-EwfVerifyIngestModule.shutDown.verifyResultsHeader={0}\u306eEWF\u30d9\u30ea\u30d5\u30a3\u30b1\u30fc\u30b7\u30e7\u30f3\u7d50\u679c
-EwfVerifyIngestModule.shutDown.verified=\u8a8d\u8a3c\u3055\u308c\u307e\u3057\u305f
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifierModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifierModuleFactory.java
deleted file mode 100644
index 0b4233d86a..0000000000
--- a/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifierModuleFactory.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2014 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.modules.e01verify;
-
-import org.openide.util.NbBundle;
-import org.openide.util.lookup.ServiceProvider;
-import org.sleuthkit.autopsy.coreutils.Version;
-import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
-import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
-import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
-
-/**
- * An factory that creates data source ingest modules that verify the integrity
- * of Expert Witness Format (EWF), i.e., .e01 files .
- */
-@ServiceProvider(service = IngestModuleFactory.class)
-public class E01VerifierModuleFactory extends IngestModuleFactoryAdapter {
-
- static String getModuleName() {
- return NbBundle.getMessage(E01VerifyIngestModule.class,
- "EwfVerifyIngestModule.moduleName.text");
- }
-
- @Override
- public String getModuleDisplayName() {
- return getModuleName();
- }
-
- @Override
- public String getModuleDescription() {
- return NbBundle.getMessage(E01VerifyIngestModule.class,
- "EwfVerifyIngestModule.moduleDesc.text");
- }
-
- @Override
- public String getModuleVersionNumber() {
- return Version.getVersion();
- }
-
- @Override
- public boolean isDataSourceIngestModuleFactory() {
- return true;
- }
-
- @Override
- public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings ingestOptions) {
- return new E01VerifyIngestModule();
- }
-}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifyIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifyIngestModule.java
deleted file mode 100644
index 27a189f478..0000000000
--- a/Core/src/org/sleuthkit/autopsy/modules/e01verify/E01VerifyIngestModule.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2013-2014 Basis Technology Corp.
- * Contact: carrier sleuthkit org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.modules.e01verify;
-
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.logging.Level;
-import javax.xml.bind.DatatypeConverter;
-import org.openide.util.NbBundle;
-import org.python.bouncycastle.util.Arrays;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
-import org.sleuthkit.autopsy.ingest.IngestJobContext;
-import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.datamodel.Content;
-import org.sleuthkit.datamodel.Image;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskData;
-import org.openide.util.NbBundle;
-
-/**
- * Data source ingest module that verifies the integrity of an Expert Witness
- * Format (EWF) E01 image file by generating a hash of the file and comparing it
- * to the value stored in the image.
- */
-@NbBundle.Messages({
- "UnableToCalculateHashes=Unable to calculate MD5 hashes."
-})
-public class E01VerifyIngestModule implements DataSourceIngestModule {
-
- private static final Logger logger = Logger.getLogger(E01VerifyIngestModule.class.getName());
- private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
- private static final IngestServices services = IngestServices.getInstance();
-
- private MessageDigest messageDigest;
- private boolean verified = false;
- private String calculatedHash = "";
- private String storedHash = "";
- private IngestJobContext context;
-
- E01VerifyIngestModule() {
- }
-
- @Override
- public void startUp(IngestJobContext context) throws IngestModuleException {
- this.context = context;
- verified = false;
- storedHash = "";
- calculatedHash = "";
-
- try {
- messageDigest = MessageDigest.getInstance("MD5"); //NON-NLS
- } catch (NoSuchAlgorithmException ex) {
- throw new IngestModuleException(Bundle.UnableToCalculateHashes(), ex);
- }
- }
-
- @Override
- public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
- String imgName = dataSource.getName();
-
- // Skip non-images
- if (!(dataSource instanceof Image)) {
- logger.log(Level.INFO, "Skipping non-image {0}", imgName); //NON-NLS
- services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(),
- NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.skipNonEwf",
- imgName)));
- return ProcessResult.OK;
- }
- Image img = (Image) dataSource;
-
- // Skip images that are not E01
- if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
- logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName); //NON-NLS
- services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(),
- NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.skipNonEwf",
- imgName)));
- return ProcessResult.OK;
- }
-
- // Report an error for null or empty MD5
- if ((img.getMd5() == null) || img.getMd5().isEmpty()) {
- services.postMessage(IngestMessage.createMessage(MessageType.ERROR, E01VerifierModuleFactory.getModuleName(),
- NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.noStoredHash",
- imgName)));
- return ProcessResult.ERROR;
- }
-
- storedHash = img.getMd5().toLowerCase();
- logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash}); //NON-NLS
-
- logger.log(Level.INFO, "Starting hash verification of {0}", img.getName()); //NON-NLS
- services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(),
- NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.startingImg",
- imgName)));
-
- long size = img.getSize();
- if (size == 0) {
- logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS
- services.postMessage(IngestMessage.createMessage(MessageType.ERROR, E01VerifierModuleFactory.getModuleName(),
- NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.errGetSizeOfImg",
- imgName)));
- }
-
- // Libewf uses a sector size of 64 times the sector size, which is the
- // motivation for using it here.
- long chunkSize = 64 * img.getSsize();
- chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
-
- // Casting to double to capture decimals
- int totalChunks = (int) Math.ceil((double) size / (double) chunkSize);
- logger.log(Level.INFO, "Total chunks = {0}", totalChunks); //NON-NLS
- int read;
-
- byte[] data = new byte[(int) chunkSize];
- statusHelper.switchToDeterminate(totalChunks);
-
- // Read in byte size chunks and update the hash value with the data.
- for (int i = 0; i < totalChunks; i++) {
- if (context.dataSourceIngestIsCancelled()) {
- return ProcessResult.OK;
- }
- try {
- read = img.read(data, i * chunkSize, chunkSize);
- } catch (TskCoreException ex) {
- String msg = NbBundle.getMessage(this.getClass(),
- "EwfVerifyIngestModule.process.errReadImgAtChunk", imgName, i);
- services.postMessage(IngestMessage.createMessage(MessageType.ERROR, E01VerifierModuleFactory.getModuleName(), msg));
- logger.log(Level.SEVERE, msg, ex);
- return ProcessResult.ERROR;
- }
-
- // Only update with the read bytes.
- if (read == chunkSize) {
- messageDigest.update(data);
- } else {
- byte[] subData = Arrays.copyOfRange(data, 0, read);
- messageDigest.update(subData);
- }
- statusHelper.progress(i);
- }
-
- // Finish generating the hash and get it as a string value
- calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
- verified = calculatedHash.equals(storedHash);
- logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash}); //NON-NLS
-
- logger.log(Level.INFO, "complete() {0}", E01VerifierModuleFactory.getModuleName()); //NON-NLS
- String msg;
- if (verified) {
- msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verified");
- } else {
- msg = NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.notVerified");
- }
- String extra = NbBundle
- .getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.verifyResultsHeader", imgName);
- extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.resultLi", msg);
- extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.calcHashLi", calculatedHash);
- extra += NbBundle.getMessage(this.getClass(), "EwfVerifyIngestModule.shutDown.storedHashLi", storedHash);
- services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(), imgName + msg, extra));
- logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
-
- return ProcessResult.OK;
- }
-}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
index c1aec6c81b..73af437ec1 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
@@ -23,6 +23,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
@@ -66,6 +67,7 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.EncodedFileOutputStream;
import org.sleuthkit.datamodel.ReadContentInputStream;
+import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -242,28 +244,41 @@ class SevenZipExtractor {
String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);
try {
- BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
- artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+ attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
"Possible Zip Bomb"));
- artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
+ attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
EmbeddedFileExtractorModuleFactory.getModuleName(),
Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())));
- artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
+ attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
EmbeddedFileExtractorModuleFactory.getModuleName(),
details));
- try {
- // index the artifact for keyword search
- blackboard.indexArtifact(artifact);
- } catch (Blackboard.BlackboardException ex) {
- logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
- MessageNotifyUtil.Notify.error(
- Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
+
+ SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+ org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
+ // Create artifact if it doesn't already exist.
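+            // (artifactExists() matches on the artifact type and the full attribute set, so
+            // re-running ingest on the same archive should not duplicate the hit.)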
+ if (!tskBlackboard.artifactExists(archiveFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ artifact.addAttributes(attributes);
+
+ try {
+ // index the artifact for keyword search
+ blackboard.indexArtifact(artifact);
+ } catch (Blackboard.BlackboardException ex) {
+ logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
+ MessageNotifyUtil.Notify.error(
+ Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
+ }
+
+ services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
+
+ services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
- services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for Zip Bomb Detection for file: " + escapedFilePath, ex); //NON-NLS
+ } catch (NoCurrentCaseException ex) {
+ logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
}
- services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
}
/**
diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
index d3db3966e7..c8ded79a50 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
@@ -38,6 +38,7 @@ import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
+import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
@@ -77,7 +78,6 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private final AtomicInteger filesProcessed = new AtomicInteger(0);
-    private final List<BlackboardArtifact> listOfFacesDetectedArtifacts = new ArrayList<>();
private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private FileTypeDetector fileTypeDetector;
@@ -190,12 +190,12 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
ExifIFD0Directory devDir = metadata.getFirstDirectoryOfType(ExifIFD0Directory.class);
if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
- if (model != null && !model.isEmpty()) {
+ if (StringUtils.isNotBlank(model)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
}
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
- if (make != null && !make.isEmpty()) {
+ if (StringUtils.isNotBlank(make)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
index c8d88f5d46..f79c10a20c 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
@@ -147,23 +147,29 @@ public class FileTypeIdIngestModule implements FileIngestModule {
*/
private void createInterestingFileHit(AbstractFile file, FileType fileType) {
try {
- BlackboardArtifact artifact;
- artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
 Collection<BlackboardAttribute> attributes = new ArrayList<>();
- BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName());
- attributes.add(setNameAttribute);
- BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType());
- attributes.add(ruleNameAttribute);
- artifact.addAttributes(attributes);
- try {
- Case.getCurrentCaseThrows().getServices().getBlackboard().indexArtifact(artifact);
- } catch (Blackboard.BlackboardException ex) {
- logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
- } catch (NoCurrentCaseException ex) {
- logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
+ attributes.add(new BlackboardAttribute(
+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
+ attributes.add(new BlackboardAttribute(
+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));
+
+ Case currentCase = Case.getCurrentCaseThrows();
+ org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+ // Create artifact if it doesn't already exist.
+ if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ artifact.addAttributes(attributes);
+
+ try {
+ currentCase.getServices().getBlackboard().indexArtifact(artifact);
+ } catch (Blackboard.BlackboardException ex) {
+ logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
+ }
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS
+ } catch (NoCurrentCaseException ex) {
+ logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
index 88eea65dda..0e0160b9b9 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
@@ -106,12 +106,15 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
@Override
@Messages({"FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search."})
public ProcessResult process(AbstractFile file) {
+ Case currentCase;
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ currentCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
+ blackboard = currentCase.getServices().getBlackboard();
+
// Skip slack space files.
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)) {
return ProcessResult.OK;
@@ -126,7 +129,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
// Post an interesting files set hit artifact to the
// blackboard.
String moduleName = InterestingItemsIngestModuleFactory.getModuleName();
- BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+
 Collection<BlackboardAttribute> attributes = new ArrayList<>();
// Add a set name attribute to the artifact. This adds a
@@ -141,29 +144,34 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
// interesting files set membership rule that was satisfied.
BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, moduleName, ruleSatisfied);
attributes.add(ruleNameAttribute);
+
+ org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+ // Create artifact if it doesn't already exist.
+ if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ artifact.addAttributes(attributes);
+
+ try {
+ // index the artifact for keyword search
+ blackboard.indexArtifact(artifact);
+ } catch (Blackboard.BlackboardException ex) {
+ logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
+ MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
+ }
- artifact.addAttributes(attributes);
- try {
- // index the artifact for keyword search
- blackboard.indexArtifact(artifact);
- } catch (Blackboard.BlackboardException ex) {
- logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
- MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
+ services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
+
+ // make an ingest inbox message
+                StringBuilder detailsSb = new StringBuilder();
+                detailsSb.append("File: " + file.getParentPath() + file.getName() + "<br/>\n");
+                detailsSb.append("Rule Set: " + filesSet.getName());
+
+ services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
+ "Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
+ detailsSb.toString(),
+ file.getName(),
+ artifact));
}
-
- services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
-
- // make an ingest inbox message
- StringBuilder detailsSb = new StringBuilder();
- detailsSb.append("File: " + file.getParentPath() + file.getName() + "
\n");
- detailsSb.append("Rule Set: " + filesSet.getName());
-
- services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
- "Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
- detailsSb.toString(),
- file.getName(),
- artifact));
-
} catch (TskCoreException ex) {
FilesIdentifierIngestModule.logger.log(Level.SEVERE, "Error posting to the blackboard", ex); //NOI18N NON-NLS
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSet.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSet.java
index 87d25453e7..0e91869151 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSet.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSet.java
@@ -833,7 +833,7 @@ public final class FilesSet implements Serializable {
// If there is a leading ".", strip it since
// AbstractFile.getFileNameExtension() returns just the
// extension chars and not the dot.
- super(extension.startsWith(".") ? extension.substring(1) : extension, false);
+ super(normalize(extension), false);
}
/**
@@ -842,10 +842,10 @@ public final class FilesSet implements Serializable {
* @param extensions The file name extensions to be matched.
*/
 public ExtensionCondition(List<String> extensions) {
- // If there is a leading ".", strip it since
+ // If there is a leading "." in any list value, strip it since
// AbstractFile.getFileNameExtension() returns just the
// extension chars and not the dot.
- super(extensions);
+ super(normalize(extensions));
}
/**
@@ -862,6 +862,34 @@ public final class FilesSet implements Serializable {
public boolean passes(AbstractFile file) {
return this.textMatches(file.getNameExtension());
}
+
+ /**
+ * Strip "." from the start of extensions in the provided list.
+ *
+ * @param extensions The list of extensions to be processed.
+ *
+ * @return A post-processed list of extensions.
+ */
+        private static List<String> normalize(List<String> extensions) {
+            List<String> values = new ArrayList<>(extensions);
+
+ for (int i=0; i < values.size(); i++) {
+ values.set(i, normalize(values.get(i)));
+ }
+
+ return values;
+ }
+
+ /**
+ * Strip "." from the start of the provided extension.
+ *
+ * @param extension The extension to be processed.
+ *
+ * @return A post-processed extension.
+ */
+ private static String normalize(String extension) {
+ return extension.startsWith(".") ? extension.substring(1) : extension;
+ }
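+        // For example (illustrative): normalize(".jpg") and normalize("jpg") both yield "jpg".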
}
@@ -986,19 +1014,7 @@ public final class FilesSet implements Serializable {
* match.
*/
 CaseInsensitiveMultiValueStringComparisionMatcher(List<String> valuesToMatch) {
-            List<String> values = new ArrayList<>(valuesToMatch);
- for (int i=0; i < values.size(); i++) {
- // Remove leading and trailing whitespace.
- String tempValue = values.get(i).trim();
-
- // Strip "." from the start of the extension if it exists.
- if (tempValue.startsWith(".")) {
- tempValue = tempValue.substring(1);
- }
-
- values.set(i, tempValue);
- }
- this.valuesToMatch = values;
+ this.valuesToMatch = valuesToMatch;
}
@Override
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
index 7f8285c4a1..9744b0c5bf 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
@@ -485,7 +485,12 @@ final class FilesSetRulePanel extends javax.swing.JPanel {
if (this.fullNameRadioButton.isSelected()) {
condition = new FilesSet.Rule.FullNameCondition(this.nameTextField.getText());
} else {
- condition = new FilesSet.Rule.ExtensionCondition(Arrays.asList(this.nameTextField.getText().split(",")));
+                List<String> extensions = Arrays.asList(this.nameTextField.getText().split(","));
+ for (int i=0; i < extensions.size(); i++) {
+ // Remove leading and trailing whitespace.
+ extensions.set(i, extensions.get(i).trim());
+ }
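+                // e.g. user input ".jpg, png " becomes ["jpg", "png"] once ExtensionCondition
+                // strips any leading dots (illustrative).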
+ condition = new FilesSet.Rule.ExtensionCondition(extensions);
}
} else {
logger.log(Level.SEVERE, "Attempt to get name condition with illegal chars"); // NON-NLS
diff --git a/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java b/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
index 6b0622f630..4df69801aa 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
@@ -64,15 +64,15 @@ class StixArtifactData {
@Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
"StixArtifactData.noOpenCase.errMsg=No open case available."})
public void createArtifact(String a_title) throws TskCoreException {
- Blackboard blackboard;
+ Case currentCase;
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ currentCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_noOpenCase_errMsg(), ex.getLocalizedMessage());
return;
}
-
+
String setName;
if (a_title != null) {
setName = "STIX Indicator - " + a_title; //NON-NLS
@@ -80,19 +80,25 @@ class StixArtifactData {
setName = "STIX Indicator - (no title)"; //NON-NLS
}
- BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
 Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, "Stix", setName)); //NON-NLS
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, "Stix", observableId)); //NON-NLS
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, "Stix", objType)); //NON-NLS
-
- bba.addAttributes(attributes);
- try {
- // index the artifact for keyword search
- blackboard.indexArtifact(bba);
- } catch (Blackboard.BlackboardException ex) {
- logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
- MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());
+
+ org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+ // Create artifact if it doesn't already exist.
+ if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ bba.addAttributes(attributes);
+
+ try {
+ // index the artifact for keyword search
+ Blackboard blackboard = currentCase.getServices().getBlackboard();
+ blackboard.indexArtifact(bba);
+ } catch (Blackboard.BlackboardException ex) {
+ logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
+ MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());
+ }
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.form b/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.form
index cd033f994d..30995524b2 100755
--- a/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.form
+++ b/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.form
@@ -91,6 +91,9 @@
+
+
+
@@ -116,6 +119,9 @@
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.java b/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.java
index 6fe9ec8ce3..e7f2ae16b5 100755
--- a/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/othercasessearch/OtherCasesSearchDialog.java
@@ -56,19 +56,25 @@ import org.sleuthkit.autopsy.datamodel.EmptyNode;
"OtherCasesSearchDialog.validation.invalidEmail=The supplied value is not a valid e-mail address.",
"OtherCasesSearchDialog.validation.invalidDomain=The supplied value is not a valid domain.",
"OtherCasesSearchDialog.validation.invalidPhone=The supplied value is not a valid phone number.",
+ "OtherCasesSearchDialog.validation.invalidSsid=The supplied value is not a valid wireless network.",
+ "OtherCasesSearchDialog.validation.invalidMac=The supplied value is not a valid MAC address.",
+ "OtherCasesSearchDialog.validation.invalidImei=The supplied value is not a valid IMEI number.",
+ "OtherCasesSearchDialog.validation.invalidImsi=The supplied value is not a valid IMSI number.",
+ "OtherCasesSearchDialog.validation.invalidIccid=The supplied value is not a valid ICCID number.",
"OtherCasesSearchDialog.validation.genericMessage=The supplied value is not valid.",
"# {0} - number of cases",
"OtherCasesSearchDialog.caseLabel.text=The current Central Repository contains {0} case(s)."
})
/**
- * The Search Other Cases dialog allows users to search for specific
- * types of correlation properties in the Central Repository.
+ * The Search Other Cases dialog allows users to search for specific types of
+ * correlation properties in the Central Repository.
*/
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
final class OtherCasesSearchDialog extends javax.swing.JDialog {
+
private static final Logger logger = Logger.getLogger(OtherCasesSearchDialog.class.getName());
private static final long serialVersionUID = 1L;
-
+
 private final List<CorrelationAttributeInstance.Type> correlationTypes;
private CorrelationAttributeInstance.Type selectedCorrelationType;
private TextPrompt correlationValueTextFieldPrompt;
@@ -82,20 +88,20 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
initComponents();
customizeComponents();
}
-
+
/**
* Perform the other cases search.
- *
- * @param type The correlation type.
+ *
+ * @param type The correlation type.
* @param value The value to be matched.
*/
private void search(CorrelationAttributeInstance.Type type, String value) {
 new SwingWorker<List<CorrelationAttributeInstance>, Void>() {
-
+
@Override
 protected List<CorrelationAttributeInstance> doInBackground() {
 List<CorrelationAttributeInstance> correlationInstances = new ArrayList<>();
-
+
try {
correlationInstances = EamDb.getInstance().getArtifactInstancesByTypeValue(type, value);
} catch (EamDbException ex) {
@@ -115,10 +121,10 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
DataResultViewerTable table = new DataResultViewerTable();
 Collection<DataResultViewer> viewers = new ArrayList<>(1);
viewers.add(table);
-
+
OtherCasesSearchNode searchNode = new OtherCasesSearchNode(correlationInstances);
TableFilterNode tableFilterNode = new TableFilterNode(searchNode, true, searchNode.getName());
-
+
String resultsText = String.format("%s (%s; \"%s\")",
Bundle.OtherCasesSearchDialog_resultsTitle_text(), type.getDisplayName(), value);
final TopComponent searchResultWin;
@@ -163,6 +169,11 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
org.openide.awt.Mnemonics.setLocalizedText(correlationValueLabel, org.openide.util.NbBundle.getMessage(OtherCasesSearchDialog.class, "OtherCasesSearchDialog.correlationValueLabel.text")); // NOI18N
correlationValueTextField.setText(org.openide.util.NbBundle.getMessage(OtherCasesSearchDialog.class, "OtherCasesSearchDialog.correlationValueTextField.text")); // NOI18N
+ correlationValueTextField.addKeyListener(new java.awt.event.KeyAdapter() {
+ public void keyReleased(java.awt.event.KeyEvent evt) {
+ valueFieldKeyReleaseListener(evt);
+ }
+ });
org.openide.awt.Mnemonics.setLocalizedText(searchButton, org.openide.util.NbBundle.getMessage(OtherCasesSearchDialog.class, "OtherCasesSearchDialog.searchButton.text")); // NOI18N
searchButton.addActionListener(new java.awt.event.ActionListener() {
@@ -171,6 +182,12 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
}
});
+ correlationTypeComboBox.addActionListener(new java.awt.event.ActionListener() {
+ public void actionPerformed(java.awt.event.ActionEvent evt) {
+ correlationTypeComboBoxActionPerformed(evt);
+ }
+ });
+
org.openide.awt.Mnemonics.setLocalizedText(correlationTypeLabel, org.openide.util.NbBundle.getMessage(OtherCasesSearchDialog.class, "OtherCasesSearchDialog.correlationTypeLabel.text")); // NOI18N
errorLabel.setForeground(new java.awt.Color(255, 0, 0));
@@ -235,7 +252,7 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
private void searchButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_searchButtonActionPerformed
CorrelationAttributeInstance.Type correlationType = selectedCorrelationType;
String correlationValue = correlationValueTextField.getText().trim();
-
+
if (validateInputs(correlationType, correlationValue)) {
search(correlationType, correlationValue);
dispose();
@@ -254,23 +271,48 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
case CorrelationAttributeInstance.PHONE_TYPE_ID:
validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidPhone();
break;
+ case CorrelationAttributeInstance.SSID_TYPE_ID:
+ validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidSsid();
+ break;
+ case CorrelationAttributeInstance.MAC_TYPE_ID:
+ validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidMac();
+ break;
+ case CorrelationAttributeInstance.IMEI_TYPE_ID:
+ validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidImei();
+ break;
+ case CorrelationAttributeInstance.IMSI_TYPE_ID:
+ validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidImsi();
+ break;
+ case CorrelationAttributeInstance.ICCID_TYPE_ID:
+ validationMessage = Bundle.OtherCasesSearchDialog_validation_invalidIccid();
+ break;
default:
validationMessage = Bundle.OtherCasesSearchDialog_validation_genericMessage();
break;
-
+
}
errorLabel.setText(validationMessage);
searchButton.setEnabled(false);
correlationValueTextField.grabFocus();
}
}//GEN-LAST:event_searchButtonActionPerformed
-
+
+ private void correlationTypeComboBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_correlationTypeComboBoxActionPerformed
+ //make error message go away when combo box is selected
+ errorLabel.setText("");
+ }//GEN-LAST:event_correlationTypeComboBoxActionPerformed
+
+ private void valueFieldKeyReleaseListener(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_valueFieldKeyReleaseListener
+ //make error message go away when the user enters anything in the value field
+ errorLabel.setText("");
+ }//GEN-LAST:event_valueFieldKeyReleaseListener
+
/**
* Validate the supplied input.
- *
- * @param type The correlation type.
+ *
+ * @param type The correlation type.
* @param value The value to be validated.
- *
+ *
* @return True if the input is valid for the given type; otherwise false.
*/
private boolean validateInputs(CorrelationAttributeInstance.Type type, String value) {
@@ -280,16 +322,16 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
// No need to log this.
return false;
}
-
+
return true;
}
-
+
/**
* Further customize the components beyond the standard initialization.
*/
private void customizeComponents() {
searchButton.setEnabled(false);
-
+
/*
* Add correlation types to the combo-box.
*/
@@ -307,7 +349,7 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
correlationTypeComboBox.addItem(type.getDisplayName());
}
correlationTypeComboBox.setSelectedIndex(0);
-
+
correlationTypeComboBox.addItemListener(new ItemListener() {
@Override
public void itemStateChanged(ItemEvent e) {
@@ -316,9 +358,9 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
updateSearchButton();
}
});
-
+
updateSelectedType();
-
+
/*
* Create listener for text input.
*/
@@ -338,17 +380,21 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
updateSearchButton();
}
});
-
+
updateCorrelationValueTextFieldPrompt();
}
-
+
@Messages({
"OtherCasesSearchDialog.correlationValueTextField.filesExample=Example: \"f0e1d2c3b4a5968778695a4b3c2d1e0f\"",
"OtherCasesSearchDialog.correlationValueTextField.domainExample=Example: \"domain.com\"",
"OtherCasesSearchDialog.correlationValueTextField.emailExample=Example: \"user@host.com\"",
"OtherCasesSearchDialog.correlationValueTextField.phoneExample=Example: \"(800)123-4567\"",
"OtherCasesSearchDialog.correlationValueTextField.usbExample=Example: \"4&1234567&0\"",
- "OtherCasesSearchDialog.correlationValueTextField.ssidExample=Example: \"WirelessNetwork-5G\""
+ "OtherCasesSearchDialog.correlationValueTextField.ssidExample=Example: \"WirelessNetwork-5G\"",
+ "OtherCasesSearchDialog.correlationValueTextField.macExample=Example: \"0C-14-F2-01-AF-45\"",
+ "OtherCasesSearchDialog.correlationValueTextField.imeiExample=Example: \"351756061523999\"",
+ "OtherCasesSearchDialog.correlationValueTextField.imsiExample=Example: \"310150123456789\"",
+ "OtherCasesSearchDialog.correlationValueTextField.iccidExample=Example: \"89 91 19 1299 99 329451 0\""
})
/**
* Update the text prompt of the name text field based on the input type
@@ -359,7 +405,7 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
* Add text prompt to the text field.
*/
String text;
- switch(selectedCorrelationType.getId()) {
+ switch (selectedCorrelationType.getId()) {
case CorrelationAttributeInstance.FILES_TYPE_ID:
text = Bundle.OtherCasesSearchDialog_correlationValueTextField_filesExample();
break;
@@ -378,22 +424,34 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
case CorrelationAttributeInstance.SSID_TYPE_ID:
text = Bundle.OtherCasesSearchDialog_correlationValueTextField_ssidExample();
break;
+ case CorrelationAttributeInstance.MAC_TYPE_ID:
+ text = Bundle.OtherCasesSearchDialog_correlationValueTextField_macExample();
+ break;
+ case CorrelationAttributeInstance.IMEI_TYPE_ID:
+ text = Bundle.OtherCasesSearchDialog_correlationValueTextField_imeiExample();
+ break;
+ case CorrelationAttributeInstance.IMSI_TYPE_ID:
+ text = Bundle.OtherCasesSearchDialog_correlationValueTextField_imsiExample();
+ break;
+ case CorrelationAttributeInstance.ICCID_TYPE_ID:
+ text = Bundle.OtherCasesSearchDialog_correlationValueTextField_iccidExample();
+ break;
default:
text = "";
break;
}
correlationValueTextFieldPrompt = new TextPrompt(text, correlationValueTextField);
-
+
/**
* Sets the foreground color and transparency of the text prompt.
*/
correlationValueTextFieldPrompt.setForeground(Color.LIGHT_GRAY);
correlationValueTextFieldPrompt.changeAlpha(0.9f); // Mostly opaque
-
+
validate();
repaint();
}
-
+
/**
* Update the 'selectedCorrelationType' value to match the selected type
* from the combo-box.
@@ -406,7 +464,7 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
}
}
}
-
+
/**
* Enable or disable the Search button depending on whether or not text has
* been provided for the correlation property value.
@@ -433,4 +491,4 @@ final class OtherCasesSearchDialog extends javax.swing.JDialog {
private javax.swing.JLabel errorLabel;
private javax.swing.JButton searchButton;
// End of variables declaration//GEN-END:variables
-}
\ No newline at end of file
+}
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
index 0f4a568ba1..2f709310b6 100644
--- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
+++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
@@ -285,7 +285,19 @@ class ReportHTML implements TableReportModule {
in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/accounts.png"); //NON-NLS
break;
case TSK_WIFI_NETWORK:
- in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/network-wifi.png"); //NON-NLS
+ in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/network-wifi.png"); //NON-NLS
+ break;
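+            // The device-related artifact types below reuse icons from the shared /org/sleuthkit/autopsy/images directory.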
+ case TSK_WIFI_NETWORK_ADAPTER:
+ in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/network-wifi.png"); //NON-NLS
+ break;
+ case TSK_SIM_ATTACHED:
+ in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/sim_card.png"); //NON-NLS
+ break;
+ case TSK_BLUETOOTH_ADAPTER:
+ in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/Bluetooth.png"); //NON-NLS
+ break;
+ case TSK_DEVICE_INFO:
+ in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/images/devices.png"); //NON-NLS
break;
default:
logger.log(Level.WARNING, "useDataTypeIcon: unhandled artifact type = {0}", dataType); //NON-NLS
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
index e4d6ae5dd8..ebdc552ec8 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/InterCaseTestUtils.java
@@ -56,11 +56,11 @@ import org.sleuthkit.autopsy.commonfilesearch.CaseDBCommonAttributeInstanceNode;
import org.sleuthkit.autopsy.commonfilesearch.CentralRepoCommonAttributeInstance;
import org.sleuthkit.autopsy.commonfilesearch.CentralRepoCommonAttributeInstanceNode;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeSearchResults;
-import org.sleuthkit.autopsy.commonfilesearch.DataSourceLoader;
+import org.sleuthkit.autopsy.guiutils.DataSourceLoader;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValue;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValueList;
import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
-import org.sleuthkit.autopsy.modules.e01verify.E01VerifierModuleFactory;
+import org.sleuthkit.autopsy.modules.dataSourceIntegrity.DataSourceIntegrityModuleFactory;
import org.sleuthkit.autopsy.modules.embeddedfileextractor.EmbeddedFileExtractorModuleFactory;
import org.sleuthkit.autopsy.modules.exif.ExifParserModuleFactory;
import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleFactory;
@@ -183,8 +183,8 @@ class InterCaseTestUtils {
final IngestModuleTemplate hashLookupTemplate = IngestUtils.getIngestModuleTemplate(new HashLookupModuleFactory());
final IngestModuleTemplate vmExtractorTemplate = IngestUtils.getIngestModuleTemplate(new VMExtractorIngestModuleFactory());
final IngestModuleTemplate photoRecTemplate = IngestUtils.getIngestModuleTemplate(new PhotoRecCarverIngestModuleFactory());
- final IngestModuleTemplate e01VerifierTemplate = IngestUtils.getIngestModuleTemplate(new E01VerifierModuleFactory());
- final IngestModuleTemplate eamDbTemplate = IngestUtils.getIngestModuleTemplate(new org.sleuthkit.autopsy.centralrepository.ingestmodule.IngestModuleFactory());
+ final IngestModuleTemplate dataSourceIntegrityTemplate = IngestUtils.getIngestModuleTemplate(new DataSourceIntegrityModuleFactory());
+ final IngestModuleTemplate eamDbTemplate = IngestUtils.getIngestModuleTemplate(new org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleFactory());
final IngestModuleTemplate fileExtMismatchDetectorTemplate = IngestUtils.getIngestModuleTemplate(new FileExtMismatchDetectorModuleFactory());
//TODO we need to figure out how to get ahold of these objects because they are required for properly filling the CR with test data
// final IngestModuleTemplate objectDetectorTemplate = IngestUtils.getIngestModuleTemplate(new org.sleuthkit.autopsy.experimental.objectdetection.ObjectDetectionModuleFactory());
@@ -217,10 +217,10 @@ class InterCaseTestUtils {
kitchenSink.add(hashLookupTemplate);
kitchenSink.add(vmExtractorTemplate);
kitchenSink.add(photoRecTemplate);
- kitchenSink.add(e01VerifierTemplate);
+ kitchenSink.add(dataSourceIntegrityTemplate);
kitchenSink.add(eamDbTemplate);
kitchenSink.add(fileExtMismatchDetectorTemplate);
- //TODO this list should probably be populated by way of loading the appropriate modules based on finding all of the @ServiceProvider(service = IngestModuleFactory.class) types
+ //TODO this list should probably be populated by way of loading the appropriate modules based on finding all of the @ServiceProvider(service = CentralRepoIngestModuleFactory.class) types
// kitchenSink.add(objectDetectorTemplate);
// kitchenSink.add(emailParserTemplate);
// kitchenSink.add(recentActivityTemplate);
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IntraCaseTestUtils.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IntraCaseTestUtils.java
index eacbad7a6d..d8f9f86ec3 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IntraCaseTestUtils.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonfilessearch/IntraCaseTestUtils.java
@@ -36,7 +36,7 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.commonfilesearch.AbstractCommonAttributeInstance;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeSearchResults;
-import org.sleuthkit.autopsy.commonfilesearch.DataSourceLoader;
+import org.sleuthkit.autopsy.guiutils.DataSourceLoader;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValue;
import org.sleuthkit.autopsy.commonfilesearch.CommonAttributeValueList;
import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java
index bff999c810..2f18643c61 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java
@@ -24,6 +24,7 @@ import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -374,25 +375,32 @@ class VolatilityProcessor {
continue;
}
try {
- BlackboardArtifact volArtifact = resolvedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
- BlackboardAttribute att1 = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, VOLATILITY, Bundle.VolatilityProcessor_artifactAttribute_interestingFileSet(pluginName));
- volArtifact.addAttribute(att1);
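+                        // Build the attribute list up front so it can be used both to check for an existing artifact and to attach the attributes to a new artifact in one call.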
+                        Collection<BlackboardAttribute> attributes = new ArrayList<>();
+ attributes.add(new BlackboardAttribute(
+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, VOLATILITY, Bundle.VolatilityProcessor_artifactAttribute_interestingFileSet(pluginName)));
+
+ org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+ // Create artifact if it doesn't already exist.
+ if (!tskBlackboard.artifactExists(resolvedFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact volArtifact = resolvedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ volArtifact.addAttributes(attributes);
- try {
- // index the artifact for keyword search
- blackboard.indexArtifact(volArtifact);
- } catch (Blackboard.BlackboardException ex) {
- errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName));
- /*
- * Log the exception as well as add it to the error
- * messages, to ensure that the stack trace is not
- * lost.
- */
- logger.log(Level.SEVERE, String.format("Failed to index artifact (artifactId=%d) for for output of %s plugin", volArtifact.getArtifactID(), pluginName), ex);
+ try {
+ // index the artifact for keyword search
+ blackboard.indexArtifact(volArtifact);
+ } catch (Blackboard.BlackboardException ex) {
+ errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName));
+ /*
+ * Log the exception as well as add it to the error
+ * messages, to ensure that the stack trace is not
+ * lost.
+ */
+                                logger.log(Level.SEVERE, String.format("Failed to index artifact (artifactId=%d) for output of %s plugin", volArtifact.getArtifactID(), pluginName), ex);
+ }
+
+ // fire event to notify UI of this new artifact
+ services.fireModuleDataEvent(new ModuleDataEvent(VOLATILITY, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
-
- // fire event to notify UI of this new artifact
- services.fireModuleDataEvent(new ModuleDataEvent(VOLATILITY, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
} catch (TskCoreException ex) {
throw new VolatilityProcessorException(Bundle.VolatilityProcessor_exceptionMessage_errorCreatingArtifact(pluginName), ex);
}
diff --git a/ImageGallery/nbproject/platform.properties b/ImageGallery/nbproject/platform.properties
index 351256334b..0c717f4f53 100644
--- a/ImageGallery/nbproject/platform.properties
+++ b/ImageGallery/nbproject/platform.properties
@@ -7,7 +7,7 @@ nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version}
harness.dir=${nbplatform.active.dir}/harness
bootstrap.url=http://bits.netbeans.org/dev/nbms-and-javadoc/lastSuccessfulBuild/artifact/nbbuild/netbeans/harness/tasks.jar
# Where we get the platform from. To see what versions are available, open URL in browser up to the .../updates part of the URL
-autoupdate.catalog.url=http://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
+autoupdate.catalog.url=https://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
cluster.path=\
${nbplatform.active.dir}/harness:\
${nbplatform.active.dir}/java:\
diff --git a/ScalpelCarver/nbproject/platform.properties b/ScalpelCarver/nbproject/platform.properties
index bff2a507cf..1562be66c8 100644
--- a/ScalpelCarver/nbproject/platform.properties
+++ b/ScalpelCarver/nbproject/platform.properties
@@ -8,7 +8,7 @@ nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version}
harness.dir=${nbplatform.active.dir}/harness
bootstrap.url=http://bits.netbeans.org/dev/nbms-and-javadoc/lastSuccessfulBuild/artifact/nbbuild/netbeans/harness/tasks.jar
# Where we get the platform from. To see what versions are available, open URL in browser up to the .../updates part of the URL
-autoupdate.catalog.url=http://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
+autoupdate.catalog.url=https://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
cluster.path=\
${nbplatform.active.dir}/harness:\
${nbplatform.active.dir}/java:\
diff --git a/nbproject/platform.properties b/nbproject/platform.properties
index 86a5375b54..898ac95983 100644
--- a/nbproject/platform.properties
+++ b/nbproject/platform.properties
@@ -7,7 +7,7 @@ nbplatform.active.dir=${suite.dir}/netbeans-plat/${netbeans-plat-version}
harness.dir=${nbplatform.active.dir}/harness
bootstrap.url=http://bits.netbeans.org/dev/nbms-and-javadoc/lastSuccessfulBuild/artifact/nbbuild/netbeans/harness/tasks.jar
# Where we get the platform from. To see what versions are available, open URL in browser up to the .../updates part of the URL
-autoupdate.catalog.url=http://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
+autoupdate.catalog.url=https://updates.netbeans.org/netbeans/updates/${netbeans-plat-version}/uc/final/distribution/catalog.xml.gz
cluster.path=\
${nbplatform.active.dir}/harness:\
${nbplatform.active.dir}/java:\