From ebf34dab14ad4517b10af4b370b819bb7ee65ac9 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 7 Nov 2017 14:15:49 -0500 Subject: [PATCH 01/17] Allow user to import Encase hashsets into central repo --- .../ImportCentralRepoDbProgressDialog.java | 179 +++++++++++++++++- 1 file changed, 177 insertions(+), 2 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 597cb8c39d..969e84b0b8 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -26,6 +26,7 @@ import java.io.File; import java.io.FileReader; import java.util.HashSet; import java.util.Set; +import java.util.List; import java.util.logging.Level; import javax.swing.JFrame; import javax.swing.SwingWorker; @@ -76,14 +77,24 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P bnOk.setEnabled(false); } + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.unknownFormat.message=Hash set to import is an unknown format"}) void importFile(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, boolean readOnly, String importFileName){ setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); File importFile = new File(importFileName); - worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, - knownFilesType, readOnly, importFile); + if(importFileName.endsWith(".idx")){ // < need case insensitive + worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, + knownFilesType, readOnly, importFile); + } else if(importFileName.endsWith(".hash")){ // < need case insensitive + worker = new ImportEncaseWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, + knownFilesType, readOnly, importFile); + } else { + // We've gotten here with a format that can't be processed + JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_unknownFormat_message()); + return; + } worker.addPropertyChangeListener(this); worker.execute(); @@ -131,6 +142,170 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P HashDbManager.HashDatabase getDatabase(); } + class ImportEncaseWorker extends SwingWorker implements CentralRepoImportWorker{ + private final int HASH_IMPORT_THRESHOLD = 10000; + private final String hashSetName; + private final String version; + private final int orgId; + private final boolean searchDuringIngest; + private final boolean sendIngestMessages; + private final HashDbManager.HashDb.KnownFilesType knownFilesType; + private final boolean readOnly; + private final File importFile; + private final long totalLines; + private int crIndex = -1; + private HashDbManager.CentralRepoHashDb newHashDb = null; + private final AtomicLong numLines = new AtomicLong(); + + ImportEncaseWorker(String hashSetName, String version, int orgId, + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, File importFile){ + + this.hashSetName = hashSetName; + this.version = version; + this.orgId = orgId; + this.searchDuringIngest = searchDuringIngest; + this.sendIngestMessages = sendIngestMessages; + 
this.knownFilesType = knownFilesType; + this.readOnly = readOnly; + this.importFile = importFile; + this.numLines.set(0); + + this.totalLines = getEstimatedTotalHashes(); + } + + /** + * Encase files have a 0x480 byte header, then each hash takes 18 bytes + * @return Approximate number of hashes in the file + */ + final long getEstimatedTotalHashes(){ + long fileSize = importFile.length(); + if(fileSize < 0x492){ + return 1; // There's room for at most one hash + } + return ((fileSize - 0x492) / 18); + } + + @Override + public HashDbManager.HashDatabase getDatabase(){ + return newHashDb; + } + + @Override + public long getLinesProcessed(){ + return numLines.get(); + } + + @Override + public int getProgressPercentage(){ + return this.getProgress(); + } + + @Override + protected Void doInBackground() throws Exception { + + EncaseHashSetParser encaseParser = new EncaseHashSetParser(this.importFile.getAbsolutePath()); + + TskData.FileKnown knownStatus; + if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { + knownStatus = TskData.FileKnown.KNOWN; + } else { + knownStatus = TskData.FileKnown.BAD; + } + + // Create an empty hashset in the central repository + crIndex = EamDb.getInstance().newReferenceSet(orgId, hashSetName, version, knownStatus, readOnly); + + EamDb dbManager = EamDb.getInstance(); + CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type + + Set globalInstances = new HashSet<>(); + + while (! encaseParser.doneReading()) { + if(isCancelled()){ + return null; + } + + String newHash = encaseParser.getNextHash(); + + if(newHash != null){ + EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( + crIndex, + newHash, + knownStatus, + ""); + + globalInstances.add(eamGlobalFileInstance); + numLines.incrementAndGet(); + + if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){ + dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); + globalInstances.clear(); + + int progress = (int)(numLines.get() * 100 / totalLines); + if(progress < 100){ + this.setProgress(progress); + } else { + this.setProgress(99); + } + } + } + } + + dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); + this.setProgress(100); + return null; + } + + private void deleteIncompleteSet(int crIndex){ + if(crIndex >= 0){ + + // This can be slow on large reference sets + Executors.newSingleThreadExecutor().execute(new Runnable() { + @Override + public void run() { + try{ + EamDb.getInstance().deleteReferenceSet(crIndex); + } catch (EamDbException ex2){ + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); + } + } + }); + } + } + + @Override + protected void done() { + + if(isCancelled()){ + // If the user hit cancel, delete this incomplete hash set from the central repo + deleteIncompleteSet(crIndex); + return; + } + + try { + get(); + try{ + newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, + crIndex, + searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); + } catch (TskCoreException ex){ + JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message()); + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); + } + } catch (Exception ex) { + // Delete this incomplete hash set from the central repo + if(crIndex >= 0){ + try{ 
+ EamDb.getInstance().deleteReferenceSet(crIndex); + } catch (EamDbException ex2){ + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex); + } + } + } + } + } + class ImportIDXWorker extends SwingWorker implements CentralRepoImportWorker{ private final int HASH_IMPORT_THRESHOLD = 10000; From 6494a23deeff2507c2215236fbfa01203788f04e Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 7 Nov 2017 14:19:05 -0500 Subject: [PATCH 02/17] Adding encase parser file --- .../hashdatabase/EncaseHashSetParser.java | 167 ++++++++++++++++++ 1 file changed, 167 insertions(+) create mode 100644 Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java new file mode 100644 index 0000000000..df9d78b7a3 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -0,0 +1,167 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011 - 2017 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.modules.hashdatabase; + +import java.io.InputStream; +import java.io.BufferedInputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.lang.StringBuilder; +import java.util.Arrays; +import java.util.List; +import java.util.ArrayList; +import java.util.logging.Level; +import javax.swing.JOptionPane; +import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.core.RuntimeProperties; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.TskCoreException; + +class EncaseHashSetParser { + final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, + (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00}; + InputStream inputStream; + final int expectedHashes; + int totalHashesRead = 0; + + /** + * Opens the import file and parses the header. + * @param filename The Encase hashset + * @throws TskCoreException There was an error opening/reading the file or it is not the correct format + */ + @NbBundle.Messages({"EncaseHashSetParser.fileOpenError.text=Error reading import file", + "EncaseHashSetParser.wrongFormat.text=Hashset is not Encase format"}) + EncaseHashSetParser(String filename) throws TskCoreException{ + try{ + inputStream = new BufferedInputStream(new FileInputStream(filename)); + + // Read in and test the 16 byte header + byte[] header = new byte[16]; + readBuffer(header, 16); + if(! 
Arrays.equals(header, encaseHeader)){ + displayError(NbBundle.getMessage(this.getClass(), + "EncaseHashSetParser.wrongFormat.text")); + close(); + throw new TskCoreException("File " + filename + " does not have an Encase header"); + } + + // Read in the expected number of hashes + byte[] sizeBuffer = new byte[4]; + readBuffer(sizeBuffer, 4); + expectedHashes = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16) + | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); + + // Read in a bunch of nulls + byte[] filler = new byte[0x3f4]; + readBuffer(filler, 0x3f4); + + // Read in the hash set name + byte[] nameBuffer = new byte[0x50]; + readBuffer(nameBuffer, 0x50); + + // Read in the hash set type + byte[] typeBuffer = new byte[0x28]; + readBuffer(typeBuffer, 0x28); + + } catch (IOException ex){ + displayError(NbBundle.getMessage(this.getClass(), + "EncaseHashSetParser.fileOpenError.text")); + close(); + throw new TskCoreException("Error reading " + filename, ex); + } catch (TskCoreException ex){ + close(); + throw ex; + } + } + + int getExpectedHashes(){ + return expectedHashes; + } + + synchronized boolean doneReading(){ + if(inputStream == null){ + return true; + } + + return(totalHashesRead >= expectedHashes); + } + + synchronized String getNextHash() throws TskCoreException{ + if(inputStream == null){ + return null; + } + + byte[] hashBytes = new byte[16]; + byte[] divider = new byte[2]; + try{ + + readBuffer(hashBytes, 16); + readBuffer(divider, 2); + + StringBuilder sb = new StringBuilder(); + for (byte b : hashBytes) { + sb.append(String.format("%02x", b)); + } + + totalHashesRead++; + return sb.toString(); + } catch (IOException ex){ + // Log it and return what we've got + Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Ran out of data while reading Encase hash sets", ex); + close(); + throw new TskCoreException("Error reading hash", ex); + } + } + + synchronized final void close(){ + if(inputStream != null){ + try{ + inputStream.close(); + } catch (IOException ex){ + Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set", ex); + } finally { + inputStream = null; + } + } + } + + @NbBundle.Messages({"EncaseHashSetParser.outOfData.text=Ran out of data while parsing file"}) + private synchronized void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { + if(inputStream == null){ + throw new TskCoreException("readBuffer called on null inputStream"); + } + if(length != inputStream.read(buffer)){ + displayError(NbBundle.getMessage(this.getClass(), + "EncaseHashSetParser.outOfData.text")); + close(); + throw new TskCoreException("Ran out of data while parsing Encase file"); + } + } + + @NbBundle.Messages({"EncaseHashSetParser.error.title=Error importing Encase hashset"}) + private void displayError(String errorText){ + if(RuntimeProperties.runningWithGUI()){ + JOptionPane.showMessageDialog(null, + errorText, + NbBundle.getMessage(this.getClass(), + "EncaseHashSetParser.error.title"), + JOptionPane.ERROR_MESSAGE); + } + } +} From d9e97d1b94c0ba514621f5e6ec42307ab216c53c Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Wed, 8 Nov 2017 12:08:52 -0500 Subject: [PATCH 03/17] Finished encase hash set importing --- .../HashDbImportDatabaseDialog.java | 4 +-- .../ImportCentralRepoDbProgressDialog.java | 30 ++++++++++++++++--- 2 files changed, 28 insertions(+), 6 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java 
b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index 66037cccf9..92e662f43e 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -90,11 +90,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { fileChooser.setMultiSelectionEnabled(false); } - @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx only)"}) + @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx or .hash only)"}) private void updateFileChooserFilter() { fileChooser.resetChoosableFileFilters(); if(centralRepoRadioButton.isSelected()){ - String[] EXTENSION = new String[]{"idx"}; //NON-NLS + String[] EXTENSION = new String[]{"hash", "Hash", "idx"}; //NON-NLS FileNameExtensionFilter filter = new FileNameExtensionFilter( NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.centralRepoExtFilter.text"), EXTENSION); fileChooser.setFileFilter(filter); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 969e84b0b8..8f83a5e3b4 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -18,6 +18,7 @@ */ package org.sleuthkit.autopsy.modules.hashdatabase; +import java.awt.Color; import java.awt.Cursor; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; @@ -84,10 +85,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); File importFile = new File(importFileName); - if(importFileName.endsWith(".idx")){ // < need case insensitive + if(importFileName.toLowerCase().endsWith(".idx")){ worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); - } else if(importFileName.endsWith(".hash")){ // < need case insensitive + } else if(importFileName.toLowerCase().endsWith(".hash")){ worker = new ImportEncaseWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); } else { @@ -123,8 +124,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P bnCancel.setEnabled(false); bnOk.setEnabled(true); - progressBar.setValue(progressBar.getMaximum()); - lbProgress.setText(getProgressString()); + if(worker.getError().isEmpty()){ + progressBar.setValue(progressBar.getMaximum()); + lbProgress.setText(getProgressString()); + } else { + progressBar.setValue(0); + lbProgress.setForeground(Color.red); + lbProgress.setText(worker.getError()); + } } } @@ -140,6 +147,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P int getProgressPercentage(); long getLinesProcessed(); HashDbManager.HashDatabase getDatabase(); + String getError(); } class ImportEncaseWorker extends SwingWorker implements CentralRepoImportWorker{ @@ -156,6 +164,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private int crIndex = -1; private HashDbManager.CentralRepoHashDb newHashDb = null; private final AtomicLong numLines = new AtomicLong(); + private String 
errorString = ""; ImportEncaseWorker(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, @@ -201,6 +210,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return this.getProgress(); } + @Override + public String getError(){ + return errorString; + } + @Override protected Void doInBackground() throws Exception { @@ -274,6 +288,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importError=Error importing hash set"}) @Override protected void done() { @@ -302,6 +317,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex); } } + errorString = Bundle.ImportCentralRepoDbProgressDialog_importError(); } } } @@ -321,6 +337,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private int crIndex = -1; private HashDbManager.CentralRepoHashDb newHashDb = null; private final AtomicLong numLines = new AtomicLong(); + private String errorString = ""; ImportIDXWorker(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, @@ -365,6 +382,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return this.getProgress(); } + @Override + public String getError(){ + return errorString; + } + @Override protected Void doInBackground() throws Exception { From 7a3b009747822fcb525095b764ef688393ac6d0f Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Thu, 9 Nov 2017 07:40:28 -0500 Subject: [PATCH 04/17] Refactoring --- .../ImportCentralRepoDbProgressDialog.java | 49 +++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 8f83a5e3b4..ef81083e40 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -139,6 +139,55 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed(); } + abstract class CentralRepoImportWorker2 extends SwingWorker{ + private final int HASH_IMPORT_THRESHOLD = 10000; + private final String hashSetName; + private final String version; + private final int orgId; + private final boolean searchDuringIngest; + private final boolean sendIngestMessages; + private final HashDbManager.HashDb.KnownFilesType knownFilesType; + private final boolean readOnly; + private final File importFile; + private long totalLines; + private int crIndex = -1; + private HashDbManager.CentralRepoHashDb newHashDb = null; + private final AtomicLong numLines = new AtomicLong(); + private String errorString = ""; + + CentralRepoImportWorker2(String hashSetName, String version, int orgId, + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, File importFile){ + + this.hashSetName = hashSetName; + this.version = version; + 
this.orgId = orgId; + this.searchDuringIngest = searchDuringIngest; + this.sendIngestMessages = sendIngestMessages; + this.knownFilesType = knownFilesType; + this.readOnly = readOnly; + this.importFile = importFile; + this.numLines.set(0); + } + + HashDbManager.HashDatabase getDatabase(){ + return newHashDb; + } + + long getLinesProcessed(){ + return numLines.get(); + } + + int getProgressPercentage(){ + return this.getProgress(); + } + + String getError(){ + return errorString; + } + + } + private interface CentralRepoImportWorker{ void execute(); From 4b6a9cb4a024f50621bb012f202500b0696e7cba Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Thu, 9 Nov 2017 08:09:16 -0500 Subject: [PATCH 05/17] Refactored the database import workers --- .../ImportCentralRepoDbProgressDialog.java | 324 +++++------------- 1 file changed, 92 insertions(+), 232 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 0389cd3460..281583a4bf 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -111,7 +111,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return null; } - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= lines processed"}) + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= hashes processed"}) @Override public void propertyChange(PropertyChangeEvent evt) { @@ -139,23 +139,23 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed(); } - abstract class CentralRepoImportWorker2 extends SwingWorker{ - private final int HASH_IMPORT_THRESHOLD = 10000; - private final String hashSetName; - private final String version; - private final int orgId; - private final boolean searchDuringIngest; - private final boolean sendIngestMessages; - private final HashDbManager.HashDb.KnownFilesType knownFilesType; - private final boolean readOnly; - private final File importFile; - private long totalLines; - private int crIndex = -1; - private HashDbManager.CentralRepoHashDb newHashDb = null; - private final AtomicLong numLines = new AtomicLong(); - private String errorString = ""; + abstract class CentralRepoImportWorker extends SwingWorker{ + final int HASH_IMPORT_THRESHOLD = 10000; + final String hashSetName; + final String version; + final int orgId; + final boolean searchDuringIngest; + final boolean sendIngestMessages; + final HashDbManager.HashDb.KnownFilesType knownFilesType; + final boolean readOnly; + final File importFile; + long totalHashes = 1; + int referenceSetID = -1; + HashDbManager.CentralRepoHashDb newHashDb = null; + final AtomicLong numLines = new AtomicLong(); + String errorString = ""; - CentralRepoImportWorker2(String hashSetName, String version, int orgId, + CentralRepoImportWorker(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, boolean readOnly, File importFile){ @@ -186,88 +186,88 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return errorString; } - } - - private interface CentralRepoImportWorker{ + /** + * Should be called in the constructor to set 
the max number of hashes. + * The value can be updated later after parsing the import file. + */ + abstract void setEstimatedTotalHashes(); - void execute(); - boolean cancel(boolean mayInterruptIfRunning); - void addPropertyChangeListener(PropertyChangeListener dialog); - int getProgressPercentage(); - long getLinesProcessed(); - HashDbManager.HashDatabase getDatabase(); - String getError(); + void deleteIncompleteSet(){ + if(referenceSetID >= 0){ + + // This can be slow on large reference sets + Executors.newSingleThreadExecutor().execute(new Runnable() { + @Override + public void run() { + try{ + EamDb.getInstance().deleteReferenceSet(referenceSetID); + } catch (EamDbException ex2){ + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); + } + } + }); + } + } + + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importHashsetError=Error importing hash set", + "ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"}) + @Override + protected void done() { + + if(isCancelled()){ + // If the user hit cancel, delete this incomplete hash set from the central repo + deleteIncompleteSet(); + return; + } + + try { + get(); + try{ + newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, + referenceSetID, + searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); + } catch (TskCoreException ex){ + JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message()); + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); + } + } catch (Exception ex) { + // Delete this incomplete hash set from the central repo + deleteIncompleteSet(); + errorString = Bundle.ImportCentralRepoDbProgressDialog_importHashsetError(); + } + } + } - class ImportEncaseWorker extends SwingWorker implements CentralRepoImportWorker{ - private final int HASH_IMPORT_THRESHOLD = 10000; - private final String hashSetName; - private final String version; - private final int orgId; - private final boolean searchDuringIngest; - private final boolean sendIngestMessages; - private final HashDbManager.HashDb.KnownFilesType knownFilesType; - private final boolean readOnly; - private final File importFile; - private final long totalLines; - private int crIndex = -1; - private HashDbManager.CentralRepoHashDb newHashDb = null; - private final AtomicLong numLines = new AtomicLong(); - private String errorString = ""; + class ImportEncaseWorker extends CentralRepoImportWorker{ ImportEncaseWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, File importFile){ + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, File importFile){ + super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); - this.hashSetName = hashSetName; - this.version = version; - this.orgId = orgId; - this.searchDuringIngest = searchDuringIngest; - this.sendIngestMessages = sendIngestMessages; - this.knownFilesType = knownFilesType; - this.readOnly = readOnly; - this.importFile = importFile; - this.numLines.set(0); - - this.totalLines = getEstimatedTotalHashes(); + setEstimatedTotalHashes(); } + /** * Encase files have a 0x480 byte header, 
then each hash takes 18 bytes * @return Approximate number of hashes in the file */ - final long getEstimatedTotalHashes(){ + @Override + final void setEstimatedTotalHashes(){ long fileSize = importFile.length(); if(fileSize < 0x492){ - return 1; // There's room for at most one hash + totalHashes = 1; // There's room for at most one hash } - return ((fileSize - 0x492) / 18); - } - - @Override - public HashDbManager.HashDatabase getDatabase(){ - return newHashDb; - } - - @Override - public long getLinesProcessed(){ - return numLines.get(); - } - - @Override - public int getProgressPercentage(){ - return this.getProgress(); - } - - @Override - public String getError(){ - return errorString; + totalHashes = (fileSize - 0x492) / 18; } @Override protected Void doInBackground() throws Exception { - EncaseHashSetParser encaseParser = new EncaseHashSetParser(this.importFile.getAbsolutePath()); + EncaseHashSetParser encaseParser = new EncaseHashSetParser(importFile.getAbsolutePath()); + totalHashes = encaseParser.getExpectedHashes(); TskData.FileKnown knownStatus; if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { @@ -277,7 +277,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } // Create an empty hashset in the central repository - crIndex = EamDb.getInstance().newReferenceSet(orgId, hashSetName, version, knownStatus, readOnly); + referenceSetID = EamDb.getInstance().newReferenceSet(orgId, hashSetName, version, knownStatus, readOnly); EamDb dbManager = EamDb.getInstance(); CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type @@ -293,7 +293,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P if(newHash != null){ EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - crIndex, + referenceSetID, newHash, knownStatus, ""); @@ -305,7 +305,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); globalInstances.clear(); - int progress = (int)(numLines.get() * 100 / totalLines); + int progress = (int)(numLines.get() * 100 / totalHashes); if(progress < 100){ this.setProgress(progress); } else { @@ -319,90 +319,17 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.setProgress(100); return null; } - - private void deleteIncompleteSet(int crIndex){ - if(crIndex >= 0){ - - // This can be slow on large reference sets - Executors.newSingleThreadExecutor().execute(new Runnable() { - @Override - public void run() { - try{ - EamDb.getInstance().deleteReferenceSet(crIndex); - } catch (EamDbException ex2){ - Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); - } - } - }); - } - } - - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importError=Error importing hash set"}) - @Override - protected void done() { - - if(isCancelled()){ - // If the user hit cancel, delete this incomplete hash set from the central repo - deleteIncompleteSet(crIndex); - return; - } - - try { - get(); - try{ - newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, - crIndex, - searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); - } catch (TskCoreException ex){ - JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message()); - 
Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); - } - } catch (Exception ex) { - // Delete this incomplete hash set from the central repo - if(crIndex >= 0){ - try{ - EamDb.getInstance().deleteReferenceSet(crIndex); - } catch (EamDbException ex2){ - Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex); - } - } - errorString = Bundle.ImportCentralRepoDbProgressDialog_importError(); - } - } } - class ImportIDXWorker extends SwingWorker implements CentralRepoImportWorker{ - - private final int HASH_IMPORT_THRESHOLD = 10000; - private final String hashSetName; - private final String version; - private final int orgId; - private final boolean searchDuringIngest; - private final boolean sendIngestMessages; - private final HashDbManager.HashDb.KnownFilesType knownFilesType; - private final boolean readOnly; - private final File importFile; - private final long totalLines; - private int referenceSetID = -1; - private HashDbManager.CentralRepoHashDb newHashDb = null; - private final AtomicLong numLines = new AtomicLong(); - private String errorString = ""; + + class ImportIDXWorker extends CentralRepoImportWorker{ ImportIDXWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, File importFile){ + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, File importFile){ + super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); - this.hashSetName = hashSetName; - this.version = version; - this.orgId = orgId; - this.searchDuringIngest = searchDuringIngest; - this.sendIngestMessages = sendIngestMessages; - this.knownFilesType = knownFilesType; - this.readOnly = readOnly; - this.importFile = importFile; - this.numLines.set(0); - - this.totalLines = getEstimatedTotalHashes(); + setEstimatedTotalHashes(); } /** @@ -411,29 +338,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P * progress bar. 
* @return Approximate number of hashes in the file */ - final long getEstimatedTotalHashes(){ + @Override + final void setEstimatedTotalHashes(){ long fileSize = importFile.length(); - return (fileSize / 0x33 + 1); // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero - } - - @Override - public HashDbManager.HashDatabase getDatabase(){ - return newHashDb; - } - - @Override - public long getLinesProcessed(){ - return numLines.get(); - } - - @Override - public int getProgressPercentage(){ - return this.getProgress(); - } - - @Override - public String getError(){ - return errorString; + totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero } @Override @@ -480,7 +388,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); globalInstances.clear(); - int progress = (int)(numLines.get() * 100 / totalLines); + int progress = (int)(numLines.get() * 100 / totalHashes); if(progress < 100){ this.setProgress(progress); } else { @@ -494,54 +402,6 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return null; } - - private void deleteIncompleteSet(int idToDelete){ - if(idToDelete >= 0){ - - // This can be slow on large reference sets - Executors.newSingleThreadExecutor().execute(new Runnable() { - @Override - public void run() { - try{ - EamDb.getInstance().deleteReferenceSet(idToDelete); - } catch (EamDbException ex2){ - Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); - } - } - }); - } - } - - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"}) - @Override - protected void done() { - if(isCancelled()){ - // If the user hit cancel, delete this incomplete hash set from the central repo - deleteIncompleteSet(referenceSetID); - return; - } - - try { - get(); - try{ - newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, - referenceSetID, - searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); - } catch (TskCoreException ex){ - JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message()); - Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); - } - } catch (Exception ex) { - // Delete this incomplete hash set from the central repo - if(referenceSetID >= 0){ - try{ - EamDb.getInstance().deleteReferenceSet(referenceSetID); - } catch (EamDbException ex2){ - Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex); - } - } - } - } } /** From 3ab8a95165becd7676221012ade1af2f7c806830 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Fri, 10 Nov 2017 12:44:48 -0500 Subject: [PATCH 06/17] Enable any central repo has sets the user personally creates by default. 
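Locally created reference sets are recorded in a per-user properties file
(module settings file CentralRepoHashSets, key LocallyCreatedHashsets) as
pipe-delimited tokens of the form |referenceSetID.hashSetName.version|, and
getDefaultSearchDuringIngest() now returns true only for sets found in that
list. As an illustrative example (the IDs and names here are hypothetical),
two sets created on this machine would be stored as:

    |3.MyKnownFiles.1.0||7.MyBadFiles.|

Editable sets have an empty version string, so their token ends in a dot
before the closing pipe.
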
--- .../modules/hashdatabase/HashDbManager.java | 45 ++++++++++++++++++- .../hashdatabase/HashLookupSettingsPanel.java | 19 ++++---- 2 files changed, 53 insertions(+), 11 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java index cd0b1339b7..8d03c4e37a 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java @@ -46,6 +46,7 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamGlobalSet; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; +import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.modules.hashdatabase.HashLookupSettings.HashDbInfo; import org.sleuthkit.datamodel.AbstractFile; @@ -70,6 +71,8 @@ public class HashDbManager implements PropertyChangeListener { PropertyChangeSupport changeSupport = new PropertyChangeSupport(HashDbManager.class); private static final Logger logger = Logger.getLogger(HashDbManager.class.getName()); private boolean allDatabasesLoadedCorrectly = false; + private static final String CENTRAL_REPO_HASH_SET_SETTINGS = "CentralRepoHashSets"; + private static final String CENTRAL_REPO_HASH_SET_LOCAL_KEY = "LocallyCreatedHashsets"; /** * Property change event support In events: For both of these enums, the old @@ -697,6 +700,44 @@ public class HashDbManager implements PropertyChangeListener { } } } + + /** + * Save any newly created central repo databases to the properties file. + * @param newHashSets + */ + static void saveNewCentralRepoDatabases(List newHashSets){ + + if(! newHashSets.isEmpty()){ + String newDbs = ""; + for(CentralRepoHashDb db:newHashSets){ + newDbs += makeCentralRepoHashSetString(db); + } + String oldSetting = ModuleSettings.getConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY); + String newSetting = ""; + if((oldSetting != null) && (! oldSetting.isEmpty())){ + newSetting = oldSetting; + } + newSetting += newDbs; + ModuleSettings.setConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY, newSetting); + } + } + + /** + * Check whether a given central repository hash set was created on this machine. + * @return true if it was created on this machine, false otherwise + */ + static boolean centralRepoWasCreatedLocally(CentralRepoHashDb db){ + String setting = ModuleSettings.getConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY); + String dbStr = makeCentralRepoHashSetString(db); + if(setting == null){ + return false; + } + return setting.contains(dbStr); + } + + private static String makeCentralRepoHashSetString(CentralRepoHashDb db){ + return "|" + db.getReferenceSetID() + "." + db.getHashSetName() + "." 
+ db.getVersion() + "|"; + } private boolean hashDbInfoIsNew(HashDbInfo dbInfo){ for(HashDatabase db:this.hashSets){ @@ -1258,8 +1299,8 @@ public class HashDbManager implements PropertyChangeListener { @Override public boolean getDefaultSearchDuringIngest(){ - // Central repo hash sets are off by default - return false; + // Central repo hash sets are off by default, unless created on this machine + return centralRepoWasCreatedLocally(this); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index 0a299a6e87..4b40c60386 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -27,7 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import javax.swing.JComponent; -import javax.swing.JLabel; import javax.swing.JOptionPane; import javax.swing.JTable; import javax.swing.ListSelectionModel; @@ -66,7 +65,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan .getMessage(HashLookupSettingsPanel.class, "HashDbConfigPanel.errorGettingIndexStatusText"); private final HashDbManager hashSetManager = HashDbManager.getInstance(); private final HashSetTableModel hashSetTableModel = new HashSetTableModel(); - private final List newReferenceSetIDs = new ArrayList<>(); + private final List newReferenceSets = new ArrayList<>(); public HashLookupSettingsPanel() { initComponents(); @@ -328,7 +327,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan try { hashSetManager.save(); - newReferenceSetIDs.clear(); + HashDbManager.getInstance().saveNewCentralRepoDatabases(newReferenceSets); + newReferenceSets.clear(); } catch (HashDbManager.HashDbManagerException ex) { SwingUtilities.invokeLater(() -> { JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_saveFail_message(), Bundle.HashLookupSettingsPanel_saveFail_title(), JOptionPane.ERROR_MESSAGE); @@ -355,10 +355,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan */ if (IngestManager.getInstance().isIngestRunning() == false) { // Remove any new central repo hash sets from the database - for(int refID:newReferenceSetIDs){ + for(CentralRepoHashDb db:newReferenceSets){ try{ if(EamDb.isEnabled()){ - EamDb.getInstance().deleteReferenceSet(refID); + EamDb.getInstance().deleteReferenceSet(db.getReferenceSetID()); } else { // This is the case where the user imported a database, then switched over to the central // repo panel and disabled it before cancelling. We can't delete the database at this point. 
@@ -368,6 +368,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS } } + newReferenceSets.clear(); HashDbManager.getInstance().loadLastSavedConfiguration(); } @@ -922,8 +923,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan HashDatabase hashDb = new HashDbCreateDatabaseDialog().getHashDatabase(); if (null != hashDb) { if(hashDb instanceof CentralRepoHashDb){ - int newDbIndex = ((CentralRepoHashDb)hashDb).getReferenceSetID(); - newReferenceSetIDs.add(newDbIndex); + CentralRepoHashDb crDb = (CentralRepoHashDb)hashDb; + newReferenceSets.add(crDb); } hashSetTableModel.refreshModel(); @@ -976,8 +977,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan HashDatabase hashDb = new HashDbImportDatabaseDialog().getHashDatabase(); if (null != hashDb) { if(hashDb instanceof CentralRepoHashDb){ - int newReferenceSetID = ((CentralRepoHashDb)hashDb).getReferenceSetID(); - newReferenceSetIDs.add(newReferenceSetID); + CentralRepoHashDb crDb = (CentralRepoHashDb)hashDb; + newReferenceSets.add(crDb); } hashSetTableModel.refreshModel(); From 3cacda1d22c7347d766e2444550cf06acdc1e77f Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Mon, 20 Nov 2017 08:43:26 -0500 Subject: [PATCH 07/17] Remove default version --- .../autopsy/centralrepository/datamodel/EamDb.java | 8 -------- .../modules/hashdatabase/HashDbCreateDatabaseDialog.java | 8 ++++---- .../modules/hashdatabase/HashDbImportDatabaseDialog.java | 2 +- 3 files changed, 5 insertions(+), 13 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java index 1011f837cc..d6bee5137f 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java @@ -94,14 +94,6 @@ public interface EamDb { return EamDbUtil.useCentralRepo() && EamDbPlatformEnum.getSelectedPlatform() != EamDbPlatformEnum.DISABLED; } - - /** - * Placeholder version to use for non-read only databases - * @return The version that will be stored in the database - */ - static String getDefaultVersion() { - return ""; - } /** * Get the list of tags recognized as "Bad" diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java index 0ecb3a5ad4..17a69930a2 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbCreateDatabaseDialog.java @@ -423,7 +423,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { }//GEN-LAST:event_saveAsButtonActionPerformed @NbBundle.Messages({"HashDbCreateDatabaseDialog.missingOrg=An organization must be selected", - "HashDbCreateDatabaseDialog.duplicateName=A hashset with this name and version already exists", + "HashDbCreateDatabaseDialog.duplicateName=A hashset with this name already exists", "HashDbCreateDatabaseDialog.databaseLookupError=Error accessing central repository", "HashDbCreateDatabaseDialog.databaseCreationError=Error creating new hash set" }) @@ -500,7 +500,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { } else { // Check if a hash set with the same 
name/version already exists try{ - if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), EamDb.getDefaultVersion())){ + if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), "")){ JOptionPane.showMessageDialog(this, NbBundle.getMessage(this.getClass(), "HashDbCreateDatabaseDialog.duplicateName"), @@ -522,9 +522,9 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog { try{ int referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(selectedOrg.getOrgID(), hashSetNameTextField.getText(), - EamDb.getDefaultVersion(), fileKnown, false)); + "", fileKnown, false)); newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetNameTextField.getText(), - EamDb.getDefaultVersion(), referenceSetID, + "", referenceSetID, true, sendIngestMessagesCheckbox.isSelected(), type, false); } catch (EamDbException | TskCoreException ex){ Logger.getLogger(HashDbImportDatabaseDialog.class.getName()).log(Level.SEVERE, "Error creating new reference set", ex); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index 8b26330e45..a91ea45a16 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -557,7 +557,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { version = versionTextField.getText(); } else { // Editable databases don't have a version - version = EamDb.getDefaultVersion(); + version = ""; } ImportCentralRepoDbProgressDialog progressDialog = new ImportCentralRepoDbProgressDialog(); progressDialog.importFile(hashSetNameTextField.getText(), version, From bb1abf07e43c1b9ceef69fe4e1bb3bcd7324eeaa Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Mon, 20 Nov 2017 08:49:59 -0500 Subject: [PATCH 08/17] Clear out new reference set ID list after save --- .../autopsy/modules/hashdatabase/HashLookupSettingsPanel.java | 1 + 1 file changed, 1 insertion(+) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index b5c6ca805a..e7bfcecf1e 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -335,6 +335,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } else { try { hashSetManager.save(); + newReferenceSetIDs.clear(); } catch (HashDbManager.HashDbManagerException ex) { SwingUtilities.invokeLater(() -> { JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_saveFail_message(), Bundle.HashLookupSettingsPanel_saveFail_title(), JOptionPane.ERROR_MESSAGE); From cd6d0ca14c63a54cddc3e83f293920bccd548e90 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Mon, 20 Nov 2017 08:58:49 -0500 Subject: [PATCH 09/17] Cleanup --- .../core.jar/org/netbeans/core/startup/Bundle.properties | 4 ++-- .../org/netbeans/core/windows/view/ui/Bundle.properties | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 2922cd2054..0de39782ca 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ 
b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Thu, 07 Sep 2017 13:53:53 -0400 +#Wed, 08 Nov 2017 17:45:11 -0500 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 @@ -8,4 +8,4 @@ SplashRunningTextBounds=0,289,538,18 SplashRunningTextColor=0x0 SplashRunningTextFontSize=19 -currentVersion=Autopsy 4.4.2 +currentVersion=Autopsy 4.5.0 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 2ac51b0cbd..fa55dddb62 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Thu, 07 Sep 2017 13:53:53 -0400 -CTL_MainWindow_Title=Autopsy 4.4.2 -CTL_MainWindow_Title_No_Project=Autopsy 4.4.2 +#Wed, 08 Nov 2017 17:45:11 -0500 +CTL_MainWindow_Title=Autopsy 4.5.0 +CTL_MainWindow_Title_No_Project=Autopsy 4.5.0 From b230a3c63d3b10d0d9dd6063234b0561853fe9de Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Mon, 20 Nov 2017 09:12:01 -0500 Subject: [PATCH 10/17] Remove central repo hashset name properties file --- .../modules/hashdatabase/HashDbManager.java | 52 ------------------- 1 file changed, 52 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java index 8d03c4e37a..975f36b993 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java @@ -700,44 +700,6 @@ public class HashDbManager implements PropertyChangeListener { } } } - - /** - * Save any newly created central repo databases to the properties file. - * @param newHashSets - */ - static void saveNewCentralRepoDatabases(List newHashSets){ - - if(! newHashSets.isEmpty()){ - String newDbs = ""; - for(CentralRepoHashDb db:newHashSets){ - newDbs += makeCentralRepoHashSetString(db); - } - String oldSetting = ModuleSettings.getConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY); - String newSetting = ""; - if((oldSetting != null) && (! oldSetting.isEmpty())){ - newSetting = oldSetting; - } - newSetting += newDbs; - ModuleSettings.setConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY, newSetting); - } - } - - /** - * Check whether a given central repository hash set was created on this machine. - * @return true if it was created on this machine, false otherwise - */ - static boolean centralRepoWasCreatedLocally(CentralRepoHashDb db){ - String setting = ModuleSettings.getConfigSetting(CENTRAL_REPO_HASH_SET_SETTINGS, CENTRAL_REPO_HASH_SET_LOCAL_KEY); - String dbStr = makeCentralRepoHashSetString(db); - if(setting == null){ - return false; - } - return setting.contains(dbStr); - } - - private static String makeCentralRepoHashSetString(CentralRepoHashDb db){ - return "|" + db.getReferenceSetID() + "." + db.getHashSetName() + "." 
+ db.getVersion() + "|"; - } private boolean hashDbInfoIsNew(HashDbInfo dbInfo){ for(HashDatabase db:this.hashSets){ @@ -810,8 +772,6 @@ public class HashDbManager implements PropertyChangeListener { public HashDb.KnownFilesType getKnownFilesType(); public boolean getSearchDuringIngest(); - - public boolean getDefaultSearchDuringIngest(); void setSearchDuringIngest(boolean useForIngest); @@ -985,12 +945,6 @@ public class HashDbManager implements PropertyChangeListener { public boolean getSearchDuringIngest() { return searchDuringIngest; } - - @Override - public boolean getDefaultSearchDuringIngest(){ - // File type hash sets are on by default - return true; - } @Override public void setSearchDuringIngest(boolean useForIngest) { @@ -1296,12 +1250,6 @@ public class HashDbManager implements PropertyChangeListener { public boolean getSearchDuringIngest() { return searchDuringIngest; } - - @Override - public boolean getDefaultSearchDuringIngest(){ - // Central repo hash sets are off by default, unless created on this machine - return centralRepoWasCreatedLocally(this); - } @Override public void setSearchDuringIngest(boolean useForIngest) { From 35048dc10a9a66550fd5304c3e97c9ced7f96069 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Mon, 20 Nov 2017 13:45:09 -0500 Subject: [PATCH 11/17] Cleanup --- .../autopsy/modules/hashdatabase/HashDbManager.java | 3 --- .../modules/hashdatabase/HashLookupModuleSettings.java | 2 +- .../core.jar/org/netbeans/core/startup/Bundle.properties | 4 ++-- .../org/netbeans/core/windows/view/ui/Bundle.properties | 6 +++--- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java index 5d7a776090..34770aa6fa 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbManager.java @@ -46,7 +46,6 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.EamGlobalSet; import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; -import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.modules.hashdatabase.HashLookupSettings.HashDbInfo; import org.sleuthkit.datamodel.AbstractFile; @@ -71,8 +70,6 @@ public class HashDbManager implements PropertyChangeListener { PropertyChangeSupport changeSupport = new PropertyChangeSupport(HashDbManager.class); private static final Logger logger = Logger.getLogger(HashDbManager.class.getName()); private boolean allDatabasesLoadedCorrectly = false; - private static final String CENTRAL_REPO_HASH_SET_SETTINGS = "CentralRepoHashSets"; - private static final String CENTRAL_REPO_HASH_SET_LOCAL_KEY = "LocallyCreatedHashsets"; /** * Property change event support In events: For both of these enums, the old diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java index 04d6d0f143..99dd50d291 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupModuleSettings.java @@ -135,7 +135,7 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings { } } - // We didn't find it, so use whatever default 
value is in the HashDb object + // We didn't find it, so use the value in the HashDb object return db.getSearchDuringIngest(); } diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties index 2922cd2054..0de39782ca 100644 --- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties +++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties @@ -1,5 +1,5 @@ #Updated by build script -#Thu, 07 Sep 2017 13:53:53 -0400 +#Wed, 08 Nov 2017 17:45:11 -0500 LBL_splash_window_title=Starting Autopsy SPLASH_HEIGHT=314 SPLASH_WIDTH=538 @@ -8,4 +8,4 @@ SplashRunningTextBounds=0,289,538,18 SplashRunningTextColor=0x0 SplashRunningTextFontSize=19 -currentVersion=Autopsy 4.4.2 +currentVersion=Autopsy 4.5.0 diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties index 2ac51b0cbd..fa55dddb62 100644 --- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties +++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties @@ -1,4 +1,4 @@ #Updated by build script -#Thu, 07 Sep 2017 13:53:53 -0400 -CTL_MainWindow_Title=Autopsy 4.4.2 -CTL_MainWindow_Title_No_Project=Autopsy 4.4.2 +#Wed, 08 Nov 2017 17:45:11 -0500 +CTL_MainWindow_Title=Autopsy 4.5.0 +CTL_MainWindow_Title_No_Project=Autopsy 4.5.0 From 49a631f351a276a08910a89e4796edaad133eecd Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 21 Nov 2017 11:17:52 -0500 Subject: [PATCH 12/17] Refactoring hash set import --- .../hashdatabase/EncaseHashSetParser.java | 83 ++-- .../HashDbImportDatabaseDialog.java | 5 +- .../modules/hashdatabase/HashSetParser.java | 49 +++ .../hashdatabase/IdxHashSetParser.java | 113 ++++++ .../ImportCentralRepoDbProgressDialog.java | 358 ++++++------------ 5 files changed, 322 insertions(+), 286 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java create mode 100644 Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java index df9d78b7a3..9d2d4709be 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -22,31 +22,24 @@ import java.io.InputStream; import java.io.BufferedInputStream; import java.io.FileInputStream; import java.io.IOException; -import java.lang.StringBuilder; import java.util.Arrays; -import java.util.List; -import java.util.ArrayList; import java.util.logging.Level; -import javax.swing.JOptionPane; import org.openide.util.NbBundle; -import org.sleuthkit.autopsy.core.RuntimeProperties; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; -class EncaseHashSetParser { - final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, +class EncaseHashSetParser implements HashSetParser { + private final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00}; - 
InputStream inputStream; - final int expectedHashes; - int totalHashesRead = 0; + private InputStream inputStream; + private final long expectedHashCount; + private int totalHashesRead = 0; /** * Opens the import file and parses the header. * @param filename The Encase hashset * @throws TskCoreException There was an error opening/reading the file or it is not the correct format */ - @NbBundle.Messages({"EncaseHashSetParser.fileOpenError.text=Error reading import file", - "EncaseHashSetParser.wrongFormat.text=Hashset is not Encase format"}) EncaseHashSetParser(String filename) throws TskCoreException{ try{ inputStream = new BufferedInputStream(new FileInputStream(filename)); @@ -55,16 +48,14 @@ class EncaseHashSetParser { byte[] header = new byte[16]; readBuffer(header, 16); if(! Arrays.equals(header, encaseHeader)){ - displayError(NbBundle.getMessage(this.getClass(), - "EncaseHashSetParser.wrongFormat.text")); close(); throw new TskCoreException("File " + filename + " does not have an Encase header"); } - // Read in the expected number of hashes + // Read in the expected number of hashes (little endian) byte[] sizeBuffer = new byte[4]; readBuffer(sizeBuffer, 4); - expectedHashes = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16) + expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16) | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); // Read in a bunch of nulls @@ -80,8 +71,6 @@ class EncaseHashSetParser { readBuffer(typeBuffer, 0x28); } catch (IOException ex){ - displayError(NbBundle.getMessage(this.getClass(), - "EncaseHashSetParser.fileOpenError.text")); close(); throw new TskCoreException("Error reading " + filename, ex); } catch (TskCoreException ex){ @@ -90,21 +79,34 @@ class EncaseHashSetParser { } } - int getExpectedHashes(){ - return expectedHashes; + /** + * Get the expected number of hashes in the file. + * This number can be an estimate. 
+ * @return The expected hash count + */ + @Override + public long getExpectedHashCount(){ + return expectedHashCount; } - synchronized boolean doneReading(){ - if(inputStream == null){ - return true; - } - - return(totalHashesRead >= expectedHashes); + /** + * Check if there are more hashes to read + * @return true if we've read all expected hash values, false otherwise + */ + @Override + public boolean doneReading(){ + return(totalHashesRead >= expectedHashCount); } - synchronized String getNextHash() throws TskCoreException{ + /** + * Get the next hash to import + * @return The hash as a string, or null if the end of file was reached without error + * @throws TskCoreException + */ + @Override + public String getNextHash() throws TskCoreException{ if(inputStream == null){ - return null; + throw new TskCoreException("Attempting to read from null inputStream"); } byte[] hashBytes = new byte[16]; @@ -122,14 +124,16 @@ class EncaseHashSetParser { totalHashesRead++; return sb.toString(); } catch (IOException ex){ - // Log it and return what we've got Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Ran out of data while reading Encase hash sets", ex); - close(); throw new TskCoreException("Error reading hash", ex); } } - synchronized final void close(){ + /** + * Closes the import file + */ + @Override + public final void close(){ if(inputStream != null){ try{ inputStream.close(); @@ -142,26 +146,13 @@ class EncaseHashSetParser { } @NbBundle.Messages({"EncaseHashSetParser.outOfData.text=Ran out of data while parsing file"}) - private synchronized void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { + private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { if(inputStream == null){ throw new TskCoreException("readBuffer called on null inputStream"); } if(length != inputStream.read(buffer)){ - displayError(NbBundle.getMessage(this.getClass(), - "EncaseHashSetParser.outOfData.text")); close(); - throw new TskCoreException("Ran out of data while parsing Encase file"); - } - } - - @NbBundle.Messages({"EncaseHashSetParser.error.title=Error importing Encase hashset"}) - private void displayError(String errorText){ - if(RuntimeProperties.runningWithGUI()){ - JOptionPane.showMessageDialog(null, - errorText, - NbBundle.getMessage(this.getClass(), - "EncaseHashSetParser.error.title"), - JOptionPane.ERROR_MESSAGE); + throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file"); } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index a91ea45a16..670b7e8d19 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -447,7 +447,8 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { @NbBundle.Messages({"HashDbImportDatabaseDialog.missingVersion=A version must be entered", "HashDbImportDatabaseDialog.missingOrg=An organization must be selected", "HashDbImportDatabaseDialog.duplicateName=A hashset with this name and version already exists", - "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository" + "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository", + "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered." 
}) private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed // Note that the error handlers in this method call return without disposing of the @@ -456,7 +457,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { if (hashSetNameTextField.getText().isEmpty()) { JOptionPane.showMessageDialog(this, NbBundle.getMessage(this.getClass(), - "HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg"), + "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg"), NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.importHashDbErr"), JOptionPane.ERROR_MESSAGE); diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java new file mode 100644 index 0000000000..fc45856af9 --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java @@ -0,0 +1,49 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011 - 2017 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.sleuthkit.autopsy.modules.hashdatabase; + +import org.sleuthkit.datamodel.TskCoreException; + +interface HashSetParser { + + /** + * Get the next hash to import + * @return The hash as a string, or null if the end of file was reached without error + * @throws TskCoreException + */ + String getNextHash() throws TskCoreException; + + /** + * Check if there are more hashes to read + * @return true if we've read all expected hash values, false otherwise + */ + boolean doneReading(); + + /** + * Get the expected number of hashes in the file. + * This number can be an estimate. + * @return The expected hash count + */ + long getExpectedHashCount(); + + /** + * Closes the import file + */ + void close(); +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java new file mode 100644 index 0000000000..9176002eda --- /dev/null +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java @@ -0,0 +1,113 @@ +/* + * Autopsy Forensic Browser + * + * Copyright 2011 - 2017 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.sleuthkit.autopsy.modules.hashdatabase; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.logging.Level; +import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.datamodel.TskCoreException; + +/** + * Parser for idx files + */ +class IdxHashSetParser implements HashSetParser { + private String filename; + private BufferedReader reader; + private final long totalHashes; + private boolean doneReading = false; + + IdxHashSetParser(String filename) throws TskCoreException{ + this.filename = filename; + try{ + reader = new BufferedReader(new FileReader(filename)); + } catch (FileNotFoundException ex){ + throw new TskCoreException("Error opening file " + filename, ex); + } + + // Estimate the total number of hashes in the file since counting them all can be slow + File importFile = new File(filename); + long fileSize = importFile.length(); + totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero + } + + /** + * Get the next hash to import + * @return The hash as a string, or null if the end of file was reached without error + * @throws TskCoreException + */ + @Override + public String getNextHash() throws TskCoreException { + String line; + + try{ + while ((line = reader.readLine()) != null) { + + String[] parts = line.split("\\|"); + + // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash + if (parts.length != 2 || parts[0].length() == 41) { + continue; + } + + return parts[0].toLowerCase(); + } + } catch (IOException ex){ + throw new TskCoreException("Error reading file " + filename, ex); + } + + // We've run out of data + doneReading = true; + return null; + } + + /** + * Check if there are more hashes to read + * @return true if we've read all expected hash values, false otherwise + */ + @Override + public boolean doneReading() { + return doneReading; + } + + /** + * Get the expected number of hashes in the file. + * This number can be an estimate. 
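+     * (For example, under the 51 byte (0x33) line length assumption used in
+     * the constructor, a 5,100,000 byte idx file is estimated at 100,001
+     * hashes.)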
+ * @return The expected hash count + */ + @Override + public long getExpectedHashCount() { + return totalHashes; + } + + /** + * Closes the import file + */ + @Override + public void close() { + try{ + reader.close(); + } catch (IOException ex){ + Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex); + } + } +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 240e4c19a5..9712edf904 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -19,12 +19,8 @@ package org.sleuthkit.autopsy.modules.hashdatabase; import java.awt.Color; -import java.awt.Cursor; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeEvent; -import java.io.BufferedReader; -import java.io.File; -import java.io.FileReader; import java.util.HashSet; import java.util.Set; import java.util.logging.Level; @@ -32,8 +28,8 @@ import javax.swing.JFrame; import javax.swing.SwingWorker; import javax.swing.WindowConstants; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.Executors; -import javax.swing.JOptionPane; import org.openide.util.NbBundle; import org.openide.windows.WindowManager; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttribute; @@ -50,18 +46,8 @@ import org.sleuthkit.datamodel.TskData; */ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{ - private CentralRepoImportWorker worker; - - /** - * - * @param hashSetName - * @param version - * @param orgId - * @param searchDuringIngest - * @param sendIngestMessages - * @param knownFilesType - * @param importFile - */ + private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress", }) ImportCentralRepoDbProgressDialog() { @@ -78,29 +64,16 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P bnOk.setEnabled(false); } - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.unknownFormat.message=Hash set to import is an unknown format"}) void importFile(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, String importFileName){ - setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); - - File importFile = new File(importFileName); - if(importFileName.toLowerCase().endsWith(".idx")){ - worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, - knownFilesType, readOnly, importFile); - } else if(importFileName.toLowerCase().endsWith(".hash")){ - worker = new ImportEncaseWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, - knownFilesType, readOnly, importFile); - } else { - // We've gotten here with a format that can't be processed - JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_unknownFormat_message()); - return; - } + boolean readOnly, String importFileName){ + + worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, 
sendIngestMessages, + knownFilesType, readOnly, importFileName); worker.addPropertyChangeListener(this); worker.execute(); setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow()); - setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR)); this.setVisible(true); } @@ -111,53 +84,67 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return null; } - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= hashes processed"}) + + /** + * Updates the dialog from events from the worker. + * The two events we handle are progress updates and + * the done event. + * @param evt + */ + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"}) @Override public void propertyChange(PropertyChangeEvent evt) { if("progress".equals(evt.getPropertyName())){ - progressBar.setValue(worker.getProgressPercentage()); + // The progress has been updated. Update the progress bar and text + progressBar.setValue(worker.getProgress()); lbProgress.setText(getProgressString()); } else if ("state".equals(evt.getPropertyName()) && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) { - // Disable cancel and enable ok + + // The worker is done processing + // Disable cancel button and enable ok bnCancel.setEnabled(false); bnOk.setEnabled(true); - if(worker.getError().isEmpty()){ + if(worker.getImportSuccess()){ + // If the import succeeded, finish the progress bar and display the + // total number of imported hashes progressBar.setValue(progressBar.getMaximum()); lbProgress.setText(getProgressString()); } else { + // If there was an error, reset the progress bar and display an error message progressBar.setValue(0); lbProgress.setForeground(Color.red); - lbProgress.setText(worker.getError()); + lbProgress.setText(Bundle.ImportCentralRepoDbProgressDialog_errorParsingFile_message()); } } } + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"}) private String getProgressString(){ - return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed(); + return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message(); } - abstract class CentralRepoImportWorker extends SwingWorker{ - final int HASH_IMPORT_THRESHOLD = 10000; - final String hashSetName; - final String version; - final int orgId; - final boolean searchDuringIngest; - final boolean sendIngestMessages; - final HashDbManager.HashDb.KnownFilesType knownFilesType; - final boolean readOnly; - final File importFile; - long totalHashes = 1; - int referenceSetID = -1; - HashDbManager.CentralRepoHashSet newHashDb = null; - final AtomicLong numLines = new AtomicLong(); - String errorString = ""; + class CentralRepoImportWorker extends SwingWorker{ + private final int HASH_IMPORT_THRESHOLD = 10000; + private final String hashSetName; + private final String version; + private final int orgId; + private final boolean searchDuringIngest; + private final boolean sendIngestMessages; + private final HashDbManager.HashDb.KnownFilesType knownFilesType; + private final boolean readOnly; + private final String importFileName; + private long totalHashes = 1; + private int referenceSetID = -1; + private HashDbManager.CentralRepoHashSet newHashDb = null; + private final AtomicLong numLines = new AtomicLong(); + private final AtomicBoolean importSuccess = new AtomicBoolean(); CentralRepoImportWorker(String hashSetName, String version, int orgId, boolean 
searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, File importFile){ + boolean readOnly, String importFileName){ this.hashSetName = hashSetName; this.version = version; @@ -166,11 +153,12 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.sendIngestMessages = sendIngestMessages; this.knownFilesType = knownFilesType; this.readOnly = readOnly; - this.importFile = importFile; + this.importFileName = importFileName; this.numLines.set(0); + this.importSuccess.set(false); } - HashDbManager.CentralRepoHashSet getDatabase(){ + synchronized HashDbManager.CentralRepoHashSet getDatabase(){ return newHashDb; } @@ -178,20 +166,81 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P return numLines.get(); } - int getProgressPercentage(){ - return this.getProgress(); + boolean getImportSuccess(){ + return importSuccess.get(); } - String getError(){ - return errorString; + @Override + protected Void doInBackground() throws Exception { + + // Create the hash set parser + HashSetParser hashSetParser; + if(importFileName.toLowerCase().endsWith(".idx")){ + hashSetParser = new IdxHashSetParser(importFileName); + } else + if(importFileName.toLowerCase().endsWith(".hash")){ + hashSetParser = new EncaseHashSetParser(importFileName); + } else { + // We've gotten here with a format that can't be processed + throw new TskCoreException("Hash set to import is an unknown format : " + importFileName); + } + + try{ + totalHashes = hashSetParser.getExpectedHashCount(); + + TskData.FileKnown knownStatus; + if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { + knownStatus = TskData.FileKnown.KNOWN; + } else { + knownStatus = TskData.FileKnown.BAD; + } + + // Create an empty hashset in the central repository + referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)); + + EamDb dbManager = EamDb.getInstance(); + CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type + + Set globalInstances = new HashSet<>(); + + while (! hashSetParser.doneReading()) { + if(isCancelled()){ + return null; + } + + String newHash = hashSetParser.getNextHash(); + + if(newHash != null){ + EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( + referenceSetID, + newHash, + knownStatus, + ""); + + globalInstances.add(eamGlobalFileInstance); + + if(numLines.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){ + dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); + globalInstances.clear(); + + int progress = (int)(numLines.get() * 100 / totalHashes); + if(progress < 100){ + this.setProgress(progress); + } else { + this.setProgress(99); + } + } + } + } + + dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); + this.setProgress(100); + return null; + } finally { + hashSetParser.close(); + } } - /** - * Should be called in the constructor to set the max number of hashes. - * The value can be updated later after parsing the import file. 
- */ - abstract void setEstimatedTotalHashes(); - void deleteIncompleteSet(){ if(referenceSetID >= 0){ @@ -209,10 +258,8 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importHashsetError=Error importing hash set", - "ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"}) @Override - protected void done() { + synchronized protected void done() { if(isCancelled()){ // If the user hit cancel, delete this incomplete hash set from the central repo @@ -226,184 +273,19 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, referenceSetID, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); + importSuccess.set(true); } catch (TskCoreException ex){ - JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message()); Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); } } catch (Exception ex) { // Delete this incomplete hash set from the central repo deleteIncompleteSet(); - errorString = Bundle.ImportCentralRepoDbProgressDialog_importHashsetError(); + Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex); } } } - class ImportEncaseWorker extends CentralRepoImportWorker{ - - ImportEncaseWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, File importFile){ - super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); - - setEstimatedTotalHashes(); - } - - - /** - * Encase files have a 0x480 byte header, then each hash takes 18 bytes - * @return Approximate number of hashes in the file - */ - @Override - final void setEstimatedTotalHashes(){ - long fileSize = importFile.length(); - if(fileSize < 0x492){ - totalHashes = 1; // There's room for at most one hash - } - totalHashes = (fileSize - 0x492) / 18; - } - - @Override - protected Void doInBackground() throws Exception { - - EncaseHashSetParser encaseParser = new EncaseHashSetParser(importFile.getAbsolutePath()); - totalHashes = encaseParser.getExpectedHashes(); - - TskData.FileKnown knownStatus; - if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { - knownStatus = TskData.FileKnown.KNOWN; - } else { - knownStatus = TskData.FileKnown.BAD; - } - - // Create an empty hashset in the central repository - referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)); - - EamDb dbManager = EamDb.getInstance(); - CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type - - Set globalInstances = new HashSet<>(); - - while (! 
encaseParser.doneReading()) { - if(isCancelled()){ - return null; - } - - String newHash = encaseParser.getNextHash(); - - if(newHash != null){ - EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - referenceSetID, - newHash, - knownStatus, - ""); - - globalInstances.add(eamGlobalFileInstance); - numLines.incrementAndGet(); - - if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){ - dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); - globalInstances.clear(); - - int progress = (int)(numLines.get() * 100 / totalHashes); - if(progress < 100){ - this.setProgress(progress); - } else { - this.setProgress(99); - } - } - } - } - - dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); - this.setProgress(100); - return null; - } - } - - - class ImportIDXWorker extends CentralRepoImportWorker{ - - ImportIDXWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, File importFile){ - super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile); - - setEstimatedTotalHashes(); - } - - /** - * Doing an actual count of the number of lines in a large idx file (such - * as the nsrl) is slow, so just get something in the general area for the - * progress bar. - * @return Approximate number of hashes in the file - */ - @Override - final void setEstimatedTotalHashes(){ - long fileSize = importFile.length(); - totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero - } - - @Override - protected Void doInBackground() throws Exception { - - TskData.FileKnown knownStatus; - if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { - knownStatus = TskData.FileKnown.KNOWN; - } else { - knownStatus = TskData.FileKnown.BAD; - } - - // Create an empty hashset in the central repository - referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)); - - EamDb dbManager = EamDb.getInstance(); - CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type - BufferedReader reader = new BufferedReader(new FileReader(importFile)); - String line; - Set globalInstances = new HashSet<>(); - - while ((line = reader.readLine()) != null) { - if(isCancelled()){ - return null; - } - - String[] parts = line.split("\\|"); - - // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash - if (parts.length != 2 || parts[0].length() == 41) { - continue; - } - - EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - referenceSetID, - parts[0].toLowerCase(), - knownStatus, - ""); - - globalInstances.add(eamGlobalFileInstance); - numLines.incrementAndGet(); - - if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){ - dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); - globalInstances.clear(); - - int progress = (int)(numLines.get() * 100 / totalHashes); - if(progress < 100){ - this.setProgress(progress); - } else { - this.setProgress(99); - } - } - } - - dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); - this.setProgress(100); - - return null; - } - } - /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. 
The content of this method is always From cc5c98fcb06f6a978e008eaa18ae9f9ae3afa415 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 21 Nov 2017 13:15:04 -0500 Subject: [PATCH 13/17] Cleanup --- .../hashdatabase/EncaseHashSetParser.java | 26 +++--- .../hashdatabase/IdxHashSetParser.java | 10 +-- .../ImportCentralRepoDbProgressDialog.java | 79 ++++++++++++++----- 3 files changed, 79 insertions(+), 36 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java index 9d2d4709be..cfc4b0b384 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -24,24 +24,30 @@ import java.io.FileInputStream; import java.io.IOException; import java.util.Arrays; import java.util.logging.Level; -import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; +/** + * Parser for Encase format hash sets (*.hash) + */ class EncaseHashSetParser implements HashSetParser { private final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00}; - private InputStream inputStream; - private final long expectedHashCount; - private int totalHashesRead = 0; + private final String filename; // Name of the input file (saved for logging) + private InputStream inputStream; // File stream for file being imported + private final long expectedHashCount; // Number of hashes we expect to read from the file + private int totalHashesRead = 0; // Number of hashes that have been read /** * Opens the import file and parses the header. - * @param filename The Encase hashset + * If this is successful, the file will be set up to call getNextHash() to + * read the hash values. 
+ * @param filename The Encase hash set * @throws TskCoreException There was an error opening/reading the file or it is not the correct format */ EncaseHashSetParser(String filename) throws TskCoreException{ try{ + this.filename = filename; inputStream = new BufferedInputStream(new FileInputStream(filename)); // Read in and test the 16 byte header @@ -70,6 +76,8 @@ class EncaseHashSetParser implements HashSetParser { byte[] typeBuffer = new byte[0x28]; readBuffer(typeBuffer, 0x28); + // At this point we're past the header and ready to read in the hashes + } catch (IOException ex){ close(); throw new TskCoreException("Error reading " + filename, ex); @@ -124,8 +132,7 @@ class EncaseHashSetParser implements HashSetParser { totalHashesRead++; return sb.toString(); } catch (IOException ex){ - Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Ran out of data while reading Encase hash sets", ex); - throw new TskCoreException("Error reading hash", ex); + throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex); } } @@ -138,21 +145,20 @@ class EncaseHashSetParser implements HashSetParser { try{ inputStream.close(); } catch (IOException ex){ - Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set", ex); + Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex); } finally { inputStream = null; } } } - @NbBundle.Messages({"EncaseHashSetParser.outOfData.text=Ran out of data while parsing file"}) private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { if(inputStream == null){ throw new TskCoreException("readBuffer called on null inputStream"); } if(length != inputStream.read(buffer)){ close(); - throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file"); + throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename); } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java index 9176002eda..af66b994e1 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java @@ -28,13 +28,13 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; /** - * Parser for idx files + * Parser for idx files (*.idx) */ class IdxHashSetParser implements HashSetParser { - private String filename; - private BufferedReader reader; - private final long totalHashes; - private boolean doneReading = false; + private final String filename; // Name of the input file (saved for logging) + private BufferedReader reader; // Input file + private final long totalHashes; // Estimated number of hashes + private boolean doneReading = false; // Flag for if we've hit the end of the file IdxHashSetParser(String filename) throws TskCoreException{ this.filename = filename; diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 9712edf904..12f62ba8b1 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -29,6 +29,7 @@ import 
javax.swing.SwingWorker; import javax.swing.WindowConstants; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.Executors; import org.openide.util.NbBundle; import org.openide.windows.WindowManager; @@ -42,7 +43,7 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * + * Imports a hash set into the central repository and updates a progress dialog */ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{ @@ -60,10 +61,24 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } private void customizeComponents(){ + // This is preventing the user from closing the dialog using the X setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); + bnOk.setEnabled(false); } + /** + * Import the selected hash set into the central repository. + * Will bring up a progress dialog while the import is in progress. + * @param hashSetName + * @param version + * @param orgId + * @param searchDuringIngest + * @param sendIngestMessages + * @param knownFilesType + * @param readOnly + * @param importFileName + */ void importFile(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, boolean readOnly, String importFileName){ @@ -77,6 +92,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.setVisible(true); } + /** + * Get the HashDb object for the newly imported data. + * Should be called after importFile completes. + * @return The new HashDb object or null if the import failed/was canceled + */ HashDbManager.HashDb getDatabase(){ if(worker != null){ return worker.getDatabase(); @@ -123,10 +143,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"}) private String getProgressString(){ - return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message(); + return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message(); } - class CentralRepoImportWorker extends SwingWorker{ + private class CentralRepoImportWorker extends SwingWorker{ private final int HASH_IMPORT_THRESHOLD = 10000; private final String hashSetName; private final String version; @@ -136,10 +156,9 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private final HashDbManager.HashDb.KnownFilesType knownFilesType; private final boolean readOnly; private final String importFileName; - private long totalHashes = 1; - private int referenceSetID = -1; private HashDbManager.CentralRepoHashSet newHashDb = null; - private final AtomicLong numLines = new AtomicLong(); + private final AtomicInteger referenceSetID = new AtomicInteger(); + private final AtomicLong hashCount = new AtomicLong(); private final AtomicBoolean importSuccess = new AtomicBoolean(); CentralRepoImportWorker(String hashSetName, String version, int orgId, @@ -154,18 +173,31 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.knownFilesType = knownFilesType; this.readOnly = readOnly; this.importFileName = importFileName; - this.numLines.set(0); + this.hashCount.set(0); this.importSuccess.set(false); + this.referenceSetID.set(-1); } + 
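+        // Note: hashCount, importSuccess and referenceSetID are atomic types
+        // because they are shared between the SwingWorker background thread
+        // (doInBackground()) and the EDT (propertyChange() and done()) without
+        // any other locking.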
/**
+         * Get the newly created database
+         * @return the imported database. May be null if an error occurred or the user canceled
+         */
        synchronized HashDbManager.CentralRepoHashSet getDatabase(){
            return newHashDb;
        }

-        long getLinesProcessed(){
-            return numLines.get();
+        /**
+         * Get the number of hashes that have been read in so far
+         * @return current hash count
+         */
+        long getNumHashesProcessed(){
+            return hashCount.get();
        }

+        /**
+         * Check if the import was successful or if there was an error.
+         * @return true if the import process completed without error, false otherwise
+         */
        boolean getImportSuccess(){
            return importSuccess.get();
        }
@@ -186,8 +218,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
            }

            try{
-                totalHashes = hashSetParser.getExpectedHashCount();
-
+                // Convert to the FileKnown enum used by EamGlobalSet
                TskData.FileKnown knownStatus;
                if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
                    knownStatus = TskData.FileKnown.KNOWN;
@@ -196,11 +227,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                }

                // Create an empty hashset in the central repository
-                referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));
-
                EamDb dbManager = EamDb.getInstance();
-                CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
+                referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)));

+                // Get the "FILES" content type. This is a database lookup so we
+                // only want to do it once.
+                CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID);
+
+                // Holds the current batch of hashes that need to be written to the central repo
                Set globalInstances = new HashSet<>();

                while (!
hashSetParser.doneReading()) { @@ -212,18 +246,20 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P if(newHash != null){ EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - referenceSetID, + referenceSetID.get(), newHash, knownStatus, ""); globalInstances.add(eamGlobalFileInstance); - if(numLines.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){ + // If we've hit the threshold for writing the hashes, write them + // all to the central repo + if(hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){ dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); globalInstances.clear(); - int progress = (int)(numLines.get() * 100 / totalHashes); + int progress = (int)(hashCount.get() * 100 / hashSetParser.getExpectedHashCount()); if(progress < 100){ this.setProgress(progress); } else { @@ -233,6 +269,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } + // Add any remaining hashes to the central repo dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); this.setProgress(100); return null; @@ -241,15 +278,15 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } - void deleteIncompleteSet(){ - if(referenceSetID >= 0){ + private void deleteIncompleteSet(){ + if(referenceSetID.get() >= 0){ // This can be slow on large reference sets Executors.newSingleThreadExecutor().execute(new Runnable() { @Override public void run() { try{ - EamDb.getInstance().deleteReferenceSet(referenceSetID); + EamDb.getInstance().deleteReferenceSet(referenceSetID.get()); } catch (EamDbException ex2){ Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); } @@ -271,7 +308,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P get(); try{ newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, - referenceSetID, + referenceSetID.get(), searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); importSuccess.set(true); } catch (TskCoreException ex){ From 05615a4b15b022b21de70f624cb69c1c3ad83859 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 21 Nov 2017 14:43:50 -0500 Subject: [PATCH 14/17] Add kdb parser --- .../HashDbImportDatabaseDialog.java | 4 +- .../ImportCentralRepoDbProgressDialog.java | 5 +- .../hashdatabase/KdbHashSetParser.java | 147 ++++++++++++++++++ 3 files changed, 152 insertions(+), 4 deletions(-) create mode 100644 Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index 670b7e8d19..f0055fc181 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -88,11 +88,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { fileChooser.setMultiSelectionEnabled(false); } - @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx or .hash only)"}) + @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.kdb, .idx or .hash)"}) private void updateFileChooserFilter() { fileChooser.resetChoosableFileFilters(); if(centralRepoRadioButton.isSelected()){ - 
String[] EXTENSION = new String[]{"hash", "Hash", "idx"}; //NON-NLS
+            String[] EXTENSION = new String[]{"kdb", "idx", "hash", "Hash"}; //NON-NLS
             FileNameExtensionFilter filter = new FileNameExtensionFilter(
                     NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.centralRepoExtFilter.text"), EXTENSION);
             fileChooser.setFileFilter(filter);
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
index 12f62ba8b1..0a6e6cf644 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
@@ -209,9 +209,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
             HashSetParser hashSetParser;
             if(importFileName.toLowerCase().endsWith(".idx")){
                 hashSetParser = new IdxHashSetParser(importFileName);
-            } else
-                if(importFileName.toLowerCase().endsWith(".hash")){
+            } else if(importFileName.toLowerCase().endsWith(".hash")){
                 hashSetParser = new EncaseHashSetParser(importFileName);
+            } else if(importFileName.toLowerCase().endsWith(".kdb")){
+                hashSetParser = new KdbHashSetParser(importFileName);
             } else {
                 // We've gotten here with a format that can't be processed
                 throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
             }
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
new file mode 100644
index 0000000000..944780936b
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
@@ -0,0 +1,147 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 - 2017 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.hashdatabase;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Parser for kdb format hash sets (*.kdb). These are SQLite databases; the
+ * hashes are read out of the md5 column of the hashes table.
+ */
+public class KdbHashSetParser implements HashSetParser {
+    private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
+    private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS
+
+    private final String filename;   // Name of the input file (saved for logging)
+    private final long totalHashes;  // Total number of hashes - exact, since it comes from SELECT count(*)
+    private int totalHashesRead = 0; // Number of hashes that have been read
+    private Connection conn;
+    private Statement statement;
+    private ResultSet resultSet;
+
+    KdbHashSetParser(String filename) throws TskCoreException{
+        this.filename = filename;
+
+        conn = null;
+        statement = null;
+        resultSet = null;
+
+        try{
+            // Open the database
+            StringBuilder connectionURL = new StringBuilder();
+            connectionURL.append(JDBC_BASE_URI);
+            connectionURL.append(filename);
+            Class.forName(JDBC_DRIVER);
+            conn = DriverManager.getConnection(connectionURL.toString());
+
+            // Get the number of hashes in the table
+            statement = conn.createStatement();
+            resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes");
+            if (resultSet.next()) {
+                totalHashes = resultSet.getLong("count");
+            } else {
+                close();
+                throw new TskCoreException("Error getting hash count from database " + filename);
+            }
+
+            // Get the hashes
+            resultSet = statement.executeQuery("SELECT md5 FROM hashes");
+
+            // At this point, getNextHash can read each hash from the result set
+        } catch (ClassNotFoundException | SQLException ex){
+            throw new TskCoreException("Error opening/reading database " + filename, ex);
+        }
+    }
+
+    /**
+     * Get the next hash to import
+     * @return The hash as a string, or null if the end of file was reached without error
+     * @throws TskCoreException
+     */
+    @Override
+    public String getNextHash() throws TskCoreException {
+        try{
+            if(resultSet.next()){
+                byte[] hashBytes = resultSet.getBytes("md5");
+                StringBuilder sb = new StringBuilder();
+                for (byte b : hashBytes) {
+                    sb.append(String.format("%02x", b));
+                }
+
+                if(sb.toString().length() != 32){
+                    throw new TskCoreException("Hash has incorrect length: " + sb.toString());
+                }
+
+                totalHashesRead++;
+                return sb.toString();
+            } else {
+                throw new TskCoreException("Could not read expected number of hashes from database " + filename);
+            }
+        } catch (SQLException ex){
+            throw new TskCoreException("Error reading hash from result set for database " + filename, ex);
+        }
+    }
+
+    /**
+     * Check if there are more hashes to read
+     * @return true if we've read all expected hash values, false otherwise
+     */
+    @Override
+    public boolean doneReading() {
+        return(totalHashesRead >= totalHashes);
+    }
+
+    /**
+     * Get the expected number of hashes in the file.
+     * This number can be an estimate.
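+     * (For this parser the value is actually exact rather than an estimate,
+     * since the constructor reads it with SELECT count(*) FROM hashes.)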
+ * @return The expected hash count + */ + @Override + public long getExpectedHashCount() { + return totalHashes; + } + + /** + * Closes the import file + */ + @Override + public final void close() { + if(statement != null){ + try { + statement.close(); + } catch (SQLException ex) { + Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex); + } + } + + if(resultSet != null){ + try { + resultSet.close(); + } catch (SQLException ex) { + Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex); + } + } + + if(conn != null){ + try { + conn.close(); + } catch (SQLException ex) { + Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex); + } + } + } +} From 1c646aa184e06b3b593078a9fdaaf7eeb9157deb Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 21 Nov 2017 14:47:08 -0500 Subject: [PATCH 15/17] Fixed version check to only apply if hash set is read only --- .../modules/hashdatabase/HashDbImportDatabaseDialog.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java index 670b7e8d19..2937d53983 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java @@ -465,7 +465,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog { } if(centralRepoRadioButton.isSelected()){ - if(versionTextField.getText().isEmpty()){ + if(readOnlyCheckbox.isSelected() && versionTextField.getText().isEmpty()){ JOptionPane.showMessageDialog(this, NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.missingVersion"), From 0057a18c49aea94c2b8944bb52631551f8a17493 Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Tue, 21 Nov 2017 15:41:09 -0500 Subject: [PATCH 16/17] Move clearing of list of new hash sets Formatting --- .../hashdatabase/EncaseHashSetParser.java | 93 ++++----- .../hashdatabase/HashLookupSettingsPanel.java | 112 +++++------ .../modules/hashdatabase/HashSetParser.java | 20 +- .../hashdatabase/IdxHashSetParser.java | 45 +++-- .../ImportCentralRepoDbProgressDialog.java | 179 +++++++++--------- 5 files changed, 236 insertions(+), 213 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java index cfc4b0b384..8ea9bc3c46 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -31,95 +31,100 @@ import org.sleuthkit.datamodel.TskCoreException; * Parser for Encase format hash sets (*.hash) */ class EncaseHashSetParser implements HashSetParser { - private final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, - (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00}; + + private final byte[] encaseHeader = {(byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00, + (byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00}; private final String filename; // Name of the input file (saved for logging) 
private InputStream inputStream; // File stream for file being imported private final long expectedHashCount; // Number of hashes we expect to read from the file private int totalHashesRead = 0; // Number of hashes that have been read - + /** - * Opens the import file and parses the header. - * If this is successful, the file will be set up to call getNextHash() to - * read the hash values. + * Opens the import file and parses the header. If this is successful, the + * file will be set up to call getNextHash() to read the hash values. + * * @param filename The Encase hash set - * @throws TskCoreException There was an error opening/reading the file or it is not the correct format + * @throws TskCoreException There was an error opening/reading the file or + * it is not the correct format */ - EncaseHashSetParser(String filename) throws TskCoreException{ - try{ + EncaseHashSetParser(String filename) throws TskCoreException { + try { this.filename = filename; inputStream = new BufferedInputStream(new FileInputStream(filename)); - + // Read in and test the 16 byte header byte[] header = new byte[16]; readBuffer(header, 16); - if(! Arrays.equals(header, encaseHeader)){ + if (!Arrays.equals(header, encaseHeader)) { close(); throw new TskCoreException("File " + filename + " does not have an Encase header"); } - + // Read in the expected number of hashes (little endian) byte[] sizeBuffer = new byte[4]; readBuffer(sizeBuffer, 4); expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16) - | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); - + | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); + // Read in a bunch of nulls byte[] filler = new byte[0x3f4]; readBuffer(filler, 0x3f4); - + // Read in the hash set name byte[] nameBuffer = new byte[0x50]; readBuffer(nameBuffer, 0x50); - + // Read in the hash set type byte[] typeBuffer = new byte[0x28]; - readBuffer(typeBuffer, 0x28); - + readBuffer(typeBuffer, 0x28); + // At this point we're past the header and ready to read in the hashes - - } catch (IOException ex){ + } catch (IOException ex) { close(); throw new TskCoreException("Error reading " + filename, ex); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { close(); throw ex; } } - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. 
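+     * (For example, a count field holding bytes 10 27 00 00 decodes, little
+     * endian, to 0x2710 = 10,000 expected hashes.)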
+ * * @return The expected hash count */ @Override - public long getExpectedHashCount(){ + public long getExpectedHashCount() { return expectedHashCount; } - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ @Override - public boolean doneReading(){ - return(totalHashesRead >= expectedHashCount); + public boolean doneReading() { + return (totalHashesRead >= expectedHashCount); } - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ @Override - public String getNextHash() throws TskCoreException{ - if(inputStream == null){ + public String getNextHash() throws TskCoreException { + if (inputStream == null) { throw new TskCoreException("Attempting to read from null inputStream"); } - + byte[] hashBytes = new byte[16]; byte[] divider = new byte[2]; - try{ + try { readBuffer(hashBytes, 16); readBuffer(divider, 2); @@ -131,32 +136,32 @@ class EncaseHashSetParser implements HashSetParser { totalHashesRead++; return sb.toString(); - } catch (IOException ex){ + } catch (IOException ex) { throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex); } } - + /** * Closes the import file */ @Override - public final void close(){ - if(inputStream != null){ - try{ + public final void close() { + if (inputStream != null) { + try { inputStream.close(); - } catch (IOException ex){ + } catch (IOException ex) { Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex); } finally { inputStream = null; } } } - + private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { - if(inputStream == null){ + if (inputStream == null) { throw new TskCoreException("readBuffer called on null inputStream"); } - if(length != inputStream.read(buffer)){ + if (length != inputStream.read(buffer)) { close(); throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename); } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index e7bfcecf1e..17c629c50a 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -127,7 +127,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan hashDbOrgLabel.setText(NO_SELECTION_TEXT); hashDbReadOnlyLabel.setText(NO_SELECTION_TEXT); indexPathLabel.setText(NO_SELECTION_TEXT); - // Update indexing components. hashDbIndexStatusLabel.setText(NO_SELECTION_TEXT); @@ -162,14 +161,14 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // Update descriptive labels. 
hashDbNameLabel.setText(db.getHashSetName()); - hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName()); - try{ - if(db.isUpdateable()){ + hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName()); + try { + if (db.isUpdateable()) { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_editable()); } else { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_readOnly()); } - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_updateStatusError()); } @@ -180,30 +179,30 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan addHashesToDatabaseButton.setEnabled(false); } - if(db instanceof SleuthkitHashSet){ - SleuthkitHashSet hashDb = (SleuthkitHashSet)db; - + if (db instanceof SleuthkitHashSet) { + SleuthkitHashSet hashDb = (SleuthkitHashSet) db; + // Disable the central repo fields hashDbVersionLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbOrgLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); - + // Enable the delete button if ingest is not running deleteDatabaseButton.setEnabled(!ingestIsRunning); - + try { hashDbLocationLabel.setText(shortenPath(db.getDatabasePath())); } catch (TskCoreException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex); //NON-NLS hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT); } - + try { indexPathLabel.setText(shortenPath(hashDb.getIndexPath())); } catch (TskCoreException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex); //NON-NLS indexPathLabel.setText(ERROR_GETTING_PATH_TEXT); } - + // Update indexing components. try { if (hashDb.isIndexing()) { @@ -245,15 +244,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan indexButton.setEnabled(false); } } else { - + // Disable the file type fields/buttons indexPathLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbIndexStatusLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbLocationLabel.setText(Bundle.HashLookupSettingsPanel_centralRepo()); indexButton.setEnabled(false); deleteDatabaseButton.setEnabled(false); - - CentralRepoHashSet crDb = (CentralRepoHashSet)db; + + CentralRepoHashSet crDb = (CentralRepoHashSet) db; hashDbVersionLabel.setText(crDb.getVersion()); hashDbOrgLabel.setText(crDb.getOrgName()); @@ -302,13 +301,17 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan @Override @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.", "HashLookupSettingsPanel.saveFail.title=Save Fail"}) - public void saveSettings() { + public void saveSettings() { + + // Clear out the list of unsaved hashes + newReferenceSetIDs.clear(); + //Checking for for any unindexed databases List unindexed = new ArrayList<>(); for (HashDb db : hashSetManager.getAllHashSets()) { - if(db instanceof SleuthkitHashSet){ + if (db instanceof SleuthkitHashSet) { try { - SleuthkitHashSet hashDatabase = (SleuthkitHashSet)db; + SleuthkitHashSet hashDatabase = (SleuthkitHashSet) db; if (!hashDatabase.hasIndex()) { unindexed.add(hashDatabase); } @@ -320,10 +323,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // If there are unindexed databases, give the user the option to index them now. 
This // needs to be on the EDT, and will save the hash settings after completing - if(! unindexed.isEmpty()){ - SwingUtilities.invokeLater(new Runnable(){ + if (!unindexed.isEmpty()) { + SwingUtilities.invokeLater(new Runnable() { @Override - public void run(){ + public void run() { //If unindexed ones are found, show a popup box that will either index them, or remove them. if (unindexed.size() == 1) { showInvalidIndex(false, unindexed); @@ -335,7 +338,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } else { try { hashSetManager.save(); - newReferenceSetIDs.clear(); } catch (HashDbManager.HashDbManagerException ex) { SwingUtilities.invokeLater(() -> { JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_saveFail_message(), Bundle.HashLookupSettingsPanel_saveFail_title(), JOptionPane.ERROR_MESSAGE); @@ -363,20 +365,20 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan */ if (IngestManager.getInstance().isIngestRunning() == false) { // Remove any new central repo hash sets from the database - for(int refID:newReferenceSetIDs){ - try{ - if(EamDb.isEnabled()){ + for (int refID : newReferenceSetIDs) { + try { + if (EamDb.isEnabled()) { EamDb.getInstance().deleteReferenceSet(refID); } else { // This is the case where the user imported a database, then switched over to the central // repo panel and disabled it before cancelling. We can't delete the database at this point. Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.WARNING, "Error reverting central repository hash sets"); //NON-NLS } - } catch (EamDbException ex){ + } catch (EamDbException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS } } - + HashDbManager.getInstance().loadLastSavedConfiguration(); } } @@ -398,7 +400,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan * unindexed, along with solutions. This method is related to * ModalNoButtons, to be removed at a later date. * - * @param plural Whether or not there are multiple unindexed databases + * @param plural Whether or not there are multiple unindexed databases * @param unindexed The list of unindexed databases. Can be of size 1. 
*/ private void showInvalidIndex(boolean plural, List unindexed) { @@ -471,8 +473,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan getSelectionModel().setSelectionInterval(index, index); } } - - public void selectRowByDatabase(HashDb db){ + + public void selectRowByDatabase(HashDb db) { setSelection(hashSetTableModel.getIndexByDatabase(db)); } @@ -510,7 +512,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan return hashSets.get(rowIndex).getDisplayName(); } - private boolean isValid(int rowIndex) { + private boolean isValid(int rowIndex) { try { return hashSets.get(rowIndex).isValid(); } catch (TskCoreException ex) { @@ -543,15 +545,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } } - int getIndexByDatabase(HashDb db){ + int getIndexByDatabase(HashDb db) { for (int i = 0; i < hashSets.size(); ++i) { if (hashSets.get(i).equals(db)) { return i; } } - return -1; + return -1; } - + @Deprecated int getIndexByName(String name) { for (int i = 0; i < hashSets.size(); ++i) { @@ -934,11 +936,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan private void createDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createDatabaseButtonActionPerformed HashDb hashDb = new HashDbCreateDatabaseDialog().getHashDatabase(); if (null != hashDb) { - if(hashDb instanceof CentralRepoHashSet){ - int newDbIndex = ((CentralRepoHashSet)hashDb).getReferenceSetID(); + if (hashDb instanceof CentralRepoHashSet) { + int newDbIndex = ((CentralRepoHashSet) hashDb).getReferenceSetID(); newReferenceSetIDs.add(newDbIndex); } - + hashSetTableModel.refreshModel(); ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb); firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); @@ -960,7 +962,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // Add a listener for the INDEXING_DONE event. This listener will update // the UI. 
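// ---------------------------------------------------------------------------
// A self-contained sketch of the SwingWorker listener pattern this patch relies
// on, both for the INDEXING_DONE listener just below and for the progress
// dialog's propertyChange() handler later in this patch: setProgress() fires
// "progress" events, and completion fires a "state" event with StateValue.DONE.
// All names here are illustrative, not part of this patch.
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import javax.swing.SwingWorker;

public class WorkerListenerSketch {

    public static void main(String[] args) throws Exception {
        SwingWorker<Void, Void> worker = new SwingWorker<Void, Void>() {
            @Override
            protected Void doInBackground() throws Exception {
                for (int i = 0; i <= 100; i += 10) {
                    setProgress(i);    // fires a "progress" PropertyChangeEvent
                    Thread.sleep(20);  // stand-in for real work
                }
                return null;
            }
        };
        worker.addPropertyChangeListener(new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
                if ("progress".equals(evt.getPropertyName())) {
                    // Note: setProgress() coalesces rapid updates, so not every
                    // value is guaranteed to be delivered.
                    System.out.println("progress: " + evt.getNewValue() + "%");
                } else if ("state".equals(evt.getPropertyName())
                        && SwingWorker.StateValue.DONE.equals(evt.getNewValue())) {
                    System.out.println("worker finished");
                }
            }
        });
        worker.execute();
        worker.get();       // demo only: wait for doInBackground() to return
        Thread.sleep(200);  // demo only: let queued EDT notifications print
    }
}
// ---------------------------------------------------------------------------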
- SleuthkitHashSet hashDb = (SleuthkitHashSet)hashDatabase; + SleuthkitHashSet hashDb = (SleuthkitHashSet) hashDatabase; hashDb.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { @@ -988,11 +990,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan private void importDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importDatabaseButtonActionPerformed HashDb hashDb = new HashDbImportDatabaseDialog().getHashDatabase(); if (null != hashDb) { - if(hashDb instanceof CentralRepoHashSet){ - int newReferenceSetID = ((CentralRepoHashSet)hashDb).getReferenceSetID(); + if (hashDb instanceof CentralRepoHashSet) { + int newReferenceSetID = ((CentralRepoHashSet) hashDb).getReferenceSetID(); newReferenceSetIDs.add(newReferenceSetID); } - + hashSetTableModel.refreshModel(); ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb); firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); @@ -1002,21 +1004,21 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan @Messages({}) private void deleteDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteDatabaseButtonActionPerformed if (JOptionPane.showConfirmDialog(null, - NbBundle.getMessage(this.getClass(), - "HashDbConfigPanel.deleteDbActionConfirmMsg"), - NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"), - JOptionPane.YES_NO_OPTION, - JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { - HashDb hashDb = ((HashSetTable) hashSetTable).getSelection(); - if (hashDb != null) { - try { - hashSetManager.removeHashDatabaseNoSave(hashDb); - } catch (HashDbManager.HashDbManagerException ex) { - JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName())); + NbBundle.getMessage(this.getClass(), + "HashDbConfigPanel.deleteDbActionConfirmMsg"), + NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"), + JOptionPane.YES_NO_OPTION, + JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { + HashDb hashDb = ((HashSetTable) hashSetTable).getSelection(); + if (hashDb != null) { + try { + hashSetManager.removeHashDatabaseNoSave(hashDb); + } catch (HashDbManager.HashDbManagerException ex) { + JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName())); + } + hashSetTableModel.refreshModel(); + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } - hashSetTableModel.refreshModel(); - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } } }//GEN-LAST:event_deleteDatabaseButtonActionPerformed diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java index fc45856af9..8a7a3ae034 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java @@ -21,27 +21,31 @@ package org.sleuthkit.autopsy.modules.hashdatabase; import org.sleuthkit.datamodel.TskCoreException; interface HashSetParser { - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ String 
getNextHash() throws TskCoreException; - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ boolean doneReading(); - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. + * * @return The expected hash count */ long getExpectedHashCount(); - + /** * Closes the import file */ diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java index af66b994e1..0c1b694e1b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java @@ -31,35 +31,38 @@ import org.sleuthkit.datamodel.TskCoreException; * Parser for idx files (*.idx) */ class IdxHashSetParser implements HashSetParser { + private final String filename; // Name of the input file (saved for logging) private BufferedReader reader; // Input file private final long totalHashes; // Estimated number of hashes private boolean doneReading = false; // Flag for if we've hit the end of the file - - IdxHashSetParser(String filename) throws TskCoreException{ + + IdxHashSetParser(String filename) throws TskCoreException { this.filename = filename; - try{ + try { reader = new BufferedReader(new FileReader(filename)); - } catch (FileNotFoundException ex){ + } catch (FileNotFoundException ex) { throw new TskCoreException("Error opening file " + filename, ex); } - + // Estimate the total number of hashes in the file since counting them all can be slow File importFile = new File(filename); long fileSize = importFile.length(); totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero } - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ @Override public String getNextHash() throws TskCoreException { String line; - - try{ + + try { while ((line = reader.readLine()) != null) { String[] parts = line.split("\\|"); @@ -68,45 +71,47 @@ class IdxHashSetParser implements HashSetParser { if (parts.length != 2 || parts[0].length() == 41) { continue; } - + return parts[0].toLowerCase(); } - } catch (IOException ex){ + } catch (IOException ex) { throw new TskCoreException("Error reading file " + filename, ex); } - + // We've run out of data doneReading = true; return null; } - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ @Override public boolean doneReading() { return doneReading; } - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. 
+ * * @return The expected hash count */ @Override public long getExpectedHashCount() { return totalHashes; } - + /** * Closes the import file */ @Override public void close() { - try{ + try { reader.close(); - } catch (IOException ex){ + } catch (IOException ex) { Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 12f62ba8b1..8e799f4164 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -45,31 +45,31 @@ import org.sleuthkit.datamodel.TskData; /** * Imports a hash set into the central repository and updates a progress dialog */ -class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{ +class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener { private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress", - }) + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",}) ImportCentralRepoDbProgressDialog() { super((JFrame) WindowManager.getDefault().getMainWindow(), Bundle.ImportCentralRepoDbProgressDialog_title_text(), true); - - initComponents(); + + initComponents(); customizeComponents(); } - - private void customizeComponents(){ + + private void customizeComponents() { // This is preventing the user from closing the dialog using the X setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); - + bnOk.setEnabled(false); } - + /** - * Import the selected hash set into the central repository. - * Will bring up a progress dialog while the import is in progress. + * Import the selected hash set into the central repository. Will bring up a + * progress dialog while the import is in progress. + * * @param hashSetName * @param version * @param orgId @@ -77,57 +77,57 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P * @param sendIngestMessages * @param knownFilesType * @param readOnly - * @param importFileName + * @param importFileName */ void importFile(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, String importFileName){ + boolean readOnly, String importFileName) { - worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, + worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFileName); worker.addPropertyChangeListener(this); worker.execute(); - - setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow()); + + setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow()); this.setVisible(true); } - + /** - * Get the HashDb object for the newly imported data. - * Should be called after importFile completes. + * Get the HashDb object for the newly imported data. Should be called after + * importFile completes. 
+ * * @return The new HashDb object or null if the import failed/was canceled */ - HashDbManager.HashDb getDatabase(){ - if(worker != null){ + HashDbManager.HashDb getDatabase() { + if (worker != null) { return worker.getDatabase(); } return null; } - - + /** - * Updates the dialog from events from the worker. - * The two events we handle are progress updates and - * the done event. - * @param evt + * Updates the dialog from events from the worker. The two events we handle + * are progress updates and the done event. + * + * @param evt */ @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"}) @Override public void propertyChange(PropertyChangeEvent evt) { - - if("progress".equals(evt.getPropertyName())){ + + if ("progress".equals(evt.getPropertyName())) { // The progress has been updated. Update the progress bar and text progressBar.setValue(worker.getProgress()); lbProgress.setText(getProgressString()); } else if ("state".equals(evt.getPropertyName()) && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) { - + // The worker is done processing // Disable cancel button and enable ok bnCancel.setEnabled(false); bnOk.setEnabled(true); - - if(worker.getImportSuccess()){ + + if (worker.getImportSuccess()) { // If the import succeeded, finish the progress bar and display the // total number of imported hashes progressBar.setValue(progressBar.getMaximum()); @@ -140,13 +140,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } } - + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"}) - private String getProgressString(){ + private String getProgressString() { return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message(); } - - private class CentralRepoImportWorker extends SwingWorker{ + + private class CentralRepoImportWorker extends SwingWorker { + private final int HASH_IMPORT_THRESHOLD = 10000; private final String hashSetName; private final String version; @@ -160,11 +161,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private final AtomicInteger referenceSetID = new AtomicInteger(); private final AtomicLong hashCount = new AtomicLong(); private final AtomicBoolean importSuccess = new AtomicBoolean(); - + CentralRepoImportWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, String importFileName){ - + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, String importFileName) { + this.hashSetName = hashSetName; this.version = version; this.orgId = orgId; @@ -177,47 +178,53 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.importSuccess.set(false); this.referenceSetID.set(-1); } - + /** * Get the newly created database - * @return the imported database. May be null if an error occurred or the user canceled + * + * @return the imported database. 
May be null if an error occurred or
+         *         the user canceled
          */
-        synchronized HashDbManager.CentralRepoHashSet getDatabase(){
+        synchronized HashDbManager.CentralRepoHashSet getDatabase() {
             return newHashDb;
         }
-
+
         /**
          * Get the number of hashes that have been read in so far
+         *
          * @return current hash count
          */
-        long getNumHashesProcessed(){
+        long getNumHashesProcessed() {
             return hashCount.get();
         }
-
+
         /**
          * Check if the import was successful or if there was an error.
-         * @return true if the import process completed without error, false otherwise
+         *
+         * @return true if the import process completed without error, false
+         *         otherwise
          */
-        boolean getImportSuccess(){
+        boolean getImportSuccess() {
             return importSuccess.get();
         }
-
+
         @Override
         protected Void doInBackground() throws Exception {
-
+
             // Create the hash set parser
             HashSetParser hashSetParser;
-            if(importFileName.toLowerCase().endsWith(".idx")){
+            if (importFileName.toLowerCase().endsWith(".idx")) {
                 hashSetParser = new IdxHashSetParser(importFileName);
-            } else
-            if(importFileName.toLowerCase().endsWith(".hash")){
-                hashSetParser = new EncaseHashSetParser(importFileName);
             } else {
-                // We've gotten here with a format that can't be processed
-                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
+                if (importFileName.toLowerCase().endsWith(".hash")) {
+                    hashSetParser = new EncaseHashSetParser(importFileName);
+                } else {
+                    // We've gotten here with a format that can't be processed
+                    throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
+                }
             }
-            try{
+            try {
                 // Convert to the FileKnown enum used by EamGlobalSet
                 TskData.FileKnown knownStatus;
                 if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
                     knownStatus = TskData.FileKnown.KNOWN;
                 } else {
                     knownStatus = TskData.FileKnown.BAD;
                 }
-
+
                 // Create an empty hashset in the central repository
                 EamDb dbManager = EamDb.getInstance();
                 referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)));
@@ -237,30 +244,30 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                 // Holds the current batch of hashes that need to be written to the central repo
                 Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
-                while (! 
hashSetParser.doneReading()) { - if(isCancelled()){ + while (!hashSetParser.doneReading()) { + if (isCancelled()) { return null; } String newHash = hashSetParser.getNextHash(); - if(newHash != null){ + if (newHash != null) { EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - referenceSetID.get(), - newHash, - knownStatus, + referenceSetID.get(), + newHash, + knownStatus, ""); globalInstances.add(eamGlobalFileInstance); // If we've hit the threshold for writing the hashes, write them // all to the central repo - if(hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){ + if (hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0) { dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); globalInstances.clear(); - int progress = (int)(hashCount.get() * 100 / hashSetParser.getExpectedHashCount()); - if(progress < 100){ + int progress = (int) (hashCount.get() * 100 / hashSetParser.getExpectedHashCount()); + if (progress < 100) { this.setProgress(progress); } else { this.setProgress(99); @@ -277,41 +284,41 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P hashSetParser.close(); } } - - private void deleteIncompleteSet(){ - if(referenceSetID.get() >= 0){ - + + private void deleteIncompleteSet() { + if (referenceSetID.get() >= 0) { + // This can be slow on large reference sets Executors.newSingleThreadExecutor().execute(new Runnable() { - @Override + @Override public void run() { - try{ + try { EamDb.getInstance().deleteReferenceSet(referenceSetID.get()); - } catch (EamDbException ex2){ + } catch (EamDbException ex2) { Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); } } }); } } - + @Override synchronized protected void done() { - - if(isCancelled()){ + + if (isCancelled()) { // If the user hit cancel, delete this incomplete hash set from the central repo deleteIncompleteSet(); return; } - + try { get(); - try{ - newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, - referenceSetID.get(), + try { + newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, + referenceSetID.get(), searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); importSuccess.set(true); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); } } catch (Exception ex) { @@ -319,10 +326,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P deleteIncompleteSet(); Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex); } - } - + } + } - + /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. 
The content of this method is always @@ -416,4 +423,4 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private javax.swing.JLabel lbProgress; private javax.swing.JProgressBar progressBar; // End of variables declaration//GEN-END:variables -} \ No newline at end of file +} From 6f2fae67afeee05cf7f25178282f0d2ac235c93d Mon Sep 17 00:00:00 2001 From: Ann Priestman Date: Wed, 22 Nov 2017 08:21:48 -0500 Subject: [PATCH 17/17] Cleanup --- .../hashdatabase/EncaseHashSetParser.java | 1 - .../ImportCentralRepoDbProgressDialog.java | 8 +- .../hashdatabase/KdbHashSetParser.java | 95 +++++++++++-------- 3 files changed, 56 insertions(+), 48 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java index 8ea9bc3c46..4c6a58d9bf 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -162,7 +162,6 @@ class EncaseHashSetParser implements HashSetParser { throw new TskCoreException("readBuffer called on null inputStream"); } if (length != inputStream.read(buffer)) { - close(); throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename); } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 3421a06044..a2e9522893 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -220,12 +220,8 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } else if(importFileName.toLowerCase().endsWith(".kdb")){ hashSetParser = new KdbHashSetParser(importFileName); } else { - if (importFileName.toLowerCase().endsWith(".hash")) { - hashSetParser = new EncaseHashSetParser(importFileName); - } else { - // We've gotten here with a format that can't be processed - throw new TskCoreException("Hash set to import is an unknown format : " + importFileName); - } + // We've gotten here with a format that can't be processed + throw new TskCoreException("Hash set to import is an unknown format : " + importFileName); } try { diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java index 944780936b..5935784087 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java @@ -1,13 +1,25 @@ /* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. + * Autopsy Forensic Browser + * + * Copyright 2011 - 2017 Basis Technology Corp. + * Contact: carrier sleuthkit org + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. */ package org.sleuthkit.autopsy.modules.hashdatabase; import java.sql.Connection; import java.sql.DriverManager; -import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; @@ -16,35 +28,35 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.TskCoreException; /** - * + * Parser for Autopsy/TSK-created databases (*.kdb) */ public class KdbHashSetParser implements HashSetParser { + private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS private final String filename; // Name of the input file (saved for logging) private final long totalHashes; // Estimated number of hashes - private int totalHashesRead = 0; // Number of hashes that have been read + private int totalHashesRead = 0; // Number of hashes that have been read private Connection conn; private Statement statement; private ResultSet resultSet; - - - KdbHashSetParser(String filename) throws TskCoreException{ + + KdbHashSetParser(String filename) throws TskCoreException { this.filename = filename; - + conn = null; statement = null; resultSet = null; - - try{ + + try { // Open the database StringBuilder connectionURL = new StringBuilder(); connectionURL.append(JDBC_BASE_URI); connectionURL.append(filename); Class.forName(JDBC_DRIVER); - conn = DriverManager.getConnection(connectionURL.toString()); - + conn = DriverManager.getConnection(connectionURL.toString()); + // Get the number of hashes in the table statement = conn.createStatement(); resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes"); @@ -54,94 +66,95 @@ public class KdbHashSetParser implements HashSetParser { close(); throw new TskCoreException("Error getting hash count from database " + filename); } - + // Get the hashes resultSet = statement.executeQuery("SELECT md5 FROM hashes"); - + // At this point, getNextHash can read each hash from the result set - - } catch (ClassNotFoundException | SQLException ex){ + } catch (ClassNotFoundException | SQLException ex) { throw new TskCoreException("Error opening/reading database " + filename, ex); } - + } - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string + * @throws TskCoreException */ @Override public String getNextHash() throws TskCoreException { - - try{ - if(resultSet.next()){ + + try { + if (resultSet.next()) { byte[] hashBytes = resultSet.getBytes("md5"); StringBuilder sb = new StringBuilder(); for (byte b : hashBytes) { sb.append(String.format("%02x", b)); } - if(sb.toString().length() != 32){ + if (sb.toString().length() != 32) { throw new TskCoreException("Hash has incorrect length: " + sb.toString()); - } - + } + totalHashesRead++; return sb.toString(); } else { throw new TskCoreException("Could not read expected number of hashes from database " + filename); } - } catch (SQLException ex){ + } catch (SQLException ex) { throw new TskCoreException("Error reading hash from result set for 
database " + filename, ex); } } - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ @Override public boolean doneReading() { - return(totalHashesRead >= totalHashes); + return (totalHashesRead >= totalHashes); } - + /** * Get the expected number of hashes in the file. - * This number can be an estimate. + * * @return The expected hash count */ @Override - public long getExpectedHashCount() { + public long getExpectedHashCount() { return totalHashes; } - + /** * Closes the import file */ @Override public final void close() { - if(statement != null){ + if (statement != null) { try { statement.close(); } catch (SQLException ex) { Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex); } } - - if(resultSet != null){ + + if (resultSet != null) { try { resultSet.close(); } catch (SQLException ex) { Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex); } } - - if(conn != null){ + + if (conn != null) { try { conn.close(); } catch (SQLException ex) { Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex); } - } + } } }