diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
index 8ea9bc3c46..4c6a58d9bf 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
@@ -162,7 +162,6 @@ class EncaseHashSetParser implements HashSetParser {
             throw new TskCoreException("readBuffer called on null inputStream");
         }
         if (length != inputStream.read(buffer)) {
-            close();
             throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename);
         }
     }
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java
index 2937d53983..db70d1114d 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbImportDatabaseDialog.java
@@ -88,11 +88,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
         fileChooser.setMultiSelectionEnabled(false);
     }
 
-    @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx or .hash only)"})
+    @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.kdb, .idx or .hash)"})
     private void updateFileChooserFilter() {
         fileChooser.resetChoosableFileFilters();
         if(centralRepoRadioButton.isSelected()){
-            String[] EXTENSION = new String[]{"hash", "Hash", "idx"}; //NON-NLS
+            String[] EXTENSION = new String[]{"kdb", "idx", "hash", "Hash"}; //NON-NLS
             FileNameExtensionFilter filter = new FileNameExtensionFilter(
                     NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.centralRepoExtFilter.text"), EXTENSION);
             fileChooser.setFileFilter(filter);
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
index 8e799f4164..a2e9522893 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
@@ -215,13 +215,13 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
             HashSetParser hashSetParser;
             if (importFileName.toLowerCase().endsWith(".idx")) {
                 hashSetParser = new IdxHashSetParser(importFileName);
+            } else if(importFileName.toLowerCase().endsWith(".hash")){
+                hashSetParser = new EncaseHashSetParser(importFileName);
+            } else if(importFileName.toLowerCase().endsWith(".kdb")){
+                hashSetParser = new KdbHashSetParser(importFileName);
             } else {
-                if (importFileName.toLowerCase().endsWith(".hash")) {
-                    hashSetParser = new EncaseHashSetParser(importFileName);
-                } else {
-                    // We've gotten here with a format that can't be processed
-                    throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
-                }
+                // We've gotten here with a format that can't be processed
+                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
             }
 
             try {
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
new file mode 100644
index 0000000000..5935784087
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
@@ -0,0 +1,160 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 - 2017 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.hashdatabase;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Parser for Autopsy/TSK-created databases (*.kdb)
+ */
+public class KdbHashSetParser implements HashSetParser {
+
+    private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
+    private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS
+
+    private final String filename;   // Name of the input file (saved for logging)
+    private final long totalHashes;  // Estimated number of hashes
+    private int totalHashesRead = 0; // Number of hashes that have been read
+    private Connection conn;
+    private Statement statement;
+    private ResultSet resultSet;
+
+    KdbHashSetParser(String filename) throws TskCoreException {
+        this.filename = filename;
+
+        conn = null;
+        statement = null;
+        resultSet = null;
+
+        try {
+            // Open the database
+            StringBuilder connectionURL = new StringBuilder();
+            connectionURL.append(JDBC_BASE_URI);
+            connectionURL.append(filename);
+            Class.forName(JDBC_DRIVER);
+            conn = DriverManager.getConnection(connectionURL.toString());
+
+            // Get the number of hashes in the table
+            statement = conn.createStatement();
+            resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes");
+            if (resultSet.next()) {
+                totalHashes = resultSet.getLong("count");
+            } else {
+                close();
+                throw new TskCoreException("Error getting hash count from database " + filename);
+            }
+
+            // Get the hashes
+            resultSet = statement.executeQuery("SELECT md5 FROM hashes");
+
+            // At this point, getNextHash can read each hash from the result set
+        } catch (ClassNotFoundException | SQLException ex) {
+            throw new TskCoreException("Error opening/reading database " + filename, ex);
+        }
+
+    }
+
+    /**
+     * Get the next hash to import
+     *
+     * @return The hash as a string
+     * @throws TskCoreException
+     */
+    @Override
+    public String getNextHash() throws TskCoreException {
+
+        try {
+            if (resultSet.next()) {
+                byte[] hashBytes = resultSet.getBytes("md5");
+                StringBuilder sb = new StringBuilder();
+                for (byte b : hashBytes) {
+                    sb.append(String.format("%02x", b));
+                }
+
+                if (sb.toString().length() != 32) {
+                    throw new TskCoreException("Hash has incorrect length: " + sb.toString());
+                }
+
+                totalHashesRead++;
+                return sb.toString();
+            } else {
+                throw new TskCoreException("Could not read expected number of hashes from database " + filename);
+            }
+        } catch (SQLException ex) {
+            throw new TskCoreException("Error reading hash from result set for database " + filename, ex);
+        }
+    }
+
+    /**
+     * Check if there are more hashes to read
+     *
+     * @return true if we've read all expected hash values, false otherwise
+     */
+    @Override
+    public boolean doneReading() {
+        return (totalHashesRead >= totalHashes);
+    }
+
+    /**
+     * Get the expected number of hashes in the file.
+     *
+     * @return The expected hash count
+     */
+    @Override
+    public long getExpectedHashCount() {
+        return totalHashes;
+    }
+
+    /**
+     * Closes the import file
+     */
+    @Override
+    public final void close() {
+        if (statement != null) {
+            try {
+                statement.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex);
+            }
+        }
+
+        if (resultSet != null) {
+            try {
+                resultSet.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex);
+            }
+        }
+
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (SQLException ex) {
+                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex);
+            }
+        }
+    }
+}
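
Usage note (not part of the patch): a minimal sketch of how a HashSetParser implementation such as the new KdbHashSetParser is driven. The method name importKdb and the kdbPath argument are hypothetical; the real import loop in ImportCentralRepoDbProgressDialog follows the same pattern but also reports progress and writes each hash to the central repository hash set.

    // Hypothetical driver, assumed to live in org.sleuthkit.autopsy.modules.hashdatabase
    // because the KdbHashSetParser constructor is package-private. Assumes importFileName
    // has already been checked to end in ".kdb" (as the dialog code above does).
    static void importKdb(String kdbPath) throws TskCoreException {
        HashSetParser parser = new KdbHashSetParser(kdbPath);
        try {
            // getExpectedHashCount() gives the total used to scale a progress bar.
            long expected = parser.getExpectedHashCount();
            while (!parser.doneReading()) {
                String md5 = parser.getNextHash(); // 32-character lowercase hex string
                // ... store md5 in the destination hash set ...
            }
        } finally {
            parser.close();
        }
    }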