From 6f2fae67afeee05cf7f25178282f0d2ac235c93d Mon Sep 17 00:00:00 2001
From: Ann Priestman
Date: Wed, 22 Nov 2017 08:21:48 -0500
Subject: [PATCH] Cleanup

---
 .../hashdatabase/EncaseHashSetParser.java        |  1 -
 .../ImportCentralRepoDbProgressDialog.java       |  8 +-
 .../hashdatabase/KdbHashSetParser.java           | 95 +++++++++++--------
 3 files changed, 56 insertions(+), 48 deletions(-)

diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
index 8ea9bc3c46..4c6a58d9bf 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java
@@ -162,7 +162,6 @@ class EncaseHashSetParser implements HashSetParser {
             throw new TskCoreException("readBuffer called on null inputStream");
         }
         if (length != inputStream.read(buffer)) {
-            close();
             throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename);
         }
     }
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
index 3421a06044..a2e9522893 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
@@ -220,12 +220,8 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
         } else if(importFileName.toLowerCase().endsWith(".kdb")){
             hashSetParser = new KdbHashSetParser(importFileName);
         } else {
-            if (importFileName.toLowerCase().endsWith(".hash")) {
-                hashSetParser = new EncaseHashSetParser(importFileName);
-            } else {
-                // We've gotten here with a format that can't be processed
-                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
-            }
+            // We've gotten here with a format that can't be processed
+            throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
         }
 
         try {
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
index 944780936b..5935784087 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/KdbHashSetParser.java
@@ -1,13 +1,25 @@
 /*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 - 2017 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
  */
 package org.sleuthkit.autopsy.modules.hashdatabase;
 
 import java.sql.Connection;
 import java.sql.DriverManager;
-import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -16,35 +28,35 @@ import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.TskCoreException;
 
 /**
- *
+ * Parser for Autopsy/TSK-created databases (*.kdb)
  */
 public class KdbHashSetParser implements HashSetParser {
+
     private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
     private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS
 
     private final String filename; // Name of the input file (saved for logging)
     private final long totalHashes; // Estimated number of hashes
-    private int totalHashesRead = 0; // Number of hashes that have been read 
+    private int totalHashesRead = 0; // Number of hashes that have been read
     private Connection conn;
     private Statement statement;
     private ResultSet resultSet;
-    
-    
-    KdbHashSetParser(String filename) throws TskCoreException{
+
+    KdbHashSetParser(String filename) throws TskCoreException {
         this.filename = filename;
-        
+
         conn = null;
         statement = null;
         resultSet = null;
-        
-        try{
+
+        try {
             // Open the database
             StringBuilder connectionURL = new StringBuilder();
             connectionURL.append(JDBC_BASE_URI);
             connectionURL.append(filename);
             Class.forName(JDBC_DRIVER);
-            conn = DriverManager.getConnection(connectionURL.toString()); 
-            
+            conn = DriverManager.getConnection(connectionURL.toString());
+
             // Get the number of hashes in the table
             statement = conn.createStatement();
             resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes");
@@ -54,94 +66,95 @@ public class KdbHashSetParser implements HashSetParser {
                 close();
                 throw new TskCoreException("Error getting hash count from database " + filename);
             }
-            
+
             // Get the hashes
             resultSet = statement.executeQuery("SELECT md5 FROM hashes");
-            
+
             // At this point, getNextHash can read each hash from the result set
-            
-        } catch (ClassNotFoundException | SQLException ex){
+        } catch (ClassNotFoundException | SQLException ex) {
             throw new TskCoreException("Error opening/reading database " + filename, ex);
         }
-        
+
     }
-    
+
     /**
      * Get the next hash to import
-     * @return The hash as a string, or null if the end of file was reached without error
-     * @throws TskCoreException 
+     *
+     * @return The hash as a string
+     * @throws TskCoreException
      */
     @Override
     public String getNextHash() throws TskCoreException {
-        
-        try{
-            if(resultSet.next()){
+
+        try {
+            if (resultSet.next()) {
                 byte[] hashBytes = resultSet.getBytes("md5");
                 StringBuilder sb = new StringBuilder();
                 for (byte b : hashBytes) {
                     sb.append(String.format("%02x", b));
                 }
-                if(sb.toString().length() != 32){
+                if (sb.toString().length() != 32) {
                     throw new TskCoreException("Hash has incorrect length: " + sb.toString());
-                } 
-                
+                }
+
                 totalHashesRead++;
                 return sb.toString();
             } else {
                 throw new TskCoreException("Could not read expected number of hashes from database " + filename);
             }
-        } catch (SQLException ex){
+        } catch (SQLException ex) {
             throw new TskCoreException("Error reading hash from result set for database " + filename, ex);
         }
     }
-    
+
     /**
      * Check if there are more hashes to read
+     *
      * @return true if we've read all expected hash values, false otherwise
      */
     @Override
     public boolean doneReading() {
-        return(totalHashesRead >= totalHashes);
+        return (totalHashesRead >= totalHashes);
     }
-    
+
     /**
      * Get the expected number of hashes in the file.
      * This number can be an estimate.
+     *
      * @return The expected hash count
      */
     @Override
-    public long getExpectedHashCount() { 
+    public long getExpectedHashCount() {
         return totalHashes;
     }
-    
+
     /**
      * Closes the import file
      */
     @Override
     public final void close() {
-        if(statement != null){
+        if (statement != null) {
             try {
                 statement.close();
             } catch (SQLException ex) {
                 Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex);
             }
         }
-        
-        if(resultSet != null){
+
+        if (resultSet != null) {
             try {
                 resultSet.close();
             } catch (SQLException ex) {
                 Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex);
             }
         }
-        
-        if(conn != null){
+
+        if (conn != null) {
             try {
                 conn.close();
             } catch (SQLException ex) {
                 Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex);
             }
-        } 
+        }
     }
 }
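
Reviewer note: for anyone unfamiliar with the .kdb layout, the minimal standalone sketch below mirrors the read path that KdbHashSetParser follows after this cleanup: open the SQLite database over JDBC, count the rows in the hashes table, then stream the md5 column and hex-encode each BLOB with String.format("%02x", b). The class name and input path are hypothetical, and it assumes the Xerial sqlite-jdbc driver (org.sqlite.JDBC) is on the classpath; it is an illustration of the approach, not Autopsy code.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Hypothetical standalone reader mirroring KdbHashSetParser's query flow.
public class KdbReadSketch {

    public static void main(String[] args) throws Exception {
        String kdbPath = "example.kdb"; // hypothetical input file

        Class.forName("org.sqlite.JDBC"); // same driver class the parser loads

        try (Connection conn = DriverManager.getConnection("jdbc:sqlite:" + kdbPath);
             Statement statement = conn.createStatement()) {

            // Expected hash count, as in the parser's constructor.
            long totalHashes = 0;
            try (ResultSet countSet = statement.executeQuery("SELECT count(*) AS count FROM hashes")) {
                if (countSet.next()) {
                    totalHashes = countSet.getLong("count");
                }
            }
            System.out.println("Expected hashes: " + totalHashes);

            // Stream the md5 column and hex-encode each BLOB, as getNextHash() does.
            try (ResultSet resultSet = statement.executeQuery("SELECT md5 FROM hashes")) {
                while (resultSet.next()) {
                    byte[] hashBytes = resultSet.getBytes("md5");
                    StringBuilder sb = new StringBuilder();
                    for (byte b : hashBytes) {
                        sb.append(String.format("%02x", b));
                    }
                    System.out.println(sb); // 32 lowercase hex characters per MD5
                }
            }
        }
    }
}

As in the parser, the count query only supplies the expected total (getExpectedHashCount notes it can be an estimate), while the SELECT md5 query drives the actual read loop.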