worked through kdb parser and integration

Greg DiCristofaro 2020-04-30 13:17:25 -04:00
parent 634ce51266
commit e4b13b9df1
3 changed files with 45 additions and 12 deletions

View File

@@ -55,7 +55,7 @@ interface HashSetParser {
/**
* Get the next hash to import as a HashEntry object.
*
-* @return A new hash entry for the next item parsed or null if no more items.
+* @return A new hash entry for the next item parsed.
* @throws TskCoreException
*/
default HashEntry getNextHashEntry() throws TskCoreException {
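The body of this default method lies outside the hunk, so only the contract change is visible here. As a rough sketch, and purely as an assumption about what such a default could look like (it is not shown in this commit), it might simply wrap getNextHash() in a HashEntry that carries only the MD5:

// Hedged sketch of a possible default body; not taken from this diff.
default HashEntry getNextHashEntry() throws TskCoreException {
    String md5 = getNextHash();
    // Only the MD5 is available from a plain parser, so file name, SHA-1,
    // SHA-256 and comment stay null; return null when the parser is exhausted.
    return (md5 == null) ? null : new HashEntry(null, md5, null, null, null);
}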

View File

@@ -41,6 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
+import org.sleuthkit.datamodel.HashEntry;
/**
* Imports a hash set into the central repository and updates a progress dialog
@@ -250,14 +251,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
return null;
}
-String newHash = hashSetParser.getNextHash();
+HashEntry newHash = hashSetParser.getNextHashEntry();
if (newHash != null) {
CentralRepoFileInstance eamGlobalFileInstance = new CentralRepoFileInstance(
referenceSetID.get(),
-newHash,
+newHash.getMd5Hash(),
knownStatus,
-"");
+newHash.getComment() != null ? newHash.getComment() : "");
globalInstances.add(eamGlobalFileInstance);
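Because the removed and added lines are interleaved above, here is how the updated block plausibly reads as a whole. This is a reconstruction from the visible diff lines only; the surrounding worker loop and the referenceSetID, knownStatus and globalInstances variables are assumed from context rather than copied from the file:

HashEntry newHash = hashSetParser.getNextHashEntry();
if (newHash != null) {
    CentralRepoFileInstance eamGlobalFileInstance = new CentralRepoFileInstance(
            referenceSetID.get(),
            newHash.getMd5Hash(),   // the MD5 string now comes out of the HashEntry
            knownStatus,
            // carry any parsed comment into the central repository, defaulting to an empty string
            newHash.getComment() != null ? newHash.getComment() : "");
    globalInstances.add(eamGlobalFileInstance);
}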

View File

@@ -20,9 +20,12 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.sql.Connection;
import java.sql.DriverManager;
+import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.HashEntry;
@@ -80,10 +83,28 @@ public class KdbHashSetParser implements HashSetParser {
private static class HashRow {
private final String md5Hash;
-private
private final long hashId;
HashRow(String md5Hash, long hashId) {
this.md5Hash = md5Hash;
this.hashId = hashId;
}
-private Stuff getNextHashEntry() throws TskCoreException {
String getMd5Hash() {
return md5Hash;
}
long getHashId() {
return hashId;
}
}
/**
* Retrieves the row id and md5 hash for the next item in the hashes table.
* @return A hash row object containing the hash and id.
* @throws TskCoreException
*/
private HashRow getNextHashRow() throws TskCoreException {
try {
if (resultSet.next()) {
long hashId = resultSet.getLong("id");
@@ -98,10 +119,8 @@ public class KdbHashSetParser implements HashSetParser {
}
String md5Hash = sb.toString();
-return new
totalHashesRead++;
return new HashRow(md5Hash, hashId);
} else {
throw new TskCoreException("Could not read expected number of hashes from hash set " + filename);
}
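The hunk above shows only the tail of the hash conversion (the lines between the two hunks are unchanged and therefore omitted), so sb appears without its setup. As an illustration only, the preceding code presumably builds the MD5 hex string from a binary column roughly like this; the "md5" column name and the length check are assumptions, not lines from this commit:

// Hedged reconstruction of the elided byte-to-hex step implied by sb.toString() above.
byte[] hashBytes = resultSet.getBytes("md5");   // column name assumed
StringBuilder sb = new StringBuilder();
for (byte b : hashBytes) {
    sb.append(String.format("%02x", b));        // two lowercase hex digits per byte
}
if (sb.length() != 32) {                        // an MD5 is 16 bytes / 32 hex characters
    throw new TskCoreException("Hash has incorrect length: " + sb.toString());
}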
@@ -118,13 +137,26 @@ public class KdbHashSetParser implements HashSetParser {
*/
@Override
public String getNextHash() throws TskCoreException {
return getNextHashRow().getMd5Hash();
}
@Override
public HashEntry getNextHashEntry() throws TskCoreException {
-return HashSetParser.super.getNextHashEntry(); //To change body of generated methods, choose Tools | Templates.
HashRow row = getNextHashRow();
try {
PreparedStatement getComment = conn.prepareStatement("SELECT comment FROM comments WHERE hash_id = ?");
getComment.setLong(1, row.getHashId()); // JDBC parameter indexes start at 1
ResultSet commentResults = getComment.executeQuery();
List<String> comments = new ArrayList<>();
while (commentResults.next()) {
comments.add(commentResults.getString("comment"));
}
String comment = comments.isEmpty() ? null : String.join(" ", comments);
return new HashEntry(null, row.getMd5Hash(), null, null, comment);
}
catch (SQLException ex) {
throw new TskCoreException("Error opening/reading hash set " + filename, ex);
}
}
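Finally, a hypothetical usage sketch of the new override, to show how the HashEntry flows back to a caller. The KdbHashSetParser constructor argument and the getExpectedHashCount() and close() calls are taken from the wider HashSetParser API as assumptions, since they do not appear in this diff:

// Hypothetical driver; anything not visible in the diff (constructor, count, close) is assumed.
static void readKdb(String kdbPath) throws TskCoreException {
    KdbHashSetParser parser = new KdbHashSetParser(kdbPath);
    try {
        for (long i = 0; i < parser.getExpectedHashCount(); i++) {
            HashEntry entry = parser.getNextHashEntry();
            String md5 = entry.getMd5Hash();      // hex string produced by getNextHashRow()
            String comment = entry.getComment();  // null when the comments table has no match
            // hand md5/comment to the importer, e.g. to build a CentralRepoFileInstance
        }
    } finally {
        parser.close();
    }
}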