Refactored the database import workers

Ann Priestman 2017-11-09 08:09:16 -05:00
parent 3605ba147e
commit 4b6a9cb4a0

ImportCentralRepoDbProgressDialog.java

@@ -111,7 +111,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
         return null;
     }
 
-    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= lines processed"})
+    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= hashes processed"})
     @Override
     public void propertyChange(PropertyChangeEvent evt) {
@@ -139,23 +139,23 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
         return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed();
     }
 
-    abstract class CentralRepoImportWorker2 extends SwingWorker<Void, Void>{
-        private final int HASH_IMPORT_THRESHOLD = 10000;
-        private final String hashSetName;
-        private final String version;
-        private final int orgId;
-        private final boolean searchDuringIngest;
-        private final boolean sendIngestMessages;
-        private final HashDbManager.HashDb.KnownFilesType knownFilesType;
-        private final boolean readOnly;
-        private final File importFile;
-        private long totalLines;
-        private int crIndex = -1;
-        private HashDbManager.CentralRepoHashDb newHashDb = null;
-        private final AtomicLong numLines = new AtomicLong();
-        private String errorString = "";
+    abstract class CentralRepoImportWorker extends SwingWorker<Void, Void>{
+        final int HASH_IMPORT_THRESHOLD = 10000;
+        final String hashSetName;
+        final String version;
+        final int orgId;
+        final boolean searchDuringIngest;
+        final boolean sendIngestMessages;
+        final HashDbManager.HashDb.KnownFilesType knownFilesType;
+        final boolean readOnly;
+        final File importFile;
+        long totalHashes = 1;
+        int referenceSetID = -1;
+        HashDbManager.CentralRepoHashDb newHashDb = null;
+        final AtomicLong numLines = new AtomicLong();
+        String errorString = "";
 
-        CentralRepoImportWorker2(String hashSetName, String version, int orgId,
+        CentralRepoImportWorker(String hashSetName, String version, int orgId,
                 boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
                 boolean readOnly, File importFile){
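
The hunks below replace the old `CentralRepoImportWorker` interface plus two near-identical `SwingWorker` subclasses with a single abstract `SwingWorker` base that owns the shared state. A minimal, self-contained sketch of the resulting shape (the names here are illustrative stand-ins, not the Autopsy API):

```java
import java.io.File;
import java.util.concurrent.atomic.AtomicLong;
import javax.swing.SwingWorker;

// Sketch of the refactored shape: shared fields and progress plumbing live in
// one abstract SwingWorker; each file format only supplies its own size
// estimate and parse loop.
abstract class ImportWorkerSketch extends SwingWorker<Void, Void> {
    final File importFile;
    final AtomicLong linesProcessed = new AtomicLong();
    long totalHashes = 1;   // initialized nonzero so progress math never divides by zero

    ImportWorkerSketch(File importFile) {
        this.importFile = importFile;
    }

    // Called from each subclass constructor, as in the diff; the value may be
    // refined later (e.g. once a file header has been parsed).
    abstract void setEstimatedTotalHashes();
}

class IdxWorkerSketch extends ImportWorkerSketch {
    IdxWorkerSketch(File importFile) {
        super(importFile);
        setEstimatedTotalHashes();
    }

    @Override
    void setEstimatedTotalHashes() {
        totalHashes = importFile.length() / 0x33 + 1;   // ~0x33 bytes per IDX line
    }

    @Override
    protected Void doInBackground() {
        // the real worker parses the file and bulk-inserts hashes here
        return null;
    }
}
```

Because the shared `totalHashes` is no longer `final`, a subclass can refine the estimate after construction; the Encase worker does exactly that with `encaseParser.getExpectedHashes()` once the file header has been read.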
@@ -186,88 +186,88 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
             return errorString;
         }
 
+        /**
+         * Should be called in the constructor to set the max number of hashes.
+         * The value can be updated later after parsing the import file.
+         */
+        abstract void setEstimatedTotalHashes();
+
+        void deleteIncompleteSet(){
+            if(referenceSetID >= 0){
+                // This can be slow on large reference sets
+                Executors.newSingleThreadExecutor().execute(new Runnable() {
+                    @Override
+                    public void run() {
+                        try{
+                            EamDb.getInstance().deleteReferenceSet(referenceSetID);
+                        } catch (EamDbException ex2){
+                            Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
+                        }
+                    }
+                });
+            }
+        }
+
+        @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importHashsetError=Error importing hash set",
+            "ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"})
+        @Override
+        protected void done() {
+            if(isCancelled()){
+                // If the user hit cancel, delete this incomplete hash set from the central repo
+                deleteIncompleteSet();
+                return;
+            }
+
+            try {
+                get();
+                try{
+                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
+                            referenceSetID,
+                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
+                } catch (TskCoreException ex){
+                    JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
+                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
+                }
+            } catch (Exception ex) {
+                // Delete this incomplete hash set from the central repo
+                deleteIncompleteSet();
+                errorString = Bundle.ImportCentralRepoDbProgressDialog_importHashsetError();
+            }
+        }
     }
 
-    private interface CentralRepoImportWorker{
-        void execute();
-        boolean cancel(boolean mayInterruptIfRunning);
-        void addPropertyChangeListener(PropertyChangeListener dialog);
-        int getProgressPercentage();
-        long getLinesProcessed();
-        HashDbManager.HashDatabase getDatabase();
-        String getError();
-    }
-
-    class ImportEncaseWorker extends SwingWorker<Void,Void> implements CentralRepoImportWorker{
-        private final int HASH_IMPORT_THRESHOLD = 10000;
-        private final String hashSetName;
-        private final String version;
-        private final int orgId;
-        private final boolean searchDuringIngest;
-        private final boolean sendIngestMessages;
-        private final HashDbManager.HashDb.KnownFilesType knownFilesType;
-        private final boolean readOnly;
-        private final File importFile;
-        private final long totalLines;
-        private int crIndex = -1;
-        private HashDbManager.CentralRepoHashDb newHashDb = null;
-        private final AtomicLong numLines = new AtomicLong();
-        private String errorString = "";
+    class ImportEncaseWorker extends CentralRepoImportWorker{
 
         ImportEncaseWorker(String hashSetName, String version, int orgId,
                 boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
                 boolean readOnly, File importFile){
-            this.hashSetName = hashSetName;
-            this.version = version;
-            this.orgId = orgId;
-            this.searchDuringIngest = searchDuringIngest;
-            this.sendIngestMessages = sendIngestMessages;
-            this.knownFilesType = knownFilesType;
-            this.readOnly = readOnly;
-            this.importFile = importFile;
-            this.numLines.set(0);
-            this.totalLines = getEstimatedTotalHashes();
+            super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile);
+            setEstimatedTotalHashes();
         }
 
         /**
          * Encase files have a 0x480 byte header, then each hash takes 18 bytes
          * @return Approximate number of hashes in the file
          */
-        final long getEstimatedTotalHashes(){
+        @Override
+        final void setEstimatedTotalHashes(){
            long fileSize = importFile.length();
            if(fileSize < 0x492){
-                return 1; // There's room for at most one hash
+                totalHashes = 1; // There's room for at most one hash
+                return;
            }
-            return ((fileSize - 0x492) / 18);
+            totalHashes = (fileSize - 0x492) / 18;
         }
 
-        @Override
-        public HashDbManager.HashDatabase getDatabase(){
-            return newHashDb;
-        }
-
-        @Override
-        public long getLinesProcessed(){
-            return numLines.get();
-        }
-
-        @Override
-        public int getProgressPercentage(){
-            return this.getProgress();
-        }
-
-        @Override
-        public String getError(){
-            return errorString;
-        }
-
         @Override
         protected Void doInBackground() throws Exception {
-            EncaseHashSetParser encaseParser = new EncaseHashSetParser(this.importFile.getAbsolutePath());
+            EncaseHashSetParser encaseParser = new EncaseHashSetParser(importFile.getAbsolutePath());
+            totalHashes = encaseParser.getExpectedHashes();
 
             TskData.FileKnown knownStatus;
             if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
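
The size check in the new Encase `setEstimatedTotalHashes()` follows from the layout described in its Javadoc: a 0x480-byte header plus 18 (0x12) bytes per hash record means 0x480 + 0x12 = 0x492 is the smallest file that can hold one full record. A standalone restatement of that arithmetic, using the same constants as the diff:

```java
final class EncaseEstimate {
    // Same constants as the diff: 0x480-byte header, 18-byte (0x12) records;
    // 0x480 + 0x12 = 0x492 is the smallest file that fits one full record.
    static long estimateHashes(long fileSize) {
        if (fileSize < 0x492) {
            return 1;                       // room for at most one hash
        }
        return (fileSize - 0x492) / 18;     // approximate record count
    }
}
```

For a file holding a header plus 1,000 records (0x480 + 1,000 * 18 = 19,152 bytes) this yields (19,152 - 1,170) / 18 = 999, close enough for a progress bar; `doInBackground()` then replaces the estimate with `encaseParser.getExpectedHashes()` once the header is actually parsed.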
@@ -277,7 +277,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
             }
 
             // Create an empty hashset in the central repository
-            crIndex = EamDb.getInstance().newReferenceSet(orgId, hashSetName, version, knownStatus, readOnly);
+            referenceSetID = EamDb.getInstance().newReferenceSet(orgId, hashSetName, version, knownStatus, readOnly);
 
             EamDb dbManager = EamDb.getInstance();
             CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
@@ -293,7 +293,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                 if(newHash != null){
                     EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
-                            crIndex,
+                            referenceSetID,
                             newHash,
                             knownStatus,
                             "");
@@ -305,7 +305,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                         dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                         globalInstances.clear();
 
-                        int progress = (int)(numLines.get() * 100 / totalLines);
+                        int progress = (int)(numLines.get() * 100 / totalHashes);
                         if(progress < 100){
                             this.setProgress(progress);
                         } else {
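
Both workers compute progress the same way, and both guard the result: `totalHashes` is only an estimate, so `numLines * 100 / totalHashes` can reach or exceed 100 before the file is fully read, and `SwingWorker.setProgress()` throws `IllegalArgumentException` for values outside 0..100. The `else` branch is cut off in this view; holding at 99 until the final `setProgress(100)` (visible at the start of the next hunk) is one plausible reading, sketched here:

```java
final class ProgressMath {
    // totalHashes is an estimate, so the raw percentage can overshoot 100 before
    // the import finishes; SwingWorker.setProgress() rejects values outside 0..100.
    static int boundedProgress(long linesProcessed, long totalHashes) {
        int progress = (int) (linesProcessed * 100 / totalHashes);
        return Math.min(progress, 99);   // hold below 100 until the final setProgress(100)
    }
}
```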
@@ -319,90 +319,17 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
             this.setProgress(100);
             return null;
         }
-
-        private void deleteIncompleteSet(int crIndex){
-            if(crIndex >= 0){
-                // This can be slow on large reference sets
-                Executors.newSingleThreadExecutor().execute(new Runnable() {
-                    @Override
-                    public void run() {
-                        try{
-                            EamDb.getInstance().deleteReferenceSet(crIndex);
-                        } catch (EamDbException ex2){
-                            Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
-                        }
-                    }
-                });
-            }
-        }
-
-        @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importError=Error importing hash set"})
-        @Override
-        protected void done() {
-            if(isCancelled()){
-                // If the user hit cancel, delete this incomplete hash set from the central repo
-                deleteIncompleteSet(crIndex);
-                return;
-            }
-
-            try {
-                get();
-                try{
-                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
-                            crIndex,
-                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
-                } catch (TskCoreException ex){
-                    JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
-                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
-                }
-            } catch (Exception ex) {
-                // Delete this incomplete hash set from the central repo
-                if(crIndex >= 0){
-                    try{
-                        EamDb.getInstance().deleteReferenceSet(crIndex);
-                    } catch (EamDbException ex2){
-                        Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex);
-                    }
-                }
-                errorString = Bundle.ImportCentralRepoDbProgressDialog_importError();
-            }
-        }
     }
 
-    class ImportIDXWorker extends SwingWorker<Void,Void> implements CentralRepoImportWorker{
-        private final int HASH_IMPORT_THRESHOLD = 10000;
-        private final String hashSetName;
-        private final String version;
-        private final int orgId;
-        private final boolean searchDuringIngest;
-        private final boolean sendIngestMessages;
-        private final HashDbManager.HashDb.KnownFilesType knownFilesType;
-        private final boolean readOnly;
-        private final File importFile;
-        private final long totalLines;
-        private int referenceSetID = -1;
-        private HashDbManager.CentralRepoHashDb newHashDb = null;
-        private final AtomicLong numLines = new AtomicLong();
-        private String errorString = "";
+    class ImportIDXWorker extends CentralRepoImportWorker{
 
         ImportIDXWorker(String hashSetName, String version, int orgId,
                 boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
                 boolean readOnly, File importFile){
-            this.hashSetName = hashSetName;
-            this.version = version;
-            this.orgId = orgId;
-            this.searchDuringIngest = searchDuringIngest;
-            this.sendIngestMessages = sendIngestMessages;
-            this.knownFilesType = knownFilesType;
-            this.readOnly = readOnly;
-            this.importFile = importFile;
-            this.numLines.set(0);
-            this.totalLines = getEstimatedTotalHashes();
+            super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile);
+            setEstimatedTotalHashes();
         }
 
         /**
@@ -411,29 +338,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
          * progress bar.
          * @return Approximate number of hashes in the file
          */
-        final long getEstimatedTotalHashes(){
+        @Override
+        final void setEstimatedTotalHashes(){
             long fileSize = importFile.length();
-            return (fileSize / 0x33 + 1); // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero
-        }
-
-        @Override
-        public HashDbManager.HashDatabase getDatabase(){
-            return newHashDb;
-        }
-
-        @Override
-        public long getLinesProcessed(){
-            return numLines.get();
-        }
-
-        @Override
-        public int getProgressPercentage(){
-            return this.getProgress();
-        }
-
-        @Override
-        public String getError(){
-            return errorString;
+            totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero
         }
 
         @Override
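
The IDX estimate above is a pure size heuristic: idx lines run about 0x33 (51) bytes, and the `+ 1` keeps the result nonzero because it is used as a divisor in the progress calculation. Restated with a worked example:

```java
final class IdxEstimate {
    // ~0x33 (51) bytes per idx line; "+ 1" guarantees a nonzero divisor even
    // for an empty file: e.g. 5,100,000 bytes -> 5,100,000 / 51 + 1 = 100,001.
    static long estimateHashes(long fileSize) {
        return fileSize / 0x33 + 1;
    }
}
```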
@@ -480,7 +388,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                         dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                         globalInstances.clear();
 
-                        int progress = (int)(numLines.get() * 100 / totalLines);
+                        int progress = (int)(numLines.get() * 100 / totalHashes);
                         if(progress < 100){
                             this.setProgress(progress);
                         } else {
@@ -494,54 +402,6 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
 
             return null;
         }
-
-        private void deleteIncompleteSet(int idToDelete){
-            if(idToDelete >= 0){
-                // This can be slow on large reference sets
-                Executors.newSingleThreadExecutor().execute(new Runnable() {
-                    @Override
-                    public void run() {
-                        try{
-                            EamDb.getInstance().deleteReferenceSet(idToDelete);
-                        } catch (EamDbException ex2){
-                            Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
-                        }
-                    }
-                });
-            }
-        }
-
-        @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"})
-        @Override
-        protected void done() {
-            if(isCancelled()){
-                // If the user hit cancel, delete this incomplete hash set from the central repo
-                deleteIncompleteSet(referenceSetID);
-                return;
-            }
-
-            try {
-                get();
-                try{
-                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
-                            referenceSetID,
-                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
-                } catch (TskCoreException ex){
-                    JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
-                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
-                }
-            } catch (Exception ex) {
-                // Delete this incomplete hash set from the central repo
-                if(referenceSetID >= 0){
-                    try{
-                        EamDb.getInstance().deleteReferenceSet(referenceSetID);
-                    } catch (EamDbException ex2){
-                        Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex);
-                    }
-                }
-            }
-        }
     }
 
     /**