diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java index cfc4b0b384..8ea9bc3c46 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/EncaseHashSetParser.java @@ -31,95 +31,100 @@ import org.sleuthkit.datamodel.TskCoreException; * Parser for Encase format hash sets (*.hash) */ class EncaseHashSetParser implements HashSetParser { - private final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00, - (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00}; + + private final byte[] encaseHeader = {(byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00, + (byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00}; private final String filename; // Name of the input file (saved for logging) private InputStream inputStream; // File stream for file being imported private final long expectedHashCount; // Number of hashes we expect to read from the file private int totalHashesRead = 0; // Number of hashes that have been read - + /** - * Opens the import file and parses the header. - * If this is successful, the file will be set up to call getNextHash() to - * read the hash values. + * Opens the import file and parses the header. If this is successful, the + * file will be set up to call getNextHash() to read the hash values. + * * @param filename The Encase hash set - * @throws TskCoreException There was an error opening/reading the file or it is not the correct format + * @throws TskCoreException There was an error opening/reading the file or + * it is not the correct format */ - EncaseHashSetParser(String filename) throws TskCoreException{ - try{ + EncaseHashSetParser(String filename) throws TskCoreException { + try { this.filename = filename; inputStream = new BufferedInputStream(new FileInputStream(filename)); - + // Read in and test the 16 byte header byte[] header = new byte[16]; readBuffer(header, 16); - if(! Arrays.equals(header, encaseHeader)){ + if (!Arrays.equals(header, encaseHeader)) { close(); throw new TskCoreException("File " + filename + " does not have an Encase header"); } - + // Read in the expected number of hashes (little endian) byte[] sizeBuffer = new byte[4]; readBuffer(sizeBuffer, 4); expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16) - | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); - + | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff); + // Read in a bunch of nulls byte[] filler = new byte[0x3f4]; readBuffer(filler, 0x3f4); - + // Read in the hash set name byte[] nameBuffer = new byte[0x50]; readBuffer(nameBuffer, 0x50); - + // Read in the hash set type byte[] typeBuffer = new byte[0x28]; - readBuffer(typeBuffer, 0x28); - + readBuffer(typeBuffer, 0x28); + // At this point we're past the header and ready to read in the hashes - - } catch (IOException ex){ + } catch (IOException ex) { close(); throw new TskCoreException("Error reading " + filename, ex); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { close(); throw ex; } } - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. 
+ * * @return The expected hash count */ @Override - public long getExpectedHashCount(){ + public long getExpectedHashCount() { return expectedHashCount; } - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ @Override - public boolean doneReading(){ - return(totalHashesRead >= expectedHashCount); + public boolean doneReading() { + return (totalHashesRead >= expectedHashCount); } - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ @Override - public String getNextHash() throws TskCoreException{ - if(inputStream == null){ + public String getNextHash() throws TskCoreException { + if (inputStream == null) { throw new TskCoreException("Attempting to read from null inputStream"); } - + byte[] hashBytes = new byte[16]; byte[] divider = new byte[2]; - try{ + try { readBuffer(hashBytes, 16); readBuffer(divider, 2); @@ -131,32 +136,32 @@ class EncaseHashSetParser implements HashSetParser { totalHashesRead++; return sb.toString(); - } catch (IOException ex){ + } catch (IOException ex) { throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex); } } - + /** * Closes the import file */ @Override - public final void close(){ - if(inputStream != null){ - try{ + public final void close() { + if (inputStream != null) { + try { inputStream.close(); - } catch (IOException ex){ + } catch (IOException ex) { Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex); } finally { inputStream = null; } } } - + private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException { - if(inputStream == null){ + if (inputStream == null) { throw new TskCoreException("readBuffer called on null inputStream"); } - if(length != inputStream.read(buffer)){ + if (length != inputStream.read(buffer)) { close(); throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename); } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java index e7bfcecf1e..17c629c50a 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashLookupSettingsPanel.java @@ -127,7 +127,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan hashDbOrgLabel.setText(NO_SELECTION_TEXT); hashDbReadOnlyLabel.setText(NO_SELECTION_TEXT); indexPathLabel.setText(NO_SELECTION_TEXT); - // Update indexing components. hashDbIndexStatusLabel.setText(NO_SELECTION_TEXT); @@ -162,14 +161,14 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // Update descriptive labels. 
hashDbNameLabel.setText(db.getHashSetName()); - hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName()); - try{ - if(db.isUpdateable()){ + hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName()); + try { + if (db.isUpdateable()) { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_editable()); } else { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_readOnly()); } - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_updateStatusError()); } @@ -180,30 +179,30 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan addHashesToDatabaseButton.setEnabled(false); } - if(db instanceof SleuthkitHashSet){ - SleuthkitHashSet hashDb = (SleuthkitHashSet)db; - + if (db instanceof SleuthkitHashSet) { + SleuthkitHashSet hashDb = (SleuthkitHashSet) db; + // Disable the central repo fields hashDbVersionLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbOrgLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); - + // Enable the delete button if ingest is not running deleteDatabaseButton.setEnabled(!ingestIsRunning); - + try { hashDbLocationLabel.setText(shortenPath(db.getDatabasePath())); } catch (TskCoreException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex); //NON-NLS hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT); } - + try { indexPathLabel.setText(shortenPath(hashDb.getIndexPath())); } catch (TskCoreException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex); //NON-NLS indexPathLabel.setText(ERROR_GETTING_PATH_TEXT); } - + // Update indexing components. try { if (hashDb.isIndexing()) { @@ -245,15 +244,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan indexButton.setEnabled(false); } } else { - + // Disable the file type fields/buttons indexPathLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbIndexStatusLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable()); hashDbLocationLabel.setText(Bundle.HashLookupSettingsPanel_centralRepo()); indexButton.setEnabled(false); deleteDatabaseButton.setEnabled(false); - - CentralRepoHashSet crDb = (CentralRepoHashSet)db; + + CentralRepoHashSet crDb = (CentralRepoHashSet) db; hashDbVersionLabel.setText(crDb.getVersion()); hashDbOrgLabel.setText(crDb.getOrgName()); @@ -302,13 +301,17 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan @Override @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.", "HashLookupSettingsPanel.saveFail.title=Save Fail"}) - public void saveSettings() { + public void saveSettings() { + + // Clear out the list of unsaved hashes + newReferenceSetIDs.clear(); + //Checking for for any unindexed databases List unindexed = new ArrayList<>(); for (HashDb db : hashSetManager.getAllHashSets()) { - if(db instanceof SleuthkitHashSet){ + if (db instanceof SleuthkitHashSet) { try { - SleuthkitHashSet hashDatabase = (SleuthkitHashSet)db; + SleuthkitHashSet hashDatabase = (SleuthkitHashSet) db; if (!hashDatabase.hasIndex()) { unindexed.add(hashDatabase); } @@ -320,10 +323,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // If there are unindexed databases, give the user the option to index them now. 
This // needs to be on the EDT, and will save the hash settings after completing - if(! unindexed.isEmpty()){ - SwingUtilities.invokeLater(new Runnable(){ + if (!unindexed.isEmpty()) { + SwingUtilities.invokeLater(new Runnable() { @Override - public void run(){ + public void run() { //If unindexed ones are found, show a popup box that will either index them, or remove them. if (unindexed.size() == 1) { showInvalidIndex(false, unindexed); @@ -335,7 +338,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } else { try { hashSetManager.save(); - newReferenceSetIDs.clear(); } catch (HashDbManager.HashDbManagerException ex) { SwingUtilities.invokeLater(() -> { JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_saveFail_message(), Bundle.HashLookupSettingsPanel_saveFail_title(), JOptionPane.ERROR_MESSAGE); @@ -363,20 +365,20 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan */ if (IngestManager.getInstance().isIngestRunning() == false) { // Remove any new central repo hash sets from the database - for(int refID:newReferenceSetIDs){ - try{ - if(EamDb.isEnabled()){ + for (int refID : newReferenceSetIDs) { + try { + if (EamDb.isEnabled()) { EamDb.getInstance().deleteReferenceSet(refID); } else { // This is the case where the user imported a database, then switched over to the central // repo panel and disabled it before cancelling. We can't delete the database at this point. Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.WARNING, "Error reverting central repository hash sets"); //NON-NLS } - } catch (EamDbException ex){ + } catch (EamDbException ex) { Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS } } - + HashDbManager.getInstance().loadLastSavedConfiguration(); } } @@ -398,7 +400,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan * unindexed, along with solutions. This method is related to * ModalNoButtons, to be removed at a later date. * - * @param plural Whether or not there are multiple unindexed databases + * @param plural Whether or not there are multiple unindexed databases * @param unindexed The list of unindexed databases. Can be of size 1. 
*/ private void showInvalidIndex(boolean plural, List unindexed) { @@ -471,8 +473,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan getSelectionModel().setSelectionInterval(index, index); } } - - public void selectRowByDatabase(HashDb db){ + + public void selectRowByDatabase(HashDb db) { setSelection(hashSetTableModel.getIndexByDatabase(db)); } @@ -510,7 +512,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan return hashSets.get(rowIndex).getDisplayName(); } - private boolean isValid(int rowIndex) { + private boolean isValid(int rowIndex) { try { return hashSets.get(rowIndex).isValid(); } catch (TskCoreException ex) { @@ -543,15 +545,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan } } - int getIndexByDatabase(HashDb db){ + int getIndexByDatabase(HashDb db) { for (int i = 0; i < hashSets.size(); ++i) { if (hashSets.get(i).equals(db)) { return i; } } - return -1; + return -1; } - + @Deprecated int getIndexByName(String name) { for (int i = 0; i < hashSets.size(); ++i) { @@ -934,11 +936,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan private void createDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createDatabaseButtonActionPerformed HashDb hashDb = new HashDbCreateDatabaseDialog().getHashDatabase(); if (null != hashDb) { - if(hashDb instanceof CentralRepoHashSet){ - int newDbIndex = ((CentralRepoHashSet)hashDb).getReferenceSetID(); + if (hashDb instanceof CentralRepoHashSet) { + int newDbIndex = ((CentralRepoHashSet) hashDb).getReferenceSetID(); newReferenceSetIDs.add(newDbIndex); } - + hashSetTableModel.refreshModel(); ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb); firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); @@ -960,7 +962,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan // Add a listener for the INDEXING_DONE event. This listener will update // the UI. 
- SleuthkitHashSet hashDb = (SleuthkitHashSet)hashDatabase; + SleuthkitHashSet hashDb = (SleuthkitHashSet) hashDatabase; hashDb.addPropertyChangeListener(new PropertyChangeListener() { @Override public void propertyChange(PropertyChangeEvent evt) { @@ -988,11 +990,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan private void importDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importDatabaseButtonActionPerformed HashDb hashDb = new HashDbImportDatabaseDialog().getHashDatabase(); if (null != hashDb) { - if(hashDb instanceof CentralRepoHashSet){ - int newReferenceSetID = ((CentralRepoHashSet)hashDb).getReferenceSetID(); + if (hashDb instanceof CentralRepoHashSet) { + int newReferenceSetID = ((CentralRepoHashSet) hashDb).getReferenceSetID(); newReferenceSetIDs.add(newReferenceSetID); } - + hashSetTableModel.refreshModel(); ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb); firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); @@ -1002,21 +1004,21 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan @Messages({}) private void deleteDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteDatabaseButtonActionPerformed if (JOptionPane.showConfirmDialog(null, - NbBundle.getMessage(this.getClass(), - "HashDbConfigPanel.deleteDbActionConfirmMsg"), - NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"), - JOptionPane.YES_NO_OPTION, - JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { - HashDb hashDb = ((HashSetTable) hashSetTable).getSelection(); - if (hashDb != null) { - try { - hashSetManager.removeHashDatabaseNoSave(hashDb); - } catch (HashDbManager.HashDbManagerException ex) { - JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName())); + NbBundle.getMessage(this.getClass(), + "HashDbConfigPanel.deleteDbActionConfirmMsg"), + NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"), + JOptionPane.YES_NO_OPTION, + JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) { + HashDb hashDb = ((HashSetTable) hashSetTable).getSelection(); + if (hashDb != null) { + try { + hashSetManager.removeHashDatabaseNoSave(hashDb); + } catch (HashDbManager.HashDbManagerException ex) { + JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName())); + } + hashSetTableModel.refreshModel(); + firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); } - hashSetTableModel.refreshModel(); - firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null); - } } }//GEN-LAST:event_deleteDatabaseButtonActionPerformed diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java index fc45856af9..8a7a3ae034 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashSetParser.java @@ -21,27 +21,31 @@ package org.sleuthkit.autopsy.modules.hashdatabase; import org.sleuthkit.datamodel.TskCoreException; interface HashSetParser { - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ String 
getNextHash() throws TskCoreException; - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ boolean doneReading(); - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. + * * @return The expected hash count */ long getExpectedHashCount(); - + /** * Closes the import file */ diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java index af66b994e1..0c1b694e1b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/IdxHashSetParser.java @@ -31,35 +31,38 @@ import org.sleuthkit.datamodel.TskCoreException; * Parser for idx files (*.idx) */ class IdxHashSetParser implements HashSetParser { + private final String filename; // Name of the input file (saved for logging) private BufferedReader reader; // Input file private final long totalHashes; // Estimated number of hashes private boolean doneReading = false; // Flag for if we've hit the end of the file - - IdxHashSetParser(String filename) throws TskCoreException{ + + IdxHashSetParser(String filename) throws TskCoreException { this.filename = filename; - try{ + try { reader = new BufferedReader(new FileReader(filename)); - } catch (FileNotFoundException ex){ + } catch (FileNotFoundException ex) { throw new TskCoreException("Error opening file " + filename, ex); } - + // Estimate the total number of hashes in the file since counting them all can be slow File importFile = new File(filename); long fileSize = importFile.length(); totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero } - + /** * Get the next hash to import - * @return The hash as a string, or null if the end of file was reached without error - * @throws TskCoreException + * + * @return The hash as a string, or null if the end of file was reached + * without error + * @throws TskCoreException */ @Override public String getNextHash() throws TskCoreException { String line; - - try{ + + try { while ((line = reader.readLine()) != null) { String[] parts = line.split("\\|"); @@ -68,45 +71,47 @@ class IdxHashSetParser implements HashSetParser { if (parts.length != 2 || parts[0].length() == 41) { continue; } - + return parts[0].toLowerCase(); } - } catch (IOException ex){ + } catch (IOException ex) { throw new TskCoreException("Error reading file " + filename, ex); } - + // We've run out of data doneReading = true; return null; } - + /** * Check if there are more hashes to read + * * @return true if we've read all expected hash values, false otherwise */ @Override public boolean doneReading() { return doneReading; } - + /** - * Get the expected number of hashes in the file. - * This number can be an estimate. + * Get the expected number of hashes in the file. This number can be an + * estimate. 
+ * * @return The expected hash count */ @Override public long getExpectedHashCount() { return totalHashes; } - + /** * Closes the import file */ @Override public void close() { - try{ + try { reader.close(); - } catch (IOException ex){ + } catch (IOException ex) { Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex); } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java index 0a6e6cf644..3421a06044 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java @@ -45,31 +45,31 @@ import org.sleuthkit.datamodel.TskData; /** * Imports a hash set into the central repository and updates a progress dialog */ -class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{ +class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener { private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog - @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress", - }) + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",}) ImportCentralRepoDbProgressDialog() { super((JFrame) WindowManager.getDefault().getMainWindow(), Bundle.ImportCentralRepoDbProgressDialog_title_text(), true); - - initComponents(); + + initComponents(); customizeComponents(); } - - private void customizeComponents(){ + + private void customizeComponents() { // This is preventing the user from closing the dialog using the X setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE); - + bnOk.setEnabled(false); } - + /** - * Import the selected hash set into the central repository. - * Will bring up a progress dialog while the import is in progress. + * Import the selected hash set into the central repository. Will bring up a + * progress dialog while the import is in progress. + * * @param hashSetName * @param version * @param orgId @@ -77,57 +77,57 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P * @param sendIngestMessages * @param knownFilesType * @param readOnly - * @param importFileName + * @param importFileName */ void importFile(String hashSetName, String version, int orgId, boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, String importFileName){ + boolean readOnly, String importFileName) { - worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, + worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFileName); worker.addPropertyChangeListener(this); worker.execute(); - - setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow()); + + setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow()); this.setVisible(true); } - + /** - * Get the HashDb object for the newly imported data. - * Should be called after importFile completes. + * Get the HashDb object for the newly imported data. Should be called after + * importFile completes. 
+ * * @return The new HashDb object or null if the import failed/was canceled */ - HashDbManager.HashDb getDatabase(){ - if(worker != null){ + HashDbManager.HashDb getDatabase() { + if (worker != null) { return worker.getDatabase(); } return null; } - - + /** - * Updates the dialog from events from the worker. - * The two events we handle are progress updates and - * the done event. - * @param evt + * Updates the dialog from events from the worker. The two events we handle + * are progress updates and the done event. + * + * @param evt */ @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"}) @Override public void propertyChange(PropertyChangeEvent evt) { - - if("progress".equals(evt.getPropertyName())){ + + if ("progress".equals(evt.getPropertyName())) { // The progress has been updated. Update the progress bar and text progressBar.setValue(worker.getProgress()); lbProgress.setText(getProgressString()); } else if ("state".equals(evt.getPropertyName()) && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) { - + // The worker is done processing // Disable cancel button and enable ok bnCancel.setEnabled(false); bnOk.setEnabled(true); - - if(worker.getImportSuccess()){ + + if (worker.getImportSuccess()) { // If the import succeeded, finish the progress bar and display the // total number of imported hashes progressBar.setValue(progressBar.getMaximum()); @@ -140,13 +140,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } } } - + @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"}) - private String getProgressString(){ + private String getProgressString() { return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message(); } - - private class CentralRepoImportWorker extends SwingWorker{ + + private class CentralRepoImportWorker extends SwingWorker { + private final int HASH_IMPORT_THRESHOLD = 10000; private final String hashSetName; private final String version; @@ -160,11 +161,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private final AtomicInteger referenceSetID = new AtomicInteger(); private final AtomicLong hashCount = new AtomicLong(); private final AtomicBoolean importSuccess = new AtomicBoolean(); - + CentralRepoImportWorker(String hashSetName, String version, int orgId, - boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, - boolean readOnly, String importFileName){ - + boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType, + boolean readOnly, String importFileName) { + this.hashSetName = hashSetName; this.version = version; this.orgId = orgId; @@ -177,48 +178,57 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P this.importSuccess.set(false); this.referenceSetID.set(-1); } - + /** * Get the newly created database - * @return the imported database. May be null if an error occurred or the user canceled + * + * @return the imported database. 
May be null if an error occurred or + * the user canceled */ - synchronized HashDbManager.CentralRepoHashSet getDatabase(){ + synchronized HashDbManager.CentralRepoHashSet getDatabase() { return newHashDb; } - + /** * Get the number of hashes that have been read in so far + * * @return current hash count */ - long getNumHashesProcessed(){ + long getNumHashesProcessed() { return hashCount.get(); } - + /** * Check if the import was successful or if there was an error. - * @return true if the import process completed without error, false otherwise + * + * @return true if the import process completed without error, false + * otherwise */ - boolean getImportSuccess(){ + boolean getImportSuccess() { return importSuccess.get(); } - + @Override protected Void doInBackground() throws Exception { - + // Create the hash set parser HashSetParser hashSetParser; - if(importFileName.toLowerCase().endsWith(".idx")){ + if (importFileName.toLowerCase().endsWith(".idx")) { hashSetParser = new IdxHashSetParser(importFileName); } else if(importFileName.toLowerCase().endsWith(".hash")){ hashSetParser = new EncaseHashSetParser(importFileName); } else if(importFileName.toLowerCase().endsWith(".kdb")){ hashSetParser = new KdbHashSetParser(importFileName); } else { - // We've gotten here with a format that can't be processed - throw new TskCoreException("Hash set to import is an unknown format : " + importFileName); + if (importFileName.toLowerCase().endsWith(".hash")) { + hashSetParser = new EncaseHashSetParser(importFileName); + } else { + // We've gotten here with a format that can't be processed + throw new TskCoreException("Hash set to import is an unknown format : " + importFileName); + } } - try{ + try { // Conver to the FileKnown enum used by EamGlobalSet TskData.FileKnown knownStatus; if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) { @@ -226,7 +236,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P } else { knownStatus = TskData.FileKnown.BAD; } - + // Create an empty hashset in the central repository EamDb dbManager = EamDb.getInstance(); referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly))); @@ -238,30 +248,30 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P // Holds the current batch of hashes that need to be written to the central repo Set globalInstances = new HashSet<>(); - while (! 
hashSetParser.doneReading()) { - if(isCancelled()){ + while (!hashSetParser.doneReading()) { + if (isCancelled()) { return null; } String newHash = hashSetParser.getNextHash(); - if(newHash != null){ + if (newHash != null) { EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance( - referenceSetID.get(), - newHash, - knownStatus, + referenceSetID.get(), + newHash, + knownStatus, ""); globalInstances.add(eamGlobalFileInstance); // If we've hit the threshold for writing the hashes, write them // all to the central repo - if(hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){ + if (hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0) { dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType); globalInstances.clear(); - int progress = (int)(hashCount.get() * 100 / hashSetParser.getExpectedHashCount()); - if(progress < 100){ + int progress = (int) (hashCount.get() * 100 / hashSetParser.getExpectedHashCount()); + if (progress < 100) { this.setProgress(progress); } else { this.setProgress(99); @@ -278,41 +288,41 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P hashSetParser.close(); } } - - private void deleteIncompleteSet(){ - if(referenceSetID.get() >= 0){ - + + private void deleteIncompleteSet() { + if (referenceSetID.get() >= 0) { + // This can be slow on large reference sets Executors.newSingleThreadExecutor().execute(new Runnable() { - @Override + @Override public void run() { - try{ + try { EamDb.getInstance().deleteReferenceSet(referenceSetID.get()); - } catch (EamDbException ex2){ + } catch (EamDbException ex2) { Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2); } } }); } } - + @Override synchronized protected void done() { - - if(isCancelled()){ + + if (isCancelled()) { // If the user hit cancel, delete this incomplete hash set from the central repo deleteIncompleteSet(); return; } - + try { get(); - try{ - newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, - referenceSetID.get(), + try { + newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version, + referenceSetID.get(), searchDuringIngest, sendIngestMessages, knownFilesType, readOnly); importSuccess.set(true); - } catch (TskCoreException ex){ + } catch (TskCoreException ex) { Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex); } } catch (Exception ex) { @@ -320,10 +330,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P deleteIncompleteSet(); Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex); } - } - + } + } - + /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always @@ -417,4 +427,4 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P private javax.swing.JLabel lbProgress; private javax.swing.JProgressBar progressBar; // End of variables declaration//GEN-END:variables -} \ No newline at end of file +}
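
The sketches below illustrate techniques this patch relies on; none of them are part of the patch itself, and the class and method names in them are hypothetical. The first exercises the Encase (*.hash) header handling from EncaseHashSetParser in isolation: the same 16-byte magic value, followed by the expected hash count stored as a 4-byte little-endian integer (after which the parser skips 0x3f4 filler bytes, an 0x50-byte hash set name and an 0x28-byte type field before the 16-byte hashes begin). Unlike the patch's readBuffer, which checks the return value of a single InputStream.read call, the sketch uses DataInputStream.readFully, since read may legitimately return fewer bytes than requested; that substitution is a deliberate difference from the patch, not a description of it.

import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;

/** Minimal sketch: validate an Encase (*.hash) header and read the expected hash count. */
public class EncaseHeaderSketch {

    private static final byte[] ENCASE_MAGIC = {
        (byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00,
        (byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00};

    public static long readExpectedHashCount(String filename) throws IOException {
        try (DataInputStream in = new DataInputStream(new BufferedInputStream(new FileInputStream(filename)))) {
            byte[] header = new byte[16];
            in.readFully(header);                 // throws EOFException instead of silently short-reading
            if (!Arrays.equals(header, ENCASE_MAGIC)) {
                throw new IOException(filename + " does not have an Encase header");
            }
            byte[] sizeBuffer = new byte[4];
            in.readFully(sizeBuffer);             // expected hash count, little endian
            return ((sizeBuffer[3] & 0xffL) << 24) | ((sizeBuffer[2] & 0xffL) << 16)
                    | ((sizeBuffer[1] & 0xffL) << 8) | (sizeBuffer[0] & 0xffL);
        }
    }

    public static void main(String[] args) throws IOException {
        System.out.println("Expected hashes: " + readExpectedHashCount(args[0]));
    }
}

An equivalent way to decode the count is ByteBuffer.wrap(sizeBuffer).order(ByteOrder.LITTLE_ENDIAN).getInt(), which avoids the manual shifting.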
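
The parser selection in CentralRepoImportWorker.doInBackground dispatches on the lowercased file extension (.idx, .hash, .kdb). Note that the final else branch added by the patch re-tests for ".hash" even though the second else-if already handles that extension, so the inner check is unreachable and only the throw in that branch can execute. A hypothetical helper living in the same class (and package, so it can see the package-private parsers; KdbHashSetParser is referenced by the patch but not shown in this diff) would express the same dispatch without the dead branch:

    // Hypothetical factory, not part of the patch: one parser per supported extension.
    private static HashSetParser createParser(String importFileName) throws TskCoreException {
        String name = importFileName.toLowerCase();
        if (name.endsWith(".idx")) {
            return new IdxHashSetParser(importFileName);
        }
        if (name.endsWith(".hash")) {
            return new EncaseHashSetParser(importFileName);
        }
        if (name.endsWith(".kdb")) {
            return new KdbHashSetParser(importFileName);
        }
        // No recognized extension: fail the import up front, as the patch does.
        throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
    }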
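
The import loop itself batches hashes and writes them to the central repository every HASH_IMPORT_THRESHOLD (10,000) entries, and it caps reported progress at 99% because getExpectedHashCount() may only be an estimate (IdxHashSetParser derives it as fileSize / 0x33 + 1). The trimmed sketch below, assumed to sit in the same hashdatabase package, keeps the batching and progress math but collects plain hash strings and takes a Consumer in place of the EamDb bulk insert of EamGlobalFileInstance objects, so it runs without a central repository; importHashes and flushBatch are made-up names, and the progress goes to stdout rather than SwingWorker.setProgress. The sketch also flushes the remaining partial batch after the loop; the worker's equivalent of that final write is not visible in the hunks shown above.

import java.util.HashSet;
import java.util.Set;
import java.util.function.Consumer;

/** Sketch of the batched import loop in CentralRepoImportWorker.doInBackground. */
public class BatchedImportSketch {

    private static final int HASH_IMPORT_THRESHOLD = 10_000;  // same batch size as the patch

    static long importHashes(HashSetParser parser, Consumer<Set<String>> flushBatch) throws TskCoreException {
        Set<String> batch = new HashSet<>();
        long hashCount = 0;
        try {
            while (!parser.doneReading()) {
                String newHash = parser.getNextHash();     // null signals a clean end of file
                if (newHash == null) {
                    continue;
                }
                batch.add(newHash);
                if (++hashCount % HASH_IMPORT_THRESHOLD == 0) {
                    flushBatch.accept(batch);              // bulk write, then start a fresh batch
                    batch.clear();
                    // The expected count can be an estimate, so reported progress is capped at 99%.
                    int progress = (int) (hashCount * 100 / parser.getExpectedHashCount());
                    System.out.println("progress: " + Math.min(progress, 99) + "%");
                }
            }
            if (!batch.isEmpty()) {
                flushBatch.accept(batch);                  // write the final partial batch
            }
            return hashCount;
        } finally {
            parser.close();                                // always release the import file
        }
    }
}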