Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 3202-TagsMessagingOnStatusChange

William Schaefer 2017-11-22 12:22:36 -05:00
commit 84181a922c
11 changed files with 766 additions and 255 deletions

View File

@@ -94,14 +94,6 @@ public interface EamDb {
return EamDbUtil.useCentralRepo()
&& EamDbPlatformEnum.getSelectedPlatform() != EamDbPlatformEnum.DISABLED;
}
/**
* Placeholder version to use for non-read-only databases
* @return The version that will be stored in the database
*/
static String getDefaultVersion() {
return "";
}
/**
* Add a new name/value pair in the db_info table.

View File

@@ -0,0 +1,168 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Parser for Encase format hash sets (*.hash)
*/
class EncaseHashSetParser implements HashSetParser {
private final byte[] encaseHeader = {(byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00,
(byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00};
private final String filename; // Name of the input file (saved for logging)
private InputStream inputStream; // File stream for file being imported
private final long expectedHashCount; // Number of hashes we expect to read from the file
private int totalHashesRead = 0; // Number of hashes that have been read
/**
* Opens the import file and parses the header. If this is successful, the
* file will be set up to call getNextHash() to read the hash values.
*
* @param filename The Encase hash set
* @throws TskCoreException There was an error opening/reading the file or
* it is not the correct format
*/
EncaseHashSetParser(String filename) throws TskCoreException {
try {
this.filename = filename;
inputStream = new BufferedInputStream(new FileInputStream(filename));
// Read in and test the 16 byte header
byte[] header = new byte[16];
readBuffer(header, 16);
if (!Arrays.equals(header, encaseHeader)) {
close();
throw new TskCoreException("File " + filename + " does not have an Encase header");
}
// Read in the expected number of hashes (little endian)
byte[] sizeBuffer = new byte[4];
readBuffer(sizeBuffer, 4);
expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
| ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);
// Read in a bunch of nulls
byte[] filler = new byte[0x3f4];
readBuffer(filler, 0x3f4);
// Read in the hash set name
byte[] nameBuffer = new byte[0x50];
readBuffer(nameBuffer, 0x50);
// Read in the hash set type
byte[] typeBuffer = new byte[0x28];
readBuffer(typeBuffer, 0x28);
// At this point we're past the header and ready to read in the hashes
} catch (IOException ex) {
close();
throw new TskCoreException("Error reading " + filename, ex);
} catch (TskCoreException ex) {
close();
throw ex;
}
}
/**
* Get the expected number of hashes in the file. This number can be an
* estimate.
*
* @return The expected hash count
*/
@Override
public long getExpectedHashCount() {
return expectedHashCount;
}
/**
* Check if there are more hashes to read
*
* @return true if we've read all expected hash values, false otherwise
*/
@Override
public boolean doneReading() {
return (totalHashesRead >= expectedHashCount);
}
/**
* Get the next hash to import
*
* @return The hash as a string, or null if the end of file was reached
* without error
* @throws TskCoreException
*/
@Override
public String getNextHash() throws TskCoreException {
if (inputStream == null) {
throw new TskCoreException("Attempting to read from null inputStream");
}
byte[] hashBytes = new byte[16];
byte[] divider = new byte[2];
try {
readBuffer(hashBytes, 16);
readBuffer(divider, 2);
StringBuilder sb = new StringBuilder();
for (byte b : hashBytes) {
sb.append(String.format("%02x", b));
}
totalHashesRead++;
return sb.toString();
} catch (IOException ex) {
throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex);
}
}
/**
* Closes the import file
*/
@Override
public final void close() {
if (inputStream != null) {
try {
inputStream.close();
} catch (IOException ex) {
Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex);
} finally {
inputStream = null;
}
}
}
private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException {
if (inputStream == null) {
throw new TskCoreException("readBuffer called on null inputStream");
}
if (length != inputStream.read(buffer)) {
throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename);
}
}
}
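
The 4-byte count field read above is little-endian, so the constructor reassembles it starting from the least significant byte. The same decode in isolation, on an illustrative buffer (not taken from a real hash set):

// Little-endian decode: sizeBuffer[0] is the least significant byte.
byte[] sizeBuffer = {(byte) 0x10, (byte) 0x27, (byte) 0x00, (byte) 0x00};
long count = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
        | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);
// 0x00002710 == 10000, so this file would report 10000 expected hashes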

View File

@@ -423,7 +423,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {
}//GEN-LAST:event_saveAsButtonActionPerformed
@NbBundle.Messages({"HashDbCreateDatabaseDialog.missingOrg=An organization must be selected",
"HashDbCreateDatabaseDialog.duplicateName=A hashset with this name and version already exists",
"HashDbCreateDatabaseDialog.duplicateName=A hashset with this name already exists",
"HashDbCreateDatabaseDialog.databaseLookupError=Error accessing central repository",
"HashDbCreateDatabaseDialog.databaseCreationError=Error creating new hash set"
})
@@ -500,7 +500,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {
} else {
// Check if a hash set with the same name/version already exists
try{
if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), EamDb.getDefaultVersion())){
if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), "")){
JOptionPane.showMessageDialog(this,
NbBundle.getMessage(this.getClass(),
"HashDbCreateDatabaseDialog.duplicateName"),
@@ -522,9 +522,9 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {
try{
int referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(selectedOrg.getOrgID(), hashSetNameTextField.getText(),
EamDb.getDefaultVersion(), fileKnown, false));
"", fileKnown, false));
newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetNameTextField.getText(),
EamDb.getDefaultVersion(), referenceSetID,
"", referenceSetID,
true, sendIngestMessagesCheckbox.isSelected(), type, false);
} catch (EamDbException | TskCoreException ex){
Logger.getLogger(HashDbImportDatabaseDialog.class.getName()).log(Level.SEVERE, "Error creating new reference set", ex);

View File

@@ -88,11 +88,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
fileChooser.setMultiSelectionEnabled(false);
}
@NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx only)"})
@NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.kdb, .idx or .hash)"})
private void updateFileChooserFilter() {
fileChooser.resetChoosableFileFilters();
if(centralRepoRadioButton.isSelected()){
String[] EXTENSION = new String[]{"idx"}; //NON-NLS
String[] EXTENSION = new String[]{"kdb", "idx", "hash", "Hash"}; //NON-NLS
FileNameExtensionFilter filter = new FileNameExtensionFilter(
NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.centralRepoExtFilter.text"), EXTENSION);
fileChooser.setFileFilter(filter);
@@ -447,7 +447,8 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
@NbBundle.Messages({"HashDbImportDatabaseDialog.missingVersion=A version must be entered",
"HashDbImportDatabaseDialog.missingOrg=An organization must be selected",
"HashDbImportDatabaseDialog.duplicateName=A hashset with this name and version already exists",
"HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository"
"HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository",
"HashDbImportDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered."
})
private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed
// Note that the error handlers in this method call return without disposing of the
@@ -456,7 +457,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
if (hashSetNameTextField.getText().isEmpty()) {
JOptionPane.showMessageDialog(this,
NbBundle.getMessage(this.getClass(),
"HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg"),
"HashDbImportDatabaseDialog.mustEnterHashSetNameMsg"),
NbBundle.getMessage(this.getClass(),
"HashDbImportDatabaseDialog.importHashDbErr"),
JOptionPane.ERROR_MESSAGE);
@@ -464,7 +465,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
}
if(centralRepoRadioButton.isSelected()){
if(versionTextField.getText().isEmpty()){
if(readOnlyCheckbox.isSelected() && versionTextField.getText().isEmpty()){
JOptionPane.showMessageDialog(this,
NbBundle.getMessage(this.getClass(),
"HashDbImportDatabaseDialog.missingVersion"),
@@ -557,7 +558,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
version = versionTextField.getText();
} else {
// Editable databases don't have a version
version = EamDb.getDefaultVersion();
version = "";
}
ImportCentralRepoDbProgressDialog progressDialog = new ImportCentralRepoDbProgressDialog();
progressDialog.importFile(hashSetNameTextField.getText(), version,

View File

@@ -728,8 +728,6 @@ public class HashDbManager implements PropertyChangeListener {
public abstract HashDb.KnownFilesType getKnownFilesType();
public abstract boolean getSearchDuringIngest();
abstract boolean getDefaultSearchDuringIngest();
abstract void setSearchDuringIngest(boolean useForIngest);
@@ -869,12 +867,6 @@ public class HashDbManager implements PropertyChangeListener {
public boolean getSearchDuringIngest() {
return searchDuringIngest;
}
@Override
boolean getDefaultSearchDuringIngest(){
// File type hash sets are on by default
return true;
}
@Override
void setSearchDuringIngest(boolean useForIngest) {
@@ -1176,12 +1168,6 @@ public class HashDbManager implements PropertyChangeListener {
public boolean getSearchDuringIngest() {
return searchDuringIngest;
}
@Override
boolean getDefaultSearchDuringIngest(){
// Central repo hash sets are off by default
return false;
}
@Override
void setSearchDuringIngest(boolean useForIngest) {

View File

@@ -135,8 +135,8 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings {
}
}
// We didn't find it, so use the default value
return db.getDefaultSearchDuringIngest();
// We didn't find it, so use the value in the HashDb object
return db.getSearchDuringIngest();
}
/**

View File

@@ -127,7 +127,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
hashDbOrgLabel.setText(NO_SELECTION_TEXT);
hashDbReadOnlyLabel.setText(NO_SELECTION_TEXT);
indexPathLabel.setText(NO_SELECTION_TEXT);
// Update indexing components.
hashDbIndexStatusLabel.setText(NO_SELECTION_TEXT);
@@ -162,14 +161,14 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
// Update descriptive labels.
hashDbNameLabel.setText(db.getHashSetName());
hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName());
try{
if(db.isUpdateable()){
hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName());
try {
if (db.isUpdateable()) {
hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_editable());
} else {
hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_readOnly());
}
} catch (TskCoreException ex){
} catch (TskCoreException ex) {
hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_updateStatusError());
}
@@ -180,30 +179,30 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
addHashesToDatabaseButton.setEnabled(false);
}
if(db instanceof SleuthkitHashSet){
SleuthkitHashSet hashDb = (SleuthkitHashSet)db;
if (db instanceof SleuthkitHashSet) {
SleuthkitHashSet hashDb = (SleuthkitHashSet) db;
// Disable the central repo fields
hashDbVersionLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
hashDbOrgLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
// Enable the delete button if ingest is not running
deleteDatabaseButton.setEnabled(!ingestIsRunning);
try {
hashDbLocationLabel.setText(shortenPath(db.getDatabasePath()));
} catch (TskCoreException ex) {
Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex); //NON-NLS
hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT);
}
try {
indexPathLabel.setText(shortenPath(hashDb.getIndexPath()));
} catch (TskCoreException ex) {
Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex); //NON-NLS
indexPathLabel.setText(ERROR_GETTING_PATH_TEXT);
}
// Update indexing components.
try {
if (hashDb.isIndexing()) {
@@ -245,15 +244,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
indexButton.setEnabled(false);
}
} else {
// Disable the file type fields/buttons
indexPathLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
hashDbIndexStatusLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
hashDbLocationLabel.setText(Bundle.HashLookupSettingsPanel_centralRepo());
indexButton.setEnabled(false);
deleteDatabaseButton.setEnabled(false);
CentralRepoHashSet crDb = (CentralRepoHashSet)db;
CentralRepoHashSet crDb = (CentralRepoHashSet) db;
hashDbVersionLabel.setText(crDb.getVersion());
hashDbOrgLabel.setText(crDb.getOrgName());
@@ -302,13 +301,17 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
@Override
@Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.",
"HashLookupSettingsPanel.saveFail.title=Save Fail"})
public void saveSettings() {
public void saveSettings() {
// Clear out the list of new central repo hash sets. They don't need to be
// indexed so will all be saved on both code paths.
newReferenceSetIDs.clear();
//Checking for any unindexed databases
List<SleuthkitHashSet> unindexed = new ArrayList<>();
for (HashDb db : hashSetManager.getAllHashSets()) {
if(db instanceof SleuthkitHashSet){
if (db instanceof SleuthkitHashSet) {
try {
SleuthkitHashSet hashDatabase = (SleuthkitHashSet)db;
SleuthkitHashSet hashDatabase = (SleuthkitHashSet) db;
if (!hashDatabase.hasIndex()) {
unindexed.add(hashDatabase);
}
@@ -320,10 +323,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
// If there are unindexed databases, give the user the option to index them now. This
// needs to be on the EDT, and will save the hash settings after completing
if(! unindexed.isEmpty()){
SwingUtilities.invokeLater(new Runnable(){
if (!unindexed.isEmpty()) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run(){
public void run() {
//If unindexed ones are found, show a popup box that will either index them, or remove them.
if (unindexed.size() == 1) {
showInvalidIndex(false, unindexed);
@@ -362,20 +365,19 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
*/
if (IngestManager.getInstance().isIngestRunning() == false) {
// Remove any new central repo hash sets from the database
for(int refID:newReferenceSetIDs){
try{
if(EamDb.isEnabled()){
for (int refID : newReferenceSetIDs) {
try {
if (EamDb.isEnabled()) {
EamDb.getInstance().deleteReferenceSet(refID);
} else {
// This is the case where the user imported a database, then switched over to the central
// repo panel and disabled it before cancelling. We can't delete the database at this point.
Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.WARNING, "Error reverting central repository hash sets"); //NON-NLS
}
} catch (EamDbException ex){
} catch (EamDbException ex) {
Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS
}
}
HashDbManager.getInstance().loadLastSavedConfiguration();
}
}
@@ -397,7 +399,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
* unindexed, along with solutions. This method is related to
* ModalNoButtons, to be removed at a later date.
*
* @param plural Whether or not there are multiple unindexed databases
* @param plural Whether or not there are multiple unindexed databases
* @param unindexed The list of unindexed databases. Can be of size 1.
*/
private void showInvalidIndex(boolean plural, List<SleuthkitHashSet> unindexed) {
@@ -470,8 +472,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
getSelectionModel().setSelectionInterval(index, index);
}
}
public void selectRowByDatabase(HashDb db){
public void selectRowByDatabase(HashDb db) {
setSelection(hashSetTableModel.getIndexByDatabase(db));
}
@@ -509,7 +511,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
return hashSets.get(rowIndex).getDisplayName();
}
private boolean isValid(int rowIndex) {
private boolean isValid(int rowIndex) {
try {
return hashSets.get(rowIndex).isValid();
} catch (TskCoreException ex) {
@@ -542,15 +544,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
}
}
int getIndexByDatabase(HashDb db){
int getIndexByDatabase(HashDb db) {
for (int i = 0; i < hashSets.size(); ++i) {
if (hashSets.get(i).equals(db)) {
return i;
}
}
return -1;
return -1;
}
@Deprecated
int getIndexByName(String name) {
for (int i = 0; i < hashSets.size(); ++i) {
@@ -933,11 +935,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
private void createDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createDatabaseButtonActionPerformed
HashDb hashDb = new HashDbCreateDatabaseDialog().getHashDatabase();
if (null != hashDb) {
if(hashDb instanceof CentralRepoHashSet){
int newDbIndex = ((CentralRepoHashSet)hashDb).getReferenceSetID();
if (hashDb instanceof CentralRepoHashSet) {
int newDbIndex = ((CentralRepoHashSet) hashDb).getReferenceSetID();
newReferenceSetIDs.add(newDbIndex);
}
hashSetTableModel.refreshModel();
((HashSetTable) hashSetTable).selectRowByDatabase(hashDb);
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
@@ -959,7 +961,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
// Add a listener for the INDEXING_DONE event. This listener will update
// the UI.
SleuthkitHashSet hashDb = (SleuthkitHashSet)hashDatabase;
SleuthkitHashSet hashDb = (SleuthkitHashSet) hashDatabase;
hashDb.addPropertyChangeListener(new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
@@ -987,11 +989,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
private void importDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importDatabaseButtonActionPerformed
HashDb hashDb = new HashDbImportDatabaseDialog().getHashDatabase();
if (null != hashDb) {
if(hashDb instanceof CentralRepoHashSet){
int newReferenceSetID = ((CentralRepoHashSet)hashDb).getReferenceSetID();
if (hashDb instanceof CentralRepoHashSet) {
int newReferenceSetID = ((CentralRepoHashSet) hashDb).getReferenceSetID();
newReferenceSetIDs.add(newReferenceSetID);
}
hashSetTableModel.refreshModel();
((HashSetTable) hashSetTable).selectRowByDatabase(hashDb);
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
@@ -1001,21 +1003,21 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
@Messages({})
private void deleteDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteDatabaseButtonActionPerformed
if (JOptionPane.showConfirmDialog(null,
NbBundle.getMessage(this.getClass(),
"HashDbConfigPanel.deleteDbActionConfirmMsg"),
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"),
JOptionPane.YES_NO_OPTION,
JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) {
HashDb hashDb = ((HashSetTable) hashSetTable).getSelection();
if (hashDb != null) {
try {
hashSetManager.removeHashDatabaseNoSave(hashDb);
} catch (HashDbManager.HashDbManagerException ex) {
JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName()));
NbBundle.getMessage(this.getClass(),
"HashDbConfigPanel.deleteDbActionConfirmMsg"),
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"),
JOptionPane.YES_NO_OPTION,
JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) {
HashDb hashDb = ((HashSetTable) hashSetTable).getSelection();
if (hashDb != null) {
try {
hashSetManager.removeHashDatabaseNoSave(hashDb);
} catch (HashDbManager.HashDbManagerException ex) {
JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName()));
}
hashSetTableModel.refreshModel();
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
}
hashSetTableModel.refreshModel();
firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
}
}
}//GEN-LAST:event_deleteDatabaseButtonActionPerformed

View File

@@ -0,0 +1,53 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import org.sleuthkit.datamodel.TskCoreException;
interface HashSetParser {
/**
* Get the next hash to import
*
* @return The hash as a string, or null if the end of file was reached
* without error
* @throws TskCoreException
*/
String getNextHash() throws TskCoreException;
/**
* Check if there are more hashes to read
*
* @return true if we've read all expected hash values, false otherwise
*/
boolean doneReading();
/**
* Get the expected number of hashes in the file. This number can be an
* estimate.
*
* @return The expected hash count
*/
long getExpectedHashCount();
/**
* Closes the import file
*/
void close();
}
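
All three parsers added in this commit (idx, Encase, kdb) are consumed through this interface. A minimal driver loop, assuming a hypothetical file path and a surrounding method that throws TskCoreException:

// Sketch only; "sample.idx" is an illustrative path.
HashSetParser parser = new IdxHashSetParser("sample.idx");
try {
    long expected = parser.getExpectedHashCount(); // may be an estimate
    while (!parser.doneReading()) {
        String hash = parser.getNextHash();
        if (hash == null) {
            break; // clean end of file
        }
        // ... store or display the hash; "expected" can drive a progress bar ...
    }
} finally {
    parser.close();
}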

View File

@@ -0,0 +1,118 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Parser for idx files (*.idx)
*/
class IdxHashSetParser implements HashSetParser {
private final String filename; // Name of the input file (saved for logging)
private BufferedReader reader; // Input file
private final long totalHashes; // Estimated number of hashes
private boolean doneReading = false; // Flag for if we've hit the end of the file
IdxHashSetParser(String filename) throws TskCoreException {
this.filename = filename;
try {
reader = new BufferedReader(new FileReader(filename));
} catch (FileNotFoundException ex) {
throw new TskCoreException("Error opening file " + filename, ex);
}
// Estimate the total number of hashes in the file since counting them all can be slow
File importFile = new File(filename);
long fileSize = importFile.length();
totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero
}
/**
* Get the next hash to import
*
* @return The hash as a string, or null if the end of file was reached
* without error
* @throws TskCoreException
*/
@Override
public String getNextHash() throws TskCoreException {
String line;
try {
while ((line = reader.readLine()) != null) {
String[] parts = line.split("\\|");
// Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
if (parts.length != 2 || parts[0].length() == 41) {
continue;
}
return parts[0].toLowerCase();
}
} catch (IOException ex) {
throw new TskCoreException("Error reading file " + filename, ex);
}
// We've run out of data
doneReading = true;
return null;
}
/**
* Check if there are more hashes to read
*
* @return true if we've read all expected hash values, false otherwise
*/
@Override
public boolean doneReading() {
return doneReading;
}
/**
* Get the expected number of hashes in the file. This number can be an
* estimate.
*
* @return The expected hash count
*/
@Override
public long getExpectedHashCount() {
return totalHashes;
}
/**
* Closes the import file
*/
@Override
public void close() {
try {
reader.close();
} catch (IOException ex) {
Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex);
}
}
}
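
Data lines in an idx file are pipe-separated pairs of hash and origin; header lines carry a 41-character dummy hash, which is what the length check above filters out. A worked example on an illustrative line (not from a real hash set):

String line = "00112233445566778899AABBCCDDEEFF|\"some file\""; // hypothetical MD5 line
String[] parts = line.split("\\|");
// parts.length == 2 and parts[0].length() == 32 (not 41), so the line is kept
String hash = parts[0].toLowerCase(); // "00112233445566778899aabbccddeeff"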

View File

@@ -18,12 +18,9 @@
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.awt.Cursor;
import java.awt.Color;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
@@ -31,8 +28,9 @@ import javax.swing.JFrame;
import javax.swing.SwingWorker;
import javax.swing.WindowConstants;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.Executors;
import javax.swing.JOptionPane;
import org.openide.util.NbBundle;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttribute;
@@ -45,95 +43,111 @@ import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
*
* Imports a hash set into the central repository and updates a progress dialog
*/
class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener {
private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",})
ImportCentralRepoDbProgressDialog() {
super((JFrame) WindowManager.getDefault().getMainWindow(),
Bundle.ImportCentralRepoDbProgressDialog_title_text(),
true);
initComponents();
customizeComponents();
}
private void customizeComponents() {
// This is preventing the user from closing the dialog using the X
setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
bnOk.setEnabled(false);
}
private CentralRepoImportWorker worker;
/**
*
* Import the selected hash set into the central repository. Will bring up a
* progress dialog while the import is in progress.
*
* @param hashSetName
* @param version
* @param orgId
* @param searchDuringIngest
* @param sendIngestMessages
* @param knownFilesType
* @param importFile
* @param readOnly
* @param importFileName
*/
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",
})
ImportCentralRepoDbProgressDialog() {
super((JFrame) WindowManager.getDefault().getMainWindow(),
Bundle.ImportCentralRepoDbProgressDialog_title_text(),
true);
initComponents();
customizeComponents();
}
private void customizeComponents(){
setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);
bnOk.setEnabled(false);
}
void importFile(String hashSetName, String version, int orgId,
boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
boolean readOnly, String importFileName){
setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
File importFile = new File(importFileName);
worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
knownFilesType, readOnly, importFile);
boolean readOnly, String importFileName) {
worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
knownFilesType, readOnly, importFileName);
worker.addPropertyChangeListener(this);
worker.execute();
setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow());
setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow());
this.setVisible(true);
}
HashDbManager.HashDb getDatabase(){
if(worker != null){
/**
* Get the HashDb object for the newly imported data. Should be called after
* importFile completes.
*
* @return The new HashDb object or null if the import failed/was canceled
*/
HashDbManager.HashDb getDatabase() {
if (worker != null) {
return worker.getDatabase();
}
return null;
}
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= lines processed"})
/**
* Updates the dialog from events from the worker. The two events we handle
* are progress updates and the done event.
*
* @param evt
*/
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"})
@Override
public void propertyChange(PropertyChangeEvent evt) {
if("progress".equals(evt.getPropertyName())){
progressBar.setValue(worker.getProgressPercentage());
if ("progress".equals(evt.getPropertyName())) {
// The progress has been updated. Update the progress bar and text
progressBar.setValue(worker.getProgress());
lbProgress.setText(getProgressString());
} else if ("state".equals(evt.getPropertyName())
&& (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) {
// Disable cancel and enable ok
// The worker is done processing
// Disable cancel button and enable ok
bnCancel.setEnabled(false);
bnOk.setEnabled(true);
progressBar.setValue(progressBar.getMaximum());
lbProgress.setText(getProgressString());
if (worker.getImportSuccess()) {
// If the import succeeded, finish the progress bar and display the
// total number of imported hashes
progressBar.setValue(progressBar.getMaximum());
lbProgress.setText(getProgressString());
} else {
// If there was an error, reset the progress bar and display an error message
progressBar.setValue(0);
lbProgress.setForeground(Color.red);
lbProgress.setText(Bundle.ImportCentralRepoDbProgressDialog_errorParsingFile_message());
}
}
}
private String getProgressString(){
return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed();
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"})
private String getProgressString() {
return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message();
}
private interface CentralRepoImportWorker{
void execute();
boolean cancel(boolean mayInterruptIfRunning);
void addPropertyChangeListener(PropertyChangeListener dialog);
int getProgressPercentage();
long getLinesProcessed();
HashDbManager.HashDb getDatabase();
}
class ImportIDXWorker extends SwingWorker<Void,Void> implements CentralRepoImportWorker{
private class CentralRepoImportWorker extends SwingWorker<Void, Void> {
private final int HASH_IMPORT_THRESHOLD = 10000;
private final String hashSetName;
private final String version;
@@ -142,16 +156,16 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
private final boolean sendIngestMessages;
private final HashDbManager.HashDb.KnownFilesType knownFilesType;
private final boolean readOnly;
private final File importFile;
private final long totalLines;
private int referenceSetID = -1;
private final String importFileName;
private HashDbManager.CentralRepoHashSet newHashDb = null;
private final AtomicLong numLines = new AtomicLong();
ImportIDXWorker(String hashSetName, String version, int orgId,
boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
boolean readOnly, File importFile){
private final AtomicInteger referenceSetID = new AtomicInteger();
private final AtomicLong hashCount = new AtomicLong();
private final AtomicBoolean importSuccess = new AtomicBoolean();
CentralRepoImportWorker(String hashSetName, String version, int orgId,
boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
boolean readOnly, String importFileName) {
this.hashSetName = hashSetName;
this.version = version;
this.orgId = orgId;
@@ -159,146 +173,163 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
this.sendIngestMessages = sendIngestMessages;
this.knownFilesType = knownFilesType;
this.readOnly = readOnly;
this.importFile = importFile;
this.numLines.set(0);
this.totalLines = getEstimatedTotalHashes();
this.importFileName = importFileName;
this.hashCount.set(0);
this.importSuccess.set(false);
this.referenceSetID.set(-1);
}
/**
* Doing an actual count of the number of lines in a large idx file (such
* as the nsrl) is slow, so just get something in the general area for the
* progress bar.
* @return Approximate number of hashes in the file
* Get the newly created database
*
* @return the imported database. May be null if an error occurred or
* the user canceled
*/
final long getEstimatedTotalHashes(){
long fileSize = importFile.length();
return (fileSize / 0x33 + 1); // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero
}
@Override
public HashDbManager.HashDb getDatabase(){
synchronized HashDbManager.CentralRepoHashSet getDatabase() {
return newHashDb;
}
@Override
public long getLinesProcessed(){
return numLines.get();
/**
* Get the number of hashes that have been read in so far
*
* @return current hash count
*/
long getNumHashesProcessed() {
return hashCount.get();
}
@Override
public int getProgressPercentage(){
return this.getProgress();
/**
* Check if the import was successful or if there was an error.
*
* @return true if the import process completed without error, false
* otherwise
*/
boolean getImportSuccess() {
return importSuccess.get();
}
@Override
protected Void doInBackground() throws Exception {
TskData.FileKnown knownStatus;
if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
knownStatus = TskData.FileKnown.KNOWN;
// Create the hash set parser
HashSetParser hashSetParser;
if (importFileName.toLowerCase().endsWith(".idx")) {
hashSetParser = new IdxHashSetParser(importFileName);
} else if(importFileName.toLowerCase().endsWith(".hash")){
hashSetParser = new EncaseHashSetParser(importFileName);
} else if(importFileName.toLowerCase().endsWith(".kdb")){
hashSetParser = new KdbHashSetParser(importFileName);
} else {
knownStatus = TskData.FileKnown.BAD;
// We've gotten here with a format that can't be processed
throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
}
// Create an empty hashset in the central repository
referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));
EamDb dbManager = EamDb.getInstance();
CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
BufferedReader reader = new BufferedReader(new FileReader(importFile));
String line;
Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
while ((line = reader.readLine()) != null) {
if(isCancelled()){
return null;
try {
// Convert to the FileKnown enum used by EamGlobalSet
TskData.FileKnown knownStatus;
if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
knownStatus = TskData.FileKnown.KNOWN;
} else {
knownStatus = TskData.FileKnown.BAD;
}
String[] parts = line.split("\\|");
// Create an empty hashset in the central repository
EamDb dbManager = EamDb.getInstance();
referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)));
// Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
if (parts.length != 2 || parts[0].length() == 41) {
continue;
}
// Get the "FILES" content type. This is a database lookup so we
// only want to do it once.
CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID);
EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
referenceSetID,
parts[0].toLowerCase(),
knownStatus,
"");
// Holds the current batch of hashes that need to be written to the central repo
Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
globalInstances.add(eamGlobalFileInstance);
numLines.incrementAndGet();
while (!hashSetParser.doneReading()) {
if (isCancelled()) {
return null;
}
if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){
dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
globalInstances.clear();
String newHash = hashSetParser.getNextHash();
int progress = (int)(numLines.get() * 100 / totalLines);
if(progress < 100){
this.setProgress(progress);
} else {
this.setProgress(99);
if (newHash != null) {
EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
referenceSetID.get(),
newHash,
knownStatus,
"");
globalInstances.add(eamGlobalFileInstance);
// If we've hit the threshold for writing the hashes, write them
// all to the central repo
if (hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0) {
dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
globalInstances.clear();
int progress = (int) (hashCount.get() * 100 / hashSetParser.getExpectedHashCount());
if (progress < 100) {
this.setProgress(progress);
} else {
this.setProgress(99);
}
}
}
}
// Add any remaining hashes to the central repo
dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
this.setProgress(100);
return null;
} finally {
hashSetParser.close();
}
dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
this.setProgress(100);
return null;
}
private void deleteIncompleteSet(int idToDelete){
if(idToDelete >= 0){
private void deleteIncompleteSet() {
if (referenceSetID.get() >= 0) {
// This can be slow on large reference sets
Executors.newSingleThreadExecutor().execute(new Runnable() {
@Override
@Override
public void run() {
try{
EamDb.getInstance().deleteReferenceSet(idToDelete);
} catch (EamDbException ex2){
try {
EamDb.getInstance().deleteReferenceSet(referenceSetID.get());
} catch (EamDbException ex2) {
Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
}
}
});
}
}
@NbBundle.Messages({"ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"})
@Override
protected void done() {
if(isCancelled()){
synchronized protected void done() {
if (isCancelled()) {
// If the user hit cancel, delete this incomplete hash set from the central repo
deleteIncompleteSet(referenceSetID);
deleteIncompleteSet();
return;
}
try {
get();
try{
newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
referenceSetID,
try {
newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
referenceSetID.get(),
searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
} catch (TskCoreException ex){
JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
importSuccess.set(true);
} catch (TskCoreException ex) {
Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
}
} catch (Exception ex) {
// Delete this incomplete hash set from the central repo
if(referenceSetID >= 0){
try{
EamDb.getInstance().deleteReferenceSet(referenceSetID);
} catch (EamDbException ex2){
Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex);
}
}
deleteIncompleteSet();
Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex);
}
}
}
}
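
The import loop in doInBackground() writes to the central repository in batches of HASH_IMPORT_THRESHOLD (10,000) hashes, which bounds the size of the in-memory set and amortizes database round trips. The pattern in isolation, with hypothetical helper names:

// Batching skeleton mirroring the worker above; makeInstance() is hypothetical.
Set<EamGlobalFileInstance> batch = new HashSet<>();
long processed = 0;
while (!parser.doneReading()) {
    String hash = parser.getNextHash();
    if (hash != null) {
        batch.add(makeInstance(hash));
        if (++processed % 10000 == 0) {
            dbManager.bulkInsertReferenceTypeEntries(batch, contentType);
            batch.clear(); // start the next batch
        }
    }
}
dbManager.bulkInsertReferenceTypeEntries(batch, contentType); // flush the final partial batch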
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always

View File

@@ -0,0 +1,160 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 - 2017 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.hashdatabase;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Parser for Autopsy/TSK-created databases (*.kdb)
*/
public class KdbHashSetParser implements HashSetParser {
private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS
private final String filename; // Name of the input file (saved for logging)
private final long totalHashes; // Estimated number of hashes
private int totalHashesRead = 0; // Number of hashes that have been read
private Connection conn;
private Statement statement;
private ResultSet resultSet;
KdbHashSetParser(String filename) throws TskCoreException {
this.filename = filename;
conn = null;
statement = null;
resultSet = null;
try {
// Open the database
StringBuilder connectionURL = new StringBuilder();
connectionURL.append(JDBC_BASE_URI);
connectionURL.append(filename);
Class.forName(JDBC_DRIVER);
conn = DriverManager.getConnection(connectionURL.toString());
// Get the number of hashes in the table
statement = conn.createStatement();
resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes");
if (resultSet.next()) {
totalHashes = resultSet.getLong("count");
} else {
close();
throw new TskCoreException("Error getting hash count from database " + filename);
}
// Get the hashes
resultSet = statement.executeQuery("SELECT md5 FROM hashes");
// At this point, getNextHash can read each hash from the result set
} catch (ClassNotFoundException | SQLException ex) {
throw new TskCoreException("Error opening/reading database " + filename, ex);
}
}
/**
* Get the next hash to import
*
* @return The hash as a string
* @throws TskCoreException
*/
@Override
public String getNextHash() throws TskCoreException {
try {
if (resultSet.next()) {
byte[] hashBytes = resultSet.getBytes("md5");
StringBuilder sb = new StringBuilder();
for (byte b : hashBytes) {
sb.append(String.format("%02x", b));
}
if (sb.toString().length() != 32) {
throw new TskCoreException("Hash has incorrect length: " + sb.toString());
}
totalHashesRead++;
return sb.toString();
} else {
throw new TskCoreException("Could not read expected number of hashes from database " + filename);
}
} catch (SQLException ex) {
throw new TskCoreException("Error reading hash from result set for database " + filename, ex);
}
}
/**
* Check if there are more hashes to read
*
* @return true if we've read all expected hash values, false otherwise
*/
@Override
public boolean doneReading() {
return (totalHashesRead >= totalHashes);
}
/**
* Get the expected number of hashes in the file.
*
* @return The expected hash count
*/
@Override
public long getExpectedHashCount() {
return totalHashes;
}
/**
* Closes the import file
*/
@Override
public final void close() {
if (statement != null) {
try {
statement.close();
} catch (SQLException ex) {
Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex);
}
}
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException ex) {
Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex);
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException ex) {
Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex);
}
}
}
}
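
The parser assumes a .kdb file is a SQLite database containing a hashes table whose md5 column stores 16-byte binary digests, which is why each converted hash must be exactly 32 hex characters. A quick pre-flight check along the same lines, using the same JDBC driver (the path and surrounding method context are illustrative):

// Sketch: sanity-check a candidate .kdb file before importing it.
Class.forName("org.sqlite.JDBC");
try (Connection conn = DriverManager.getConnection("jdbc:sqlite:sample.kdb");
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery("SELECT md5 FROM hashes LIMIT 1")) {
    if (rs.next() && rs.getBytes("md5").length == 16) {
        // Looks like a TSK-style hash database; safe to hand to KdbHashSetParser
    }
}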