Mirror of https://github.com/overcuriousity/autopsy-flatpak.git, synced 2025-07-17 10:17:41 +00:00
Merge branch '3144_encaseHashes' into 3140b_importKdb
Conflicts: Core/src/org/sleuthkit/autopsy/modules/hashdatabase/ImportCentralRepoDbProgressDialog.java
This commit is contained in: commit 7a4fd008c3
@@ -31,95 +31,100 @@ import org.sleuthkit.datamodel.TskCoreException;

/**
 * Parser for Encase format hash sets (*.hash)
 */
class EncaseHashSetParser implements HashSetParser {

    private final byte[] encaseHeader = {(byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00,
        (byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00};
    private final String filename;        // Name of the input file (saved for logging)
    private InputStream inputStream;      // File stream for file being imported
    private final long expectedHashCount; // Number of hashes we expect to read from the file
    private int totalHashesRead = 0;      // Number of hashes that have been read

    /**
     * Opens the import file and parses the header. If this is successful, the
     * file will be set up to call getNextHash() to read the hash values.
     *
     * @param filename The Encase hash set
     *
     * @throws TskCoreException There was an error opening/reading the file or
     *                          it is not the correct format
     */
    EncaseHashSetParser(String filename) throws TskCoreException {
        try {
            this.filename = filename;
            inputStream = new BufferedInputStream(new FileInputStream(filename));

            // Read in and test the 16 byte header
            byte[] header = new byte[16];
            readBuffer(header, 16);
            if (!Arrays.equals(header, encaseHeader)) {
                close();
                throw new TskCoreException("File " + filename + " does not have an Encase header");
            }

            // Read in the expected number of hashes (little endian)
            byte[] sizeBuffer = new byte[4];
            readBuffer(sizeBuffer, 4);
            expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
                    | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);

            // Read in a bunch of nulls
            byte[] filler = new byte[0x3f4];
            readBuffer(filler, 0x3f4);

            // Read in the hash set name
            byte[] nameBuffer = new byte[0x50];
            readBuffer(nameBuffer, 0x50);

            // Read in the hash set type
            byte[] typeBuffer = new byte[0x28];
            readBuffer(typeBuffer, 0x28);

            // At this point we're past the header and ready to read in the hashes
        } catch (IOException ex) {
            close();
            throw new TskCoreException("Error reading " + filename, ex);
        } catch (TskCoreException ex) {
            close();
            throw ex;
        }
    }

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    @Override
    public long getExpectedHashCount() {
        return expectedHashCount;
    }

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    @Override
    public boolean doneReading() {
        return (totalHashesRead >= expectedHashCount);
    }

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     *
     * @throws TskCoreException
     */
    @Override
    public String getNextHash() throws TskCoreException {
        if (inputStream == null) {
            throw new TskCoreException("Attempting to read from null inputStream");
        }

        byte[] hashBytes = new byte[16];
        byte[] divider = new byte[2];
        try {

            readBuffer(hashBytes, 16);
            readBuffer(divider, 2);
@@ -131,32 +136,32 @@ class EncaseHashSetParser implements HashSetParser {

            totalHashesRead++;
            return sb.toString();
        } catch (IOException ex) {
            throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex);
        }
    }

    /**
     * Closes the import file
     */
    @Override
    public final void close() {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException ex) {
                Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex);
            } finally {
                inputStream = null;
            }
        }
    }

    private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException {
        if (inputStream == null) {
            throw new TskCoreException("readBuffer called on null inputStream");
        }
        if (length != inputStream.read(buffer)) {
            close();
            throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename);
        }
    }

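Note: the shift-and-mask decode of sizeBuffer in the constructor above can also be expressed with java.nio.ByteBuffer. The following is an illustrative sketch, not part of the patch; the & 0xffffffffL mask additionally keeps a stored count with the high bit set from going negative, which the shift version would produce.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

// Sketch only: equivalent little-endian decode of the 4-byte hash count.
long count = ByteBuffer.wrap(sizeBuffer)
        .order(ByteOrder.LITTLE_ENDIAN)
        .getInt() & 0xffffffffL; // mask treats the stored count as unsigned
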
@@ -127,7 +127,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
        hashDbOrgLabel.setText(NO_SELECTION_TEXT);
        hashDbReadOnlyLabel.setText(NO_SELECTION_TEXT);
        indexPathLabel.setText(NO_SELECTION_TEXT);

        // Update indexing components.
        hashDbIndexStatusLabel.setText(NO_SELECTION_TEXT);
@@ -162,14 +161,14 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan

        // Update descriptive labels.
        hashDbNameLabel.setText(db.getHashSetName());
        hashDbTypeLabel.setText(db.getKnownFilesType().getDisplayName());
        try {
            if (db.isUpdateable()) {
                hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_editable());
            } else {
                hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_readOnly());
            }
        } catch (TskCoreException ex) {
            hashDbReadOnlyLabel.setText(Bundle.HashLookupSettingsPanel_updateStatusError());
        }

@@ -180,30 +179,30 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
            addHashesToDatabaseButton.setEnabled(false);
        }

        if (db instanceof SleuthkitHashSet) {
            SleuthkitHashSet hashDb = (SleuthkitHashSet) db;

            // Disable the central repo fields
            hashDbVersionLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
            hashDbOrgLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());

            // Enable the delete button if ingest is not running
            deleteDatabaseButton.setEnabled(!ingestIsRunning);

            try {
                hashDbLocationLabel.setText(shortenPath(db.getDatabasePath()));
            } catch (TskCoreException ex) {
                Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex); //NON-NLS
                hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT);
            }

            try {
                indexPathLabel.setText(shortenPath(hashDb.getIndexPath()));
            } catch (TskCoreException ex) {
                Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex); //NON-NLS
                indexPathLabel.setText(ERROR_GETTING_PATH_TEXT);
            }

            // Update indexing components.
            try {
                if (hashDb.isIndexing()) {
@@ -245,15 +244,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
                indexButton.setEnabled(false);
            }
        } else {

            // Disable the file type fields/buttons
            indexPathLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
            hashDbIndexStatusLabel.setText(Bundle.HashLookupSettingsPanel_notApplicable());
            hashDbLocationLabel.setText(Bundle.HashLookupSettingsPanel_centralRepo());
            indexButton.setEnabled(false);
            deleteDatabaseButton.setEnabled(false);

            CentralRepoHashSet crDb = (CentralRepoHashSet) db;

            hashDbVersionLabel.setText(crDb.getVersion());
            hashDbOrgLabel.setText(crDb.getOrgName());
@@ -302,13 +301,17 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
    @Override
    @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.",
        "HashLookupSettingsPanel.saveFail.title=Save Fail"})
    public void saveSettings() {

        // Clear out the list of unsaved hashes
        newReferenceSetIDs.clear();

        //Checking for any unindexed databases
        List<SleuthkitHashSet> unindexed = new ArrayList<>();
        for (HashDb db : hashSetManager.getAllHashSets()) {
            if (db instanceof SleuthkitHashSet) {
                try {
                    SleuthkitHashSet hashDatabase = (SleuthkitHashSet) db;
                    if (!hashDatabase.hasIndex()) {
                        unindexed.add(hashDatabase);
                    }
@@ -320,10 +323,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan

        // If there are unindexed databases, give the user the option to index them now. This
        // needs to be on the EDT, and will save the hash settings after completing
        if (!unindexed.isEmpty()) {
            SwingUtilities.invokeLater(new Runnable() {
                @Override
                public void run() {
                    //If unindexed ones are found, show a popup box that will either index them, or remove them.
                    if (unindexed.size() == 1) {
                        showInvalidIndex(false, unindexed);
@@ -335,7 +338,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
        } else {
            try {
                hashSetManager.save();
            } catch (HashDbManager.HashDbManagerException ex) {
                SwingUtilities.invokeLater(() -> {
                    JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_saveFail_message(), Bundle.HashLookupSettingsPanel_saveFail_title(), JOptionPane.ERROR_MESSAGE);
@@ -363,20 +365,20 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
         */
        if (IngestManager.getInstance().isIngestRunning() == false) {
            // Remove any new central repo hash sets from the database
            for (int refID : newReferenceSetIDs) {
                try {
                    if (EamDb.isEnabled()) {
                        EamDb.getInstance().deleteReferenceSet(refID);
                    } else {
                        // This is the case where the user imported a database, then switched over to the central
                        // repo panel and disabled it before cancelling. We can't delete the database at this point.
                        Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.WARNING, "Error reverting central repository hash sets"); //NON-NLS
                    }
                } catch (EamDbException ex) {
                    Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS
                }
            }

            HashDbManager.getInstance().loadLastSavedConfiguration();
        }
    }
@@ -398,7 +400,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
     * unindexed, along with solutions. This method is related to
     * ModalNoButtons, to be removed at a later date.
     *
     * @param plural    Whether or not there are multiple unindexed databases
     * @param unindexed The list of unindexed databases. Can be of size 1.
     */
    private void showInvalidIndex(boolean plural, List<SleuthkitHashSet> unindexed) {
@@ -471,8 +473,8 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
                getSelectionModel().setSelectionInterval(index, index);
            }
        }

        public void selectRowByDatabase(HashDb db) {
            setSelection(hashSetTableModel.getIndexByDatabase(db));
        }

@@ -510,7 +512,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
            return hashSets.get(rowIndex).getDisplayName();
        }

        private boolean isValid(int rowIndex) {
            try {
                return hashSets.get(rowIndex).isValid();
            } catch (TskCoreException ex) {
@@ -543,15 +545,15 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
            }
        }

        int getIndexByDatabase(HashDb db) {
            for (int i = 0; i < hashSets.size(); ++i) {
                if (hashSets.get(i).equals(db)) {
                    return i;
                }
            }
            return -1;
        }

        @Deprecated
        int getIndexByName(String name) {
            for (int i = 0; i < hashSets.size(); ++i) {
@@ -934,11 +936,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
    private void createDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createDatabaseButtonActionPerformed
        HashDb hashDb = new HashDbCreateDatabaseDialog().getHashDatabase();
        if (null != hashDb) {
            if (hashDb instanceof CentralRepoHashSet) {
                int newDbIndex = ((CentralRepoHashSet) hashDb).getReferenceSetID();
                newReferenceSetIDs.add(newDbIndex);
            }

            hashSetTableModel.refreshModel();
            ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb);
            firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
@@ -960,7 +962,7 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan

        // Add a listener for the INDEXING_DONE event. This listener will update
        // the UI.
        SleuthkitHashSet hashDb = (SleuthkitHashSet) hashDatabase;
        hashDb.addPropertyChangeListener(new PropertyChangeListener() {
            @Override
            public void propertyChange(PropertyChangeEvent evt) {
@@ -988,11 +990,11 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
    private void importDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_importDatabaseButtonActionPerformed
        HashDb hashDb = new HashDbImportDatabaseDialog().getHashDatabase();
        if (null != hashDb) {
            if (hashDb instanceof CentralRepoHashSet) {
                int newReferenceSetID = ((CentralRepoHashSet) hashDb).getReferenceSetID();
                newReferenceSetIDs.add(newReferenceSetID);
            }

            hashSetTableModel.refreshModel();
            ((HashSetTable) hashSetTable).selectRowByDatabase(hashDb);
            firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
@@ -1002,21 +1004,21 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
    @Messages({})
    private void deleteDatabaseButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteDatabaseButtonActionPerformed
        if (JOptionPane.showConfirmDialog(null,
                NbBundle.getMessage(this.getClass(),
                        "HashDbConfigPanel.deleteDbActionConfirmMsg"),
                NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.deleteDbActionMsg"),
                JOptionPane.YES_NO_OPTION,
                JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION) {
            HashDb hashDb = ((HashSetTable) hashSetTable).getSelection();
            if (hashDb != null) {
                try {
                    hashSetManager.removeHashDatabaseNoSave(hashDb);
                } catch (HashDbManager.HashDbManagerException ex) {
                    JOptionPane.showMessageDialog(null, Bundle.HashLookupSettingsPanel_removeDatabaseFailure_message(hashDb.getHashSetName()));
                }
                hashSetTableModel.refreshModel();
                firePropertyChange(OptionsPanelController.PROP_CHANGED, null, null);
            }
        }
    }//GEN-LAST:event_deleteDatabaseButtonActionPerformed

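Note: the newReferenceSetIDs list above implements a track-and-revert pattern: the button handlers record the ID of each central repository reference set created since the last save, saveSettings() clears the list once the sets are permanent, and the ingest-not-running branch deletes anything still pending. A minimal sketch of that pattern, with hypothetical method names:

// Sketch only (hypothetical names): track unsaved IDs, then commit or revert.
private final List<Integer> pendingIds = new ArrayList<>();

void onSetCreated(int refId) { pendingIds.add(refId); } // from the button handlers
void onSave() { pendingIds.clear(); }                   // sets are now permanent
void onCancel() throws EamDbException {
    for (int refId : pendingIds) {
        EamDb.getInstance().deleteReferenceSet(refId);  // undo unsaved sets
    }
    pendingIds.clear();
}
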
@@ -21,27 +21,31 @@ package org.sleuthkit.autopsy.modules.hashdatabase;

import org.sleuthkit.datamodel.TskCoreException;

interface HashSetParser {

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     *
     * @throws TskCoreException
     */
    String getNextHash() throws TskCoreException;

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    boolean doneReading();

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    long getExpectedHashCount();

    /**
     * Closes the import file
     */

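Taken together, HashSetParser defines a pull-style reader that the import worker further down drives in a loop. A minimal consumer sketch (process() is a hypothetical callback, and the path is illustrative):

// Sketch only: driving a HashSetParser to completion.
HashSetParser parser = new IdxHashSetParser("/path/to/hashes.idx");
try {
    while (!parser.doneReading()) {
        String hash = parser.getNextHash(); // null means clean end of file
        if (hash != null) {
            process(hash); // hypothetical consumer
        }
    }
} finally {
    parser.close(); // always release the underlying file
}
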
@@ -31,35 +31,38 @@ import org.sleuthkit.datamodel.TskCoreException;
 * Parser for idx files (*.idx)
 */
class IdxHashSetParser implements HashSetParser {

    private final String filename;       // Name of the input file (saved for logging)
    private BufferedReader reader;       // Input file
    private final long totalHashes;      // Estimated number of hashes
    private boolean doneReading = false; // Flag for if we've hit the end of the file

    IdxHashSetParser(String filename) throws TskCoreException {
        this.filename = filename;
        try {
            reader = new BufferedReader(new FileReader(filename));
        } catch (FileNotFoundException ex) {
            throw new TskCoreException("Error opening file " + filename, ex);
        }

        // Estimate the total number of hashes in the file since counting them all can be slow
        File importFile = new File(filename);
        long fileSize = importFile.length();
        totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero
    }

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     *
     * @throws TskCoreException
     */
    @Override
    public String getNextHash() throws TskCoreException {
        String line;

        try {
            while ((line = reader.readLine()) != null) {

                String[] parts = line.split("\\|");
@@ -68,45 +71,47 @@ class IdxHashSetParser implements HashSetParser {
                if (parts.length != 2 || parts[0].length() == 41) {
                    continue;
                }

                return parts[0].toLowerCase();
            }
        } catch (IOException ex) {
            throw new TskCoreException("Error reading file " + filename, ex);
        }

        // We've run out of data
        doneReading = true;
        return null;
    }

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    @Override
    public boolean doneReading() {
        return doneReading;
    }

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    @Override
    public long getExpectedHashCount() {
        return totalHashes;
    }

    /**
     * Closes the import file
     */
    @Override
    public void close() {
        try {
            reader.close();
        } catch (IOException ex) {
            Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex);
        }
    }

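For context on the fileSize / 0x33 + 1 estimate above: idx lines average roughly 0x33 (51) bytes each, and the + 1 guarantees a non-zero count even for tiny files. A worked example with illustrative numbers:

// Sketch only: the size-based estimate with example values.
long fileSize = 5_100_000L;          // bytes
long estimate = fileSize / 0x33 + 1; // 5,100,000 / 51 + 1 == 100,001
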
@@ -45,31 +45,31 @@ import org.sleuthkit.datamodel.TskData;

/**
 * Imports a hash set into the central repository and updates a progress dialog
 */
class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener {

    private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog

    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",})
    ImportCentralRepoDbProgressDialog() {
        super((JFrame) WindowManager.getDefault().getMainWindow(),
                Bundle.ImportCentralRepoDbProgressDialog_title_text(),
                true);

        initComponents();
        customizeComponents();
    }

    private void customizeComponents() {
        // Prevent the user from closing the dialog using the X
        setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);

        bnOk.setEnabled(false);
    }

    /**
     * Import the selected hash set into the central repository. Will bring up a
     * progress dialog while the import is in progress.
     *
     * @param hashSetName
     * @param version
     * @param orgId
@@ -77,57 +77,57 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
     * @param sendIngestMessages
     * @param knownFilesType
     * @param readOnly
     * @param importFileName
     */
    void importFile(String hashSetName, String version, int orgId,
            boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
            boolean readOnly, String importFileName) {

        worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
                knownFilesType, readOnly, importFileName);
        worker.addPropertyChangeListener(this);
        worker.execute();

        setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow());
        this.setVisible(true);
    }

    /**
     * Get the HashDb object for the newly imported data. Should be called after
     * importFile completes.
     *
     * @return The new HashDb object or null if the import failed/was canceled
     */
    HashDbManager.HashDb getDatabase() {
        if (worker != null) {
            return worker.getDatabase();
        }
        return null;
    }

    /**
     * Updates the dialog from events from the worker. The two events we handle
     * are progress updates and the done event.
     *
     * @param evt
     */
    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"})
    @Override
    public void propertyChange(PropertyChangeEvent evt) {

        if ("progress".equals(evt.getPropertyName())) {
            // The progress has been updated. Update the progress bar and text
            progressBar.setValue(worker.getProgress());
            lbProgress.setText(getProgressString());
        } else if ("state".equals(evt.getPropertyName())
                && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) {

            // The worker is done processing
            // Disable cancel button and enable ok
            bnCancel.setEnabled(false);
            bnOk.setEnabled(true);

            if (worker.getImportSuccess()) {
                // If the import succeeded, finish the progress bar and display the
                // total number of imported hashes
                progressBar.setValue(progressBar.getMaximum());
@@ -140,13 +140,14 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
            }
        }
    }

    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"})
    private String getProgressString() {
        return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message();
    }

    private class CentralRepoImportWorker extends SwingWorker<Void, Void> {

        private final int HASH_IMPORT_THRESHOLD = 10000;
        private final String hashSetName;
        private final String version;
@@ -160,11 +161,11 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
        private final AtomicInteger referenceSetID = new AtomicInteger();
        private final AtomicLong hashCount = new AtomicLong();
        private final AtomicBoolean importSuccess = new AtomicBoolean();

        CentralRepoImportWorker(String hashSetName, String version, int orgId,
                boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
                boolean readOnly, String importFileName) {

            this.hashSetName = hashSetName;
            this.version = version;
            this.orgId = orgId;
@@ -177,48 +178,57 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
            this.importSuccess.set(false);
            this.referenceSetID.set(-1);
        }

        /**
         * Get the newly created database
         *
         * @return the imported database. May be null if an error occurred or
         *         the user canceled
         */
        synchronized HashDbManager.CentralRepoHashSet getDatabase() {
            return newHashDb;
        }

        /**
         * Get the number of hashes that have been read in so far
         *
         * @return current hash count
         */
        long getNumHashesProcessed() {
            return hashCount.get();
        }

        /**
         * Check if the import was successful or if there was an error.
         *
         * @return true if the import process completed without error, false
         *         otherwise
         */
        boolean getImportSuccess() {
            return importSuccess.get();
        }

        @Override
        protected Void doInBackground() throws Exception {

            // Create the hash set parser
            HashSetParser hashSetParser;
            if (importFileName.toLowerCase().endsWith(".idx")) {
                hashSetParser = new IdxHashSetParser(importFileName);
            } else if (importFileName.toLowerCase().endsWith(".hash")) {
                hashSetParser = new EncaseHashSetParser(importFileName);
            } else if (importFileName.toLowerCase().endsWith(".kdb")) {
                hashSetParser = new KdbHashSetParser(importFileName);
            } else {
                // We've gotten here with a format that can't be processed
                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
            }

            try {
                // Convert to the FileKnown enum used by EamGlobalSet
                TskData.FileKnown knownStatus;
                if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
@@ -226,7 +236,7 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                } else {
                    knownStatus = TskData.FileKnown.BAD;
                }

                // Create an empty hashset in the central repository
                EamDb dbManager = EamDb.getInstance();
                referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)));
@@ -238,30 +248,30 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                // Holds the current batch of hashes that need to be written to the central repo
                Set<EamGlobalFileInstance> globalInstances = new HashSet<>();

                while (!hashSetParser.doneReading()) {
                    if (isCancelled()) {
                        return null;
                    }

                    String newHash = hashSetParser.getNextHash();

                    if (newHash != null) {
                        EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
                                referenceSetID.get(),
                                newHash,
                                knownStatus,
                                "");

                        globalInstances.add(eamGlobalFileInstance);

                        // If we've hit the threshold for writing the hashes, write them
                        // all to the central repo
                        if (hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0) {
                            dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                            globalInstances.clear();

                            int progress = (int) (hashCount.get() * 100 / hashSetParser.getExpectedHashCount());
                            if (progress < 100) {
                                this.setProgress(progress);
                            } else {
                                this.setProgress(99);
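Note: because getExpectedHashCount() is only an estimate, the computed percentage can reach or exceed 100 before the file is exhausted, so the loop above pins it at 99; propertyChange() then drives the bar to its maximum once the worker reports success. A worked example with illustrative numbers:

// Sketch only: the progress clamp with example values.
long hashCount = 120_000L; // hashes read so far
long expected = 100_000L;  // estimate from getExpectedHashCount()
int progress = (int) (hashCount * 100 / expected); // 120
// progress >= 100, so setProgress(99) holds the bar until done() runs
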
@@ -278,41 +288,41 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                hashSetParser.close();
            }
        }

        private void deleteIncompleteSet() {
            if (referenceSetID.get() >= 0) {

                // This can be slow on large reference sets
                Executors.newSingleThreadExecutor().execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            EamDb.getInstance().deleteReferenceSet(referenceSetID.get());
                        } catch (EamDbException ex2) {
                            Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
                        }
                    }
                });
            }
        }

        @Override
        synchronized protected void done() {

            if (isCancelled()) {
                // If the user hit cancel, delete this incomplete hash set from the central repo
                deleteIncompleteSet();
                return;
            }

            try {
                get();
                try {
                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
                            referenceSetID.get(),
                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
                    importSuccess.set(true);
                } catch (TskCoreException ex) {
                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
                }
            } catch (Exception ex) {
@@ -320,10 +330,10 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                deleteIncompleteSet();
                Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex);
            }
        }

    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
@@ -417,4 +427,4 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
    private javax.swing.JLabel lbProgress;
    private javax.swing.JProgressBar progressBar;
    // End of variables declaration//GEN-END:variables
}