Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-17 18:17:43 +00:00)
Refactoring hash set import
Commit 49a631f351 (parent cd6d0ca14c)
EncaseHashSetParser.java
@@ -22,31 +22,24 @@ import java.io.InputStream;
 import java.io.BufferedInputStream;
 import java.io.FileInputStream;
 import java.io.IOException;
-import java.lang.StringBuilder;
 import java.util.Arrays;
-import java.util.List;
-import java.util.ArrayList;
 import java.util.logging.Level;
-import javax.swing.JOptionPane;
 import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.core.RuntimeProperties;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.TskCoreException;
 
-class EncaseHashSetParser {
+class EncaseHashSetParser implements HashSetParser {
-    final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00,
+    private final byte[] encaseHeader = {(byte)0x48, (byte)0x41, (byte)0x53, (byte)0x48, (byte)0x0d, (byte)0x0a, (byte)0xff, (byte)0x00,
         (byte)0x02, (byte)0x00, (byte)0x00, (byte)0x00, (byte)0x01, (byte)0x00, (byte)0x00, (byte)0x00};
-    InputStream inputStream;
+    private InputStream inputStream;
-    final int expectedHashes;
+    private final long expectedHashCount;
-    int totalHashesRead = 0;
+    private int totalHashesRead = 0;
 
     /**
     * Opens the import file and parses the header.
     * @param filename The Encase hashset
     * @throws TskCoreException There was an error opening/reading the file or it is not the correct format
     */
-    @NbBundle.Messages({"EncaseHashSetParser.fileOpenError.text=Error reading import file",
-        "EncaseHashSetParser.wrongFormat.text=Hashset is not Encase format"})
    EncaseHashSetParser(String filename) throws TskCoreException{
        try{
            inputStream = new BufferedInputStream(new FileInputStream(filename));
@@ -55,16 +48,14 @@ class EncaseHashSetParser {
            byte[] header = new byte[16];
            readBuffer(header, 16);
            if(! Arrays.equals(header, encaseHeader)){
-                displayError(NbBundle.getMessage(this.getClass(),
-                        "EncaseHashSetParser.wrongFormat.text"));
                close();
                throw new TskCoreException("File " + filename + " does not have an Encase header");
            }
 
-            // Read in the expected number of hashes
+            // Read in the expected number of hashes (little endian)
            byte[] sizeBuffer = new byte[4];
            readBuffer(sizeBuffer, 4);
-            expectedHashes = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
+            expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
                    | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);
 
            // Read in a bunch of nulls
@@ -80,8 +71,6 @@ class EncaseHashSetParser {
            readBuffer(typeBuffer, 0x28);
 
        } catch (IOException ex){
-            displayError(NbBundle.getMessage(this.getClass(),
-                    "EncaseHashSetParser.fileOpenError.text"));
            close();
            throw new TskCoreException("Error reading " + filename, ex);
        } catch (TskCoreException ex){
@@ -90,21 +79,34 @@ class EncaseHashSetParser {
        }
    }
 
-    int getExpectedHashes(){
-        return expectedHashes;
+    /**
+     * Get the expected number of hashes in the file.
+     * This number can be an estimate.
+     * @return The expected hash count
+     */
+    @Override
+    public long getExpectedHashCount(){
+        return expectedHashCount;
    }
 
-    synchronized boolean doneReading(){
-        if(inputStream == null){
-            return true;
-        }
-        return(totalHashesRead >= expectedHashes);
+    /**
+     * Check if there are more hashes to read
+     * @return true if we've read all expected hash values, false otherwise
+     */
+    @Override
+    public boolean doneReading(){
+        return(totalHashesRead >= expectedHashCount);
    }
 
-    synchronized String getNextHash() throws TskCoreException{
+    /**
+     * Get the next hash to import
+     * @return The hash as a string, or null if the end of file was reached without error
+     * @throws TskCoreException
+     */
+    @Override
+    public String getNextHash() throws TskCoreException{
        if(inputStream == null){
-            return null;
+            throw new TskCoreException("Attempting to read from null inputStream");
        }
 
        byte[] hashBytes = new byte[16];
@@ -122,14 +124,16 @@ class EncaseHashSetParser {
            totalHashesRead++;
            return sb.toString();
        } catch (IOException ex){
-            // Log it and return what we've got
            Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Ran out of data while reading Encase hash sets", ex);
-            close();
            throw new TskCoreException("Error reading hash", ex);
        }
    }
 
-    synchronized final void close(){
+    /**
+     * Closes the import file
+     */
+    @Override
+    public final void close(){
        if(inputStream != null){
            try{
                inputStream.close();
@@ -142,26 +146,13 @@ class EncaseHashSetParser {
    }
 
    @NbBundle.Messages({"EncaseHashSetParser.outOfData.text=Ran out of data while parsing file"})
-    private synchronized void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException {
+    private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException {
        if(inputStream == null){
            throw new TskCoreException("readBuffer called on null inputStream");
        }
        if(length != inputStream.read(buffer)){
-            displayError(NbBundle.getMessage(this.getClass(),
-                    "EncaseHashSetParser.outOfData.text"));
            close();
-            throw new TskCoreException("Ran out of data while parsing Encase file");
+            throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file");
-        }
-    }
-
-    @NbBundle.Messages({"EncaseHashSetParser.error.title=Error importing Encase hashset"})
-    private void displayError(String errorText){
-        if(RuntimeProperties.runningWithGUI()){
-            JOptionPane.showMessageDialog(null,
-                    errorText,
-                    NbBundle.getMessage(this.getClass(),
-                            "EncaseHashSetParser.error.title"),
-                    JOptionPane.ERROR_MESSAGE);
        }
    }
 }
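Note on the count parsing above: the refactor renames expectedHashes to expectedHashCount, widens it from int to long, and labels the byte order little endian. A standalone sketch (not part of this commit; the class name and sample bytes are made up) showing that the manual shift-and-mask combination matches java.nio.ByteBuffer, and that Integer.toUnsignedLong guards against sign extension if a count ever had its top bit set:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class LittleEndianCountDemo {
    public static void main(String[] args) {
        // Sample on-disk bytes for a count of 0x01020304, least significant byte first
        byte[] sizeBuffer = {(byte) 0x04, (byte) 0x03, (byte) 0x02, (byte) 0x01};

        // Manual combination, exactly as in the patched constructor
        long manual = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
                | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);

        // Equivalent read via ByteBuffer; toUnsignedLong keeps the value positive
        // even when byte 3 is >= 0x80 (the manual int math would sign-extend there)
        long viaBuffer = Integer.toUnsignedLong(
                ByteBuffer.wrap(sizeBuffer).order(ByteOrder.LITTLE_ENDIAN).getInt());

        System.out.println(manual == viaBuffer);                 // true for this sample
        System.out.printf("count = %d (0x%08x)%n", viaBuffer, viaBuffer);
    }
}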
HashDbImportDatabaseDialog.java
@@ -447,7 +447,8 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
    @NbBundle.Messages({"HashDbImportDatabaseDialog.missingVersion=A version must be entered",
        "HashDbImportDatabaseDialog.missingOrg=An organization must be selected",
        "HashDbImportDatabaseDialog.duplicateName=A hashset with this name and version already exists",
-        "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository"
+        "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository",
+        "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered."
    })
    private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed
        // Note that the error handlers in this method call return without disposing of the
@@ -456,7 +457,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
        if (hashSetNameTextField.getText().isEmpty()) {
            JOptionPane.showMessageDialog(this,
                    NbBundle.getMessage(this.getClass(),
-                            "HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg"),
+                            "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg"),
                    NbBundle.getMessage(this.getClass(),
                            "HashDbImportDatabaseDialog.importHashDbErr"),
                    JOptionPane.ERROR_MESSAGE);
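Note on the key fix above: the lookup previously used HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg, a key belonging to a different dialog, so the commit defines the message under this dialog's own @NbBundle.Messages block and points the lookup at it. For context, a minimal sketch (hypothetical class; assumes the NetBeans annotation processor is on the build path) of how these annotations work: each key=value entry is written into the package's Bundle.properties at compile time, and a Bundle class is generated whose accessor name is the key with dots turned into underscores. This is the same mechanism behind Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message() later in this commit.

import org.openide.util.NbBundle;

class BundleKeyDemo {

    @NbBundle.Messages({"BundleKeyDemo.greeting.text=Hello from the bundle"})
    String greeting() {
        // Generated accessor: "BundleKeyDemo.greeting.text" -> BundleKeyDemo_greeting_text()
        return Bundle.BundleKeyDemo_greeting_text();
    }
}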
HashSetParser.java (new file)
@@ -0,0 +1,49 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 - 2017 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.hashdatabase;
+
+import org.sleuthkit.datamodel.TskCoreException;
+
+interface HashSetParser {
+
+    /**
+     * Get the next hash to import
+     * @return The hash as a string, or null if the end of file was reached without error
+     * @throws TskCoreException
+     */
+    String getNextHash() throws TskCoreException;
+
+    /**
+     * Check if there are more hashes to read
+     * @return true if we've read all expected hash values, false otherwise
+     */
+    boolean doneReading();
+
+    /**
+     * Get the expected number of hashes in the file.
+     * This number can be an estimate.
+     * @return The expected hash count
+     */
+    long getExpectedHashCount();
+
+    /**
+     * Closes the import file
+     */
+    void close();
+}
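The interface lets a single worker drive both file formats. A minimal sketch of the contract from the caller's side (hypothetical helper class; it must live in org.sleuthkit.autopsy.modules.hashdatabase because the parser constructors are package-private), mirroring the loop in CentralRepoImportWorker.doInBackground():

package org.sleuthkit.autopsy.modules.hashdatabase;

import org.sleuthkit.datamodel.TskCoreException;

class HashSetParserDemo {

    static long countHashes(String importFileName) throws TskCoreException {
        // Choose the parser by extension, as the import worker does
        HashSetParser parser = importFileName.toLowerCase().endsWith(".idx")
                ? new IdxHashSetParser(importFileName)
                : new EncaseHashSetParser(importFileName);
        try {
            long count = 0;
            while (!parser.doneReading()) {
                String hash = parser.getNextHash(); // null signals a clean end of file
                if (hash != null) {
                    count++; // a real caller batches these for a bulk insert
                }
            }
            return count;
        } finally {
            parser.close(); // always release the underlying file
        }
    }
}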
IdxHashSetParser.java (new file)
@@ -0,0 +1,113 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011 - 2017 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.hashdatabase;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.logging.Level;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Parser for idx files
+ */
+class IdxHashSetParser implements HashSetParser {
+    private String filename;
+    private BufferedReader reader;
+    private final long totalHashes;
+    private boolean doneReading = false;
+
+    IdxHashSetParser(String filename) throws TskCoreException{
+        this.filename = filename;
+        try{
+            reader = new BufferedReader(new FileReader(filename));
+        } catch (FileNotFoundException ex){
+            throw new TskCoreException("Error opening file " + filename, ex);
+        }
+
+        // Estimate the total number of hashes in the file since counting them all can be slow
+        File importFile = new File(filename);
+        long fileSize = importFile.length();
+        totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero
+    }
+
+    /**
+     * Get the next hash to import
+     * @return The hash as a string, or null if the end of file was reached without error
+     * @throws TskCoreException
+     */
+    @Override
+    public String getNextHash() throws TskCoreException {
+        String line;
+
+        try{
+            while ((line = reader.readLine()) != null) {
+
+                String[] parts = line.split("\\|");
+
+                // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
+                if (parts.length != 2 || parts[0].length() == 41) {
+                    continue;
+                }
+
+                return parts[0].toLowerCase();
+            }
+        } catch (IOException ex){
+            throw new TskCoreException("Error reading file " + filename, ex);
+        }
+
+        // We've run out of data
+        doneReading = true;
+        return null;
+    }
+
+    /**
+     * Check if there are more hashes to read
+     * @return true if we've read all expected hash values, false otherwise
+     */
+    @Override
+    public boolean doneReading() {
+        return doneReading;
+    }
+
+    /**
+     * Get the expected number of hashes in the file.
+     * This number can be an estimate.
+     * @return The expected hash count
+     */
+    @Override
+    public long getExpectedHashCount() {
+        return totalHashes;
+    }
+
+    /**
+     * Closes the import file
+     */
+    @Override
+    public void close() {
+        try{
+            reader.close();
+        } catch (IOException ex){
+            Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex);
+        }
+    }
+}
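Note on the totalHashes estimate: counting the lines of a large idx file (such as an NSRL set) requires a full pass over the file, so the constructor derives the count from the file length instead. Accuracy barely matters here because the value only scales the progress bar, and the worker pins displayed progress at 99% until the import really finishes. Back-of-envelope arithmetic (standalone sketch with an illustrative file size):

public class IdxEstimateDemo {
    public static void main(String[] args) {
        long fileSize = 522_240_000L;          // illustrative ~498 MiB idx file
        long estimated = fileSize / 0x33 + 1;  // 0x33 = 51 bytes per typical line
        System.out.println(estimated);         // 10240001, computed without reading the file
        // An exact count would stream the whole file instead, e.g.
        // java.nio.file.Files.lines(path).count() - a full pass over ~500 MiB.
    }
}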
ImportCentralRepoDbProgressDialog.java
@@ -19,12 +19,8 @@
 package org.sleuthkit.autopsy.modules.hashdatabase;
 
 import java.awt.Color;
-import java.awt.Cursor;
 import java.beans.PropertyChangeListener;
 import java.beans.PropertyChangeEvent;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileReader;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.logging.Level;
@@ -32,8 +28,8 @@ import javax.swing.JFrame;
 import javax.swing.SwingWorker;
 import javax.swing.WindowConstants;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.Executors;
-import javax.swing.JOptionPane;
 import org.openide.util.NbBundle;
 import org.openide.windows.WindowManager;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttribute;
@@ -50,18 +46,8 @@ import org.sleuthkit.datamodel.TskData;
 */
 class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
 
-    private CentralRepoImportWorker worker;
+    private CentralRepoImportWorker worker;   // Swing worker that will import the file and send updates to the dialog
 
-    /**
-     *
-     * @param hashSetName
-     * @param version
-     * @param orgId
-     * @param searchDuringIngest
-     * @param sendIngestMessages
-     * @param knownFilesType
-     * @param importFile
-     */
    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",
    })
    ImportCentralRepoDbProgressDialog() {
@@ -78,29 +64,16 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
        bnOk.setEnabled(false);
    }
 
-    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.unknownFormat.message=Hash set to import is an unknown format"})
    void importFile(String hashSetName, String version, int orgId,
            boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
            boolean readOnly, String importFileName){
-        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
 
-        File importFile = new File(importFileName);
-        if(importFileName.toLowerCase().endsWith(".idx")){
-            worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
-                    knownFilesType, readOnly, importFile);
-        } else if(importFileName.toLowerCase().endsWith(".hash")){
-            worker = new ImportEncaseWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
-                    knownFilesType, readOnly, importFile);
-        } else {
-            // We've gotten here with a format that can't be processed
-            JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_unknownFormat_message());
-            return;
-        }
+        worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
+                knownFilesType, readOnly, importFileName);
        worker.addPropertyChangeListener(this);
        worker.execute();
 
        setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow());
-        setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        this.setVisible(true);
    }
 
@@ -111,53 +84,67 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
        return null;
    }
 
-    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= hashes processed"})
+    /**
+     * Updates the dialog from events from the worker.
+     * The two events we handle are progress updates and
+     * the done event.
+     * @param evt
+     */
+    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"})
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
 
        if("progress".equals(evt.getPropertyName())){
-            progressBar.setValue(worker.getProgressPercentage());
+            // The progress has been updated. Update the progress bar and text
+            progressBar.setValue(worker.getProgress());
            lbProgress.setText(getProgressString());
        } else if ("state".equals(evt.getPropertyName())
                && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) {
-            // Disable cancel and enable ok
+            // The worker is done processing
+            // Disable cancel button and enable ok
            bnCancel.setEnabled(false);
            bnOk.setEnabled(true);
 
-            if(worker.getError().isEmpty()){
+            if(worker.getImportSuccess()){
+                // If the import succeeded, finish the progress bar and display the
+                // total number of imported hashes
                progressBar.setValue(progressBar.getMaximum());
                lbProgress.setText(getProgressString());
            } else {
+                // If there was an error, reset the progress bar and display an error message
                progressBar.setValue(0);
                lbProgress.setForeground(Color.red);
-                lbProgress.setText(worker.getError());
+                lbProgress.setText(Bundle.ImportCentralRepoDbProgressDialog_errorParsingFile_message());
            }
        }
    }
 
+    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"})
    private String getProgressString(){
-        return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed();
+        return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message();
    }
 
-    abstract class CentralRepoImportWorker extends SwingWorker<Void, Void>{
+    class CentralRepoImportWorker extends SwingWorker<Void, Void>{
-        final int HASH_IMPORT_THRESHOLD = 10000;
+        private final int HASH_IMPORT_THRESHOLD = 10000;
-        final String hashSetName;
+        private final String hashSetName;
-        final String version;
+        private final String version;
-        final int orgId;
+        private final int orgId;
-        final boolean searchDuringIngest;
+        private final boolean searchDuringIngest;
-        final boolean sendIngestMessages;
+        private final boolean sendIngestMessages;
-        final HashDbManager.HashDb.KnownFilesType knownFilesType;
+        private final HashDbManager.HashDb.KnownFilesType knownFilesType;
-        final boolean readOnly;
+        private final boolean readOnly;
-        final File importFile;
+        private final String importFileName;
-        long totalHashes = 1;
+        private long totalHashes = 1;
-        int referenceSetID = -1;
+        private int referenceSetID = -1;
-        HashDbManager.CentralRepoHashSet newHashDb = null;
+        private HashDbManager.CentralRepoHashSet newHashDb = null;
-        final AtomicLong numLines = new AtomicLong();
+        private final AtomicLong numLines = new AtomicLong();
-        String errorString = "";
+        private final AtomicBoolean importSuccess = new AtomicBoolean();
 
        CentralRepoImportWorker(String hashSetName, String version, int orgId,
                boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
-                boolean readOnly, File importFile){
+                boolean readOnly, String importFileName){
 
            this.hashSetName = hashSetName;
            this.version = version;
@@ -166,11 +153,12 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
            this.sendIngestMessages = sendIngestMessages;
            this.knownFilesType = knownFilesType;
            this.readOnly = readOnly;
-            this.importFile = importFile;
+            this.importFileName = importFileName;
            this.numLines.set(0);
+            this.importSuccess.set(false);
        }
 
-        HashDbManager.CentralRepoHashSet getDatabase(){
+        synchronized HashDbManager.CentralRepoHashSet getDatabase(){
            return newHashDb;
        }
 
@@ -178,19 +166,80 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
            return numLines.get();
        }
 
-        int getProgressPercentage(){
-            return this.getProgress();
-        }
-
-        String getError(){
-            return errorString;
-        }
-
-        /**
-         * Should be called in the constructor to set the max number of hashes.
-         * The value can be updated later after parsing the import file.
-         */
-        abstract void setEstimatedTotalHashes();
+        boolean getImportSuccess(){
+            return importSuccess.get();
+        }
+
+        @Override
+        protected Void doInBackground() throws Exception {
+
+            // Create the hash set parser
+            HashSetParser hashSetParser;
+            if(importFileName.toLowerCase().endsWith(".idx")){
+                hashSetParser = new IdxHashSetParser(importFileName);
+            } else if(importFileName.toLowerCase().endsWith(".hash")){
+                hashSetParser = new EncaseHashSetParser(importFileName);
+            } else {
+                // We've gotten here with a format that can't be processed
+                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
+            }
+
+            try{
+                totalHashes = hashSetParser.getExpectedHashCount();
+
+                TskData.FileKnown knownStatus;
+                if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
+                    knownStatus = TskData.FileKnown.KNOWN;
+                } else {
+                    knownStatus = TskData.FileKnown.BAD;
+                }
+
+                // Create an empty hashset in the central repository
+                referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));
+
+                EamDb dbManager = EamDb.getInstance();
+                CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
+
+                Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
+
+                while (! hashSetParser.doneReading()) {
+                    if(isCancelled()){
+                        return null;
+                    }
+
+                    String newHash = hashSetParser.getNextHash();
+
+                    if(newHash != null){
+                        EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
+                                referenceSetID,
+                                newHash,
+                                knownStatus,
+                                "");
+
+                        globalInstances.add(eamGlobalFileInstance);
+
+                        if(numLines.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0){
+                            dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
+                            globalInstances.clear();
+
+                            int progress = (int)(numLines.get() * 100 / totalHashes);
+                            if(progress < 100){
+                                this.setProgress(progress);
+                            } else {
+                                this.setProgress(99);
+                            }
+                        }
+                    }
+                }
+
+                dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
+                this.setProgress(100);
+                return null;
+            } finally {
+                hashSetParser.close();
+            }
+        }
 
        void deleteIncompleteSet(){
            if(referenceSetID >= 0){
@@ -209,10 +258,8 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
            }
        }
 
-        @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.importHashsetError=Error importing hash set",
-            "ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"})
        @Override
-        protected void done() {
+        synchronized protected void done() {
+
            if(isCancelled()){
                // If the user hit cancel, delete this incomplete hash set from the central repo
@@ -226,184 +273,19 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener{
                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
                            referenceSetID,
                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
+                    importSuccess.set(true);
                } catch (TskCoreException ex){
-                    JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
                }
            } catch (Exception ex) {
                // Delete this incomplete hash set from the central repo
                deleteIncompleteSet();
-                errorString = Bundle.ImportCentralRepoDbProgressDialog_importHashsetError();
+                Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex);
            }
        }
 
    }
 
-    class ImportEncaseWorker extends CentralRepoImportWorker{
-
-        ImportEncaseWorker(String hashSetName, String version, int orgId,
-                boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
-                boolean readOnly, File importFile){
-            super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile);
-
-            setEstimatedTotalHashes();
-        }
-
-
-        /**
-         * Encase files have a 0x480 byte header, then each hash takes 18 bytes
-         * @return Approximate number of hashes in the file
-         */
-        @Override
-        final void setEstimatedTotalHashes(){
-            long fileSize = importFile.length();
-            if(fileSize < 0x492){
-                totalHashes = 1; // There's room for at most one hash
-            }
-            totalHashes = (fileSize - 0x492) / 18;
-        }
-
-        @Override
-        protected Void doInBackground() throws Exception {
-
-            EncaseHashSetParser encaseParser = new EncaseHashSetParser(importFile.getAbsolutePath());
-            totalHashes = encaseParser.getExpectedHashes();
-
-            TskData.FileKnown knownStatus;
-            if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
-                knownStatus = TskData.FileKnown.KNOWN;
-            } else {
-                knownStatus = TskData.FileKnown.BAD;
-            }
-
-            // Create an empty hashset in the central repository
-            referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));
-
-            EamDb dbManager = EamDb.getInstance();
-            CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
-
-            Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
-
-            while (! encaseParser.doneReading()) {
-                if(isCancelled()){
-                    return null;
-                }
-
-                String newHash = encaseParser.getNextHash();
-
-                if(newHash != null){
-                    EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
-                            referenceSetID,
-                            newHash,
-                            knownStatus,
-                            "");
-
-                    globalInstances.add(eamGlobalFileInstance);
-                    numLines.incrementAndGet();
-
-                    if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){
-                        dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
-                        globalInstances.clear();
-
-                        int progress = (int)(numLines.get() * 100 / totalHashes);
-                        if(progress < 100){
-                            this.setProgress(progress);
-                        } else {
-                            this.setProgress(99);
-                        }
-                    }
-                }
-            }
-
-            dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
-            this.setProgress(100);
-            return null;
-        }
-    }
-
-
-    class ImportIDXWorker extends CentralRepoImportWorker{
-
-        ImportIDXWorker(String hashSetName, String version, int orgId,
-                boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
-                boolean readOnly, File importFile){
-            super(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages, knownFilesType, readOnly, importFile);
-
-            setEstimatedTotalHashes();
-        }
-
-        /**
-         * Doing an actual count of the number of lines in a large idx file (such
-         * as the nsrl) is slow, so just get something in the general area for the
-         * progress bar.
-         * @return Approximate number of hashes in the file
-         */
-        @Override
-        final void setEstimatedTotalHashes(){
-            long fileSize = importFile.length();
-            totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero
-        }
-
-        @Override
-        protected Void doInBackground() throws Exception {
-
-            TskData.FileKnown knownStatus;
-            if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
-                knownStatus = TskData.FileKnown.KNOWN;
-            } else {
-                knownStatus = TskData.FileKnown.BAD;
-            }
-
-            // Create an empty hashset in the central repository
-            referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));
-
-            EamDb dbManager = EamDb.getInstance();
-            CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
-            BufferedReader reader = new BufferedReader(new FileReader(importFile));
-            String line;
-            Set<EamGlobalFileInstance> globalInstances = new HashSet<>();
-
-            while ((line = reader.readLine()) != null) {
-                if(isCancelled()){
-                    return null;
-                }
-
-                String[] parts = line.split("\\|");
-
-                // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
-                if (parts.length != 2 || parts[0].length() == 41) {
-                    continue;
-                }
-
-                EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
-                        referenceSetID,
-                        parts[0].toLowerCase(),
-                        knownStatus,
-                        "");
-
-                globalInstances.add(eamGlobalFileInstance);
-                numLines.incrementAndGet();
-
-                if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){
-                    dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
-                    globalInstances.clear();
-
-                    int progress = (int)(numLines.get() * 100 / totalHashes);
-                    if(progress < 100){
-                        this.setProgress(progress);
-                    } else {
-                        this.setProgress(99);
-                    }
-                }
-            }
-
-            dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
-            this.setProgress(100);
-
-            return null;
-        }
-    }
-
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
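Note on the progress plumbing: with the two worker subclasses collapsed into CentralRepoImportWorker, the dialog now reads progress straight off SwingWorker (getProgress() replaces the old getProgressPercentage() wrapper) and reads success from an AtomicBoolean instead of an error string. A standalone sketch (hypothetical demo class, not from the commit) of the two PropertyChangeEvents the dialog handles, "progress" updates and the "state" transition to DONE:

import javax.swing.SwingWorker;

public class WorkerEventsDemo {
    public static void main(String[] args) throws InterruptedException {
        SwingWorker<Void, Void> worker = new SwingWorker<Void, Void>() {
            @Override
            protected Void doInBackground() throws Exception {
                for (int pct = 0; pct <= 100; pct += 10) {
                    Thread.sleep(100);  // stand-in for a bulk-insert batch
                    setProgress(pct);   // fires a "progress" PropertyChangeEvent on the EDT
                }
                return null;
            }
        };
        worker.addPropertyChangeListener(evt -> {
            if ("progress".equals(evt.getPropertyName())) {
                // Swing may coalesce rapid updates, so not every value arrives
                System.out.println("progress: " + evt.getNewValue() + "%");
            } else if ("state".equals(evt.getPropertyName())
                    && SwingWorker.StateValue.DONE.equals(evt.getNewValue())) {
                System.out.println("done");  // the dialog enables OK / disables Cancel here
            }
        });
        worker.execute();
        Thread.sleep(2000); // keep this demo JVM alive long enough to see the events
    }
}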