Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 3202-TagsMessagingOnStatusChange
This commit is contained in: commit 84181a922c
@@ -95,14 +95,6 @@ public interface EamDb {
                && EamDbPlatformEnum.getSelectedPlatform() != EamDbPlatformEnum.DISABLED;
    }

    /**
     * Placeholder version to use for non-read only databases
     * @return The version that will be stored in the database
     */
    static String getDefaultVersion() {
        return "";
    }

    /**
     * Add a new name/value pair in the db_info table.
     *
@@ -0,0 +1,168 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import java.io.InputStream;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Parser for Encase format hash sets (*.hash)
 */
class EncaseHashSetParser implements HashSetParser {

    private final byte[] encaseHeader = {(byte) 0x48, (byte) 0x41, (byte) 0x53, (byte) 0x48, (byte) 0x0d, (byte) 0x0a, (byte) 0xff, (byte) 0x00,
        (byte) 0x02, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x01, (byte) 0x00, (byte) 0x00, (byte) 0x00};
    private final String filename;        // Name of the input file (saved for logging)
    private InputStream inputStream;      // File stream for file being imported
    private final long expectedHashCount; // Number of hashes we expect to read from the file
    private int totalHashesRead = 0;      // Number of hashes that have been read

    /**
     * Opens the import file and parses the header. If this is successful, the
     * file will be set up to call getNextHash() to read the hash values.
     *
     * @param filename The Encase hash set
     * @throws TskCoreException There was an error opening/reading the file or
     *                          it is not the correct format
     */
    EncaseHashSetParser(String filename) throws TskCoreException {
        try {
            this.filename = filename;
            inputStream = new BufferedInputStream(new FileInputStream(filename));

            // Read in and test the 16 byte header
            byte[] header = new byte[16];
            readBuffer(header, 16);
            if (!Arrays.equals(header, encaseHeader)) {
                close();
                throw new TskCoreException("File " + filename + " does not have an Encase header");
            }

            // Read in the expected number of hashes (little endian)
            byte[] sizeBuffer = new byte[4];
            readBuffer(sizeBuffer, 4);
            expectedHashCount = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
                    | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);

            // Read in a bunch of nulls
            byte[] filler = new byte[0x3f4];
            readBuffer(filler, 0x3f4);

            // Read in the hash set name
            byte[] nameBuffer = new byte[0x50];
            readBuffer(nameBuffer, 0x50);

            // Read in the hash set type
            byte[] typeBuffer = new byte[0x28];
            readBuffer(typeBuffer, 0x28);

            // At this point we're past the header and ready to read in the hashes
        } catch (IOException ex) {
            close();
            throw new TskCoreException("Error reading " + filename, ex);
        } catch (TskCoreException ex) {
            close();
            throw ex;
        }
    }

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    @Override
    public long getExpectedHashCount() {
        return expectedHashCount;
    }

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    @Override
    public boolean doneReading() {
        return (totalHashesRead >= expectedHashCount);
    }

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     * @throws TskCoreException
     */
    @Override
    public String getNextHash() throws TskCoreException {
        if (inputStream == null) {
            throw new TskCoreException("Attempting to read from null inputStream");
        }

        byte[] hashBytes = new byte[16];
        byte[] divider = new byte[2];
        try {
            readBuffer(hashBytes, 16);
            readBuffer(divider, 2);

            StringBuilder sb = new StringBuilder();
            for (byte b : hashBytes) {
                sb.append(String.format("%02x", b));
            }

            totalHashesRead++;
            return sb.toString();
        } catch (IOException ex) {
            throw new TskCoreException("Ran out of data while reading Encase hash set " + filename, ex);
        }
    }

    /**
     * Closes the import file
     */
    @Override
    public final void close() {
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (IOException ex) {
                Logger.getLogger(EncaseHashSetParser.class.getName()).log(Level.SEVERE, "Error closing Encase hash set " + filename, ex);
            } finally {
                inputStream = null;
            }
        }
    }

    private void readBuffer(byte[] buffer, int length) throws TskCoreException, IOException {
        if (inputStream == null) {
            throw new TskCoreException("readBuffer called on null inputStream");
        }
        if (length != inputStream.read(buffer)) {
            throw new TskCoreException("Ran out of data unexpectedly while parsing Encase file " + filename);
        }
    }
}
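The hand-rolled little-endian decode of sizeBuffer in the constructor above is equivalent to using java.nio.ByteBuffer. A self-contained sketch; the class name LittleEndianDemo and the sample bytes are invented for illustration, only the four-byte little-endian count field is carried over from the parser:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

class LittleEndianDemo {
    public static void main(String[] args) {
        // Example: 123456 (0x0001E240) stored least-significant byte first,
        // as in the Encase header's hash-count field.
        byte[] sizeBuffer = {(byte) 0x40, (byte) 0xe2, (byte) 0x01, (byte) 0x00};

        // Manual decode, as done in EncaseHashSetParser
        long manual = ((sizeBuffer[3] & 0xff) << 24) | ((sizeBuffer[2] & 0xff) << 16)
                | ((sizeBuffer[1] & 0xff) << 8) | (sizeBuffer[0] & 0xff);

        // Equivalent decode via ByteBuffer
        long viaBuffer = ByteBuffer.wrap(sizeBuffer).order(ByteOrder.LITTLE_ENDIAN).getInt();

        System.out.println(manual + " == " + viaBuffer); // 123456 == 123456
    }
}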
@@ -423,7 +423,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {
    }//GEN-LAST:event_saveAsButtonActionPerformed

    @NbBundle.Messages({"HashDbCreateDatabaseDialog.missingOrg=An organization must be selected",
        "HashDbCreateDatabaseDialog.duplicateName=A hashset with this name and version already exists",
        "HashDbCreateDatabaseDialog.duplicateName=A hashset with this name already exists",
        "HashDbCreateDatabaseDialog.databaseLookupError=Error accessing central repository",
        "HashDbCreateDatabaseDialog.databaseCreationError=Error creating new hash set"
    })
@@ -500,7 +500,7 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {
        } else {
            // Check if a hash set with the same name/version already exists
            try{
                if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), EamDb.getDefaultVersion())){
                if(EamDb.getInstance().referenceSetExists(hashSetNameTextField.getText(), "")){
                    JOptionPane.showMessageDialog(this,
                            NbBundle.getMessage(this.getClass(),
                                    "HashDbCreateDatabaseDialog.duplicateName"),
@@ -522,9 +522,9 @@ final class HashDbCreateDatabaseDialog extends javax.swing.JDialog {

            try{
                int referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(selectedOrg.getOrgID(), hashSetNameTextField.getText(),
                        EamDb.getDefaultVersion(), fileKnown, false));
                        "", fileKnown, false));
                newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetNameTextField.getText(),
                        EamDb.getDefaultVersion(), referenceSetID,
                        "", referenceSetID,
                        true, sendIngestMessagesCheckbox.isSelected(), type, false);
            } catch (EamDbException | TskCoreException ex){
                Logger.getLogger(HashDbImportDatabaseDialog.class.getName()).log(Level.SEVERE, "Error creating new reference set", ex);
@@ -88,11 +88,11 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
        fileChooser.setMultiSelectionEnabled(false);
    }

    @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.idx only)"})
    @NbBundle.Messages({"HashDbImportDatabaseDialog.centralRepoExtFilter.text=Hash Database File (.kdb, .idx or .hash)"})
    private void updateFileChooserFilter() {
        fileChooser.resetChoosableFileFilters();
        if(centralRepoRadioButton.isSelected()){
            String[] EXTENSION = new String[]{"idx"}; //NON-NLS
            String[] EXTENSION = new String[]{"kdb", "idx", "hash", "Hash"}; //NON-NLS
            FileNameExtensionFilter filter = new FileNameExtensionFilter(
                    NbBundle.getMessage(this.getClass(), "HashDbImportDatabaseDialog.centralRepoExtFilter.text"), EXTENSION);
            fileChooser.setFileFilter(filter);
@@ -447,7 +447,8 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
    @NbBundle.Messages({"HashDbImportDatabaseDialog.missingVersion=A version must be entered",
        "HashDbImportDatabaseDialog.missingOrg=An organization must be selected",
        "HashDbImportDatabaseDialog.duplicateName=A hashset with this name and version already exists",
        "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository"
        "HashDbImportDatabaseDialog.databaseLookupError=Error accessing central repository",
        "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg=A hash set name must be entered."
    })
    private void okButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_okButtonActionPerformed
        // Note that the error handlers in this method call return without disposing of the
@@ -456,7 +457,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
        if (hashSetNameTextField.getText().isEmpty()) {
            JOptionPane.showMessageDialog(this,
                    NbBundle.getMessage(this.getClass(),
                            "HashDbCreateDatabaseDialog.mustEnterHashSetNameMsg"),
                            "HashDbImportDatabaseDialog.mustEnterHashSetNameMsg"),
                    NbBundle.getMessage(this.getClass(),
                            "HashDbImportDatabaseDialog.importHashDbErr"),
                    JOptionPane.ERROR_MESSAGE);
@@ -464,7 +465,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
        }

        if(centralRepoRadioButton.isSelected()){
            if(versionTextField.getText().isEmpty()){
            if(readOnlyCheckbox.isSelected() && versionTextField.getText().isEmpty()){
                JOptionPane.showMessageDialog(this,
                        NbBundle.getMessage(this.getClass(),
                                "HashDbImportDatabaseDialog.missingVersion"),
@@ -557,7 +558,7 @@ final class HashDbImportDatabaseDialog extends javax.swing.JDialog {
            version = versionTextField.getText();
        } else {
            // Editable databases don't have a version
            version = EamDb.getDefaultVersion();
            version = "";
        }
        ImportCentralRepoDbProgressDialog progressDialog = new ImportCentralRepoDbProgressDialog();
        progressDialog.importFile(hashSetNameTextField.getText(), version,
@@ -729,8 +729,6 @@ public class HashDbManager implements PropertyChangeListener {

        public abstract boolean getSearchDuringIngest();

        abstract boolean getDefaultSearchDuringIngest();

        abstract void setSearchDuringIngest(boolean useForIngest);

        public abstract boolean getSendIngestMessages();
@@ -870,12 +868,6 @@ public class HashDbManager implements PropertyChangeListener {
            return searchDuringIngest;
        }

        @Override
        boolean getDefaultSearchDuringIngest(){
            // File type hash sets are on by default
            return true;
        }

        @Override
        void setSearchDuringIngest(boolean useForIngest) {
            this.searchDuringIngest = useForIngest;
@@ -1177,12 +1169,6 @@ public class HashDbManager implements PropertyChangeListener {
            return searchDuringIngest;
        }

        @Override
        boolean getDefaultSearchDuringIngest(){
            // Central repo hash sets are off by default
            return false;
        }

        @Override
        void setSearchDuringIngest(boolean useForIngest) {
            this.searchDuringIngest = useForIngest;
@@ -135,8 +135,8 @@ final class HashLookupModuleSettings implements IngestModuleIngestJobSettings {
            }
        }

        // We didn't find it, so use the default value
        return db.getDefaultSearchDuringIngest();
        // We didn't find it, so use the value in the HashDb object
        return db.getSearchDuringIngest();
    }

    /**
@@ -128,7 +128,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
            hashDbReadOnlyLabel.setText(NO_SELECTION_TEXT);
            indexPathLabel.setText(NO_SELECTION_TEXT);


            // Update indexing components.
            hashDbIndexStatusLabel.setText(NO_SELECTION_TEXT);
            hashDbIndexStatusLabel.setForeground(Color.black);
@@ -303,6 +302,10 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
    @Messages({"HashLookupSettingsPanel.saveFail.message=Couldn't save hash db settings.",
        "HashLookupSettingsPanel.saveFail.title=Save Fail"})
    public void saveSettings() {
        // Clear out the list of new central repo hash sets. They don't need to be
        // indexed so will all be saved on both code paths.
        newReferenceSetIDs.clear();

        // Checking for any unindexed databases
        List<SleuthkitHashSet> unindexed = new ArrayList<>();
        for (HashDb db : hashSetManager.getAllHashSets()) {
@@ -375,7 +378,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
                    Logger.getLogger(HashLookupSettingsPanel.class.getName()).log(Level.SEVERE, "Error reverting central repository hash sets", ex); //NON-NLS
                }
            }

            HashDbManager.getInstance().loadLastSavedConfiguration();
        }
    }
@@ -0,0 +1,53 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import org.sleuthkit.datamodel.TskCoreException;

interface HashSetParser {

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     * @throws TskCoreException
     */
    String getNextHash() throws TskCoreException;

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    boolean doneReading();

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    long getExpectedHashCount();

    /**
     * Closes the import file
     */
    void close();
}
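For orientation, a minimal consumer of this interface works the same way as the loop in CentralRepoImportWorker.doInBackground() further down: keep pulling hashes until doneReading() reports true, and always close() in a finally block. A sketch only; the printHashes name and the choice of IdxHashSetParser are illustrative:

// Minimal driver for a HashSetParser, assuming the same package.
static void printHashes(String path) throws TskCoreException {
    HashSetParser parser = new IdxHashSetParser(path); // any implementation works
    try {
        while (!parser.doneReading()) {
            String hash = parser.getNextHash();
            if (hash != null) {           // null signals a clean end of file
                System.out.println(hash);
            }
        }
    } finally {
        parser.close();                   // parsers hold open file/db handles
    }
}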
@@ -0,0 +1,118 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Parser for idx files (*.idx)
 */
class IdxHashSetParser implements HashSetParser {

    private final String filename;       // Name of the input file (saved for logging)
    private BufferedReader reader;       // Input file
    private final long totalHashes;      // Estimated number of hashes
    private boolean doneReading = false; // Flag for if we've hit the end of the file

    IdxHashSetParser(String filename) throws TskCoreException {
        this.filename = filename;
        try {
            reader = new BufferedReader(new FileReader(filename));
        } catch (FileNotFoundException ex) {
            throw new TskCoreException("Error opening file " + filename, ex);
        }

        // Estimate the total number of hashes in the file since counting them all can be slow
        File importFile = new File(filename);
        long fileSize = importFile.length();
        totalHashes = fileSize / 0x33 + 1; // IDX file lines are generally 0x33 bytes long. We add one to prevent this from being zero
    }

    /**
     * Get the next hash to import
     *
     * @return The hash as a string, or null if the end of file was reached
     *         without error
     * @throws TskCoreException
     */
    @Override
    public String getNextHash() throws TskCoreException {
        String line;

        try {
            while ((line = reader.readLine()) != null) {

                String[] parts = line.split("\\|");

                // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
                if (parts.length != 2 || parts[0].length() == 41) {
                    continue;
                }

                return parts[0].toLowerCase();
            }
        } catch (IOException ex) {
            throw new TskCoreException("Error reading file " + filename, ex);
        }

        // We've run out of data
        doneReading = true;
        return null;
    }

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    @Override
    public boolean doneReading() {
        return doneReading;
    }

    /**
     * Get the expected number of hashes in the file. This number can be an
     * estimate.
     *
     * @return The expected hash count
     */
    @Override
    public long getExpectedHashCount() {
        return totalHashes;
    }

    /**
     * Closes the import file
     */
    @Override
    public void close() {
        try {
            reader.close();
        } catch (IOException ex) {
            Logger.getLogger(IdxHashSetParser.class.getName()).log(Level.SEVERE, "Error closing file " + filename, ex);
        }
    }
}
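The split("\\|") above implies the line layout the parser expects: one hash, a literal '|', then a second field it discards. A tiny demo with made-up lines; the sample values are invented for illustration and real idx files may differ in detail:

class IdxLineDemo {
    public static void main(String[] args) {
        String[] sample = {
            // Hypothetical header line: a 41-character dummy "hash" the parser skips
            // (String.repeat requires Java 11+)
            "0".repeat(41) + "|header",
            // Hypothetical data line: an MD5 hash plus a label the parser discards
            "0CB988D042A7F28DD5FE2B55B3F5AC7A|some-label",
        };
        for (String line : sample) {
            String[] parts = line.split("\\|");
            if (parts.length != 2 || parts[0].length() == 41) {
                continue; // same filter as IdxHashSetParser.getNextHash()
            }
            System.out.println(parts[0].toLowerCase()); // 0cb988d042a7f28dd5fe2b55b3f5ac7a
        }
    }
}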
@@ -18,12 +18,9 @@
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import java.awt.Cursor;
import java.awt.Color;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
@@ -31,8 +28,9 @@ import javax.swing.JFrame;
import javax.swing.SwingWorker;
import javax.swing.WindowConstants;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.Executors;
import javax.swing.JOptionPane;
import org.openide.util.NbBundle;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttribute;
@@ -45,24 +43,13 @@ import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

/**
 *
 * Imports a hash set into the central repository and updates a progress dialog
 */
class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements PropertyChangeListener {

    private CentralRepoImportWorker worker;
    private CentralRepoImportWorker worker; // Swing worker that will import the file and send updates to the dialog

    /**
     *
     * @param hashSetName
     * @param version
     * @param orgId
     * @param searchDuringIngest
     * @param sendIngestMessages
     * @param knownFilesType
     * @param importFile
     */
    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",
    })
    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress",})
    ImportCentralRepoDbProgressDialog() {
        super((JFrame) WindowManager.getDefault().getMainWindow(),
                Bundle.ImportCentralRepoDbProgressDialog_title_text(),
@@ -73,26 +60,44 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
    }

    private void customizeComponents() {
        // This is preventing the user from closing the dialog using the X
        setDefaultCloseOperation(WindowConstants.DO_NOTHING_ON_CLOSE);

        bnOk.setEnabled(false);
    }

    /**
     * Import the selected hash set into the central repository. Will bring up a
     * progress dialog while the import is in progress.
     *
     * @param hashSetName
     * @param version
     * @param orgId
     * @param searchDuringIngest
     * @param sendIngestMessages
     * @param knownFilesType
     * @param readOnly
     * @param importFileName
     */
    void importFile(String hashSetName, String version, int orgId,
            boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
            boolean readOnly, String importFileName) {
        setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));

        File importFile = new File(importFileName);
        worker = new ImportIDXWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
                knownFilesType, readOnly, importFile);
        worker = new CentralRepoImportWorker(hashSetName, version, orgId, searchDuringIngest, sendIngestMessages,
                knownFilesType, readOnly, importFileName);
        worker.addPropertyChangeListener(this);
        worker.execute();

        setLocationRelativeTo((JFrame) WindowManager.getDefault().getMainWindow());
        setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        this.setVisible(true);
    }

    /**
     * Get the HashDb object for the newly imported data. Should be called after
     * importFile completes.
     *
     * @return The new HashDb object or null if the import failed/was canceled
     */
    HashDbManager.HashDb getDatabase() {
        if (worker != null) {
            return worker.getDatabase();
@@ -100,39 +105,48 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
        return null;
    }

    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed= lines processed"})
    /**
     * Updates the dialog from events from the worker. The two events we handle
     * are progress updates and the done event.
     *
     * @param evt
     */
    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash set file"})
    @Override
    public void propertyChange(PropertyChangeEvent evt) {

        if ("progress".equals(evt.getPropertyName())) {
            progressBar.setValue(worker.getProgressPercentage());
            // The progress has been updated. Update the progress bar and text
            progressBar.setValue(worker.getProgress());
            lbProgress.setText(getProgressString());
        } else if ("state".equals(evt.getPropertyName())
                && (SwingWorker.StateValue.DONE.equals(evt.getNewValue()))) {
            // Disable cancel and enable ok

            // The worker is done processing
            // Disable cancel button and enable ok
            bnCancel.setEnabled(false);
            bnOk.setEnabled(true);

            if (worker.getImportSuccess()) {
                // If the import succeeded, finish the progress bar and display the
                // total number of imported hashes
                progressBar.setValue(progressBar.getMaximum());
                lbProgress.setText(getProgressString());
            } else {
                // If there was an error, reset the progress bar and display an error message
                progressBar.setValue(0);
                lbProgress.setForeground(Color.red);
                lbProgress.setText(Bundle.ImportCentralRepoDbProgressDialog_errorParsingFile_message());
            }
        }
    }

    @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.linesProcessed.message= hashes processed"})
    private String getProgressString() {
        return worker.getLinesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed();
        return worker.getNumHashesProcessed() + Bundle.ImportCentralRepoDbProgressDialog_linesProcessed_message();
    }

    private interface CentralRepoImportWorker{

        void execute();
        boolean cancel(boolean mayInterruptIfRunning);
        void addPropertyChangeListener(PropertyChangeListener dialog);
        int getProgressPercentage();
        long getLinesProcessed();
        HashDbManager.HashDb getDatabase();
    }

    class ImportIDXWorker extends SwingWorker<Void,Void> implements CentralRepoImportWorker{
    private class CentralRepoImportWorker extends SwingWorker<Void, Void> {

        private final int HASH_IMPORT_THRESHOLD = 10000;
        private final String hashSetName;
@@ -142,15 +156,15 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
        private final boolean sendIngestMessages;
        private final HashDbManager.HashDb.KnownFilesType knownFilesType;
        private final boolean readOnly;
        private final File importFile;
        private final long totalLines;
        private int referenceSetID = -1;
        private final String importFileName;
        private HashDbManager.CentralRepoHashSet newHashDb = null;
        private final AtomicLong numLines = new AtomicLong();
        private final AtomicInteger referenceSetID = new AtomicInteger();
        private final AtomicLong hashCount = new AtomicLong();
        private final AtomicBoolean importSuccess = new AtomicBoolean();

        ImportIDXWorker(String hashSetName, String version, int orgId,
        CentralRepoImportWorker(String hashSetName, String version, int orgId,
                boolean searchDuringIngest, boolean sendIngestMessages, HashDbManager.HashDb.KnownFilesType knownFilesType,
                boolean readOnly, File importFile){
                boolean readOnly, String importFileName) {

            this.hashSetName = hashSetName;
            this.version = version;
@@ -159,41 +173,59 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
            this.sendIngestMessages = sendIngestMessages;
            this.knownFilesType = knownFilesType;
            this.readOnly = readOnly;
            this.importFile = importFile;
            this.numLines.set(0);

            this.totalLines = getEstimatedTotalHashes();
            this.importFileName = importFileName;
            this.hashCount.set(0);
            this.importSuccess.set(false);
            this.referenceSetID.set(-1);
        }

        /**
         * Doing an actual count of the number of lines in a large idx file (such
         * as the nsrl) is slow, so just get something in the general area for the
         * progress bar.
         * @return Approximate number of hashes in the file
         * Get the newly created database
         *
         * @return the imported database. May be null if an error occurred or
         *         the user canceled
         */
        final long getEstimatedTotalHashes(){
            long fileSize = importFile.length();
            return (fileSize / 0x33 + 1); // IDX file lines are generally 0x33 bytes long, and we don't want this to be zero
        }

        @Override
        public HashDbManager.HashDb getDatabase(){
        synchronized HashDbManager.CentralRepoHashSet getDatabase() {
            return newHashDb;
        }

        @Override
        public long getLinesProcessed(){
            return numLines.get();
        /**
         * Get the number of hashes that have been read in so far
         *
         * @return current hash count
         */
        long getNumHashesProcessed() {
            return hashCount.get();
        }

        @Override
        public int getProgressPercentage(){
            return this.getProgress();
        /**
         * Check if the import was successful or if there was an error.
         *
         * @return true if the import process completed without error, false
         *         otherwise
         */
        boolean getImportSuccess() {
            return importSuccess.get();
        }

        @Override
        protected Void doInBackground() throws Exception {

            // Create the hash set parser
            HashSetParser hashSetParser;
            if (importFileName.toLowerCase().endsWith(".idx")) {
                hashSetParser = new IdxHashSetParser(importFileName);
            } else if(importFileName.toLowerCase().endsWith(".hash")){
                hashSetParser = new EncaseHashSetParser(importFileName);
            } else if(importFileName.toLowerCase().endsWith(".kdb")){
                hashSetParser = new KdbHashSetParser(importFileName);
            } else {
                // We've gotten here with a format that can't be processed
                throw new TskCoreException("Hash set to import is an unknown format : " + importFileName);
            }

            try {
                // Convert to the FileKnown enum used by EamGlobalSet
                TskData.FileKnown knownStatus;
                if (knownFilesType.equals(HashDbManager.HashDb.KnownFilesType.KNOWN)) {
                    knownStatus = TskData.FileKnown.KNOWN;
@@ -202,40 +234,39 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                }

                // Create an empty hashset in the central repository
                referenceSetID = EamDb.getInstance().newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly));

                EamDb dbManager = EamDb.getInstance();
                CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID); // get "FILES" type
                BufferedReader reader = new BufferedReader(new FileReader(importFile));
                String line;
                referenceSetID.set(dbManager.newReferenceSet(new EamGlobalSet(orgId, hashSetName, version, knownStatus, readOnly)));

                // Get the "FILES" content type. This is a database lookup so we
                // only want to do it once.
                CorrelationAttribute.Type contentType = dbManager.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID);

                // Holds the current batch of hashes that need to be written to the central repo
                Set<EamGlobalFileInstance> globalInstances = new HashSet<>();

                while ((line = reader.readLine()) != null) {
                while (!hashSetParser.doneReading()) {
                    if (isCancelled()) {
                        return null;
                    }

                    String[] parts = line.split("\\|");

                    // Header lines start with a 41 character dummy hash, 1 character longer than a SHA-1 hash
                    if (parts.length != 2 || parts[0].length() == 41) {
                        continue;
                    }
                    String newHash = hashSetParser.getNextHash();

                    if (newHash != null) {
                        EamGlobalFileInstance eamGlobalFileInstance = new EamGlobalFileInstance(
                                referenceSetID,
                                parts[0].toLowerCase(),
                                referenceSetID.get(),
                                newHash,
                                knownStatus,
                                "");

                        globalInstances.add(eamGlobalFileInstance);
                        numLines.incrementAndGet();

                        if(numLines.get() % HASH_IMPORT_THRESHOLD == 0){
                        // If we've hit the threshold for writing the hashes, write them
                        // all to the central repo
                        if (hashCount.incrementAndGet() % HASH_IMPORT_THRESHOLD == 0) {
                            dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                            globalInstances.clear();

                            int progress = (int)(numLines.get() * 100 / totalLines);
                            int progress = (int) (hashCount.get() * 100 / hashSetParser.getExpectedHashCount());
                            if (progress < 100) {
                                this.setProgress(progress);
                            } else {
@@ -243,22 +274,26 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                            }
                        }
                    }
                }

                dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                this.setProgress(100);

                return null;
            }

            private void deleteIncompleteSet(int idToDelete){
                if(idToDelete >= 0){
                // Add any remaining hashes to the central repo
                dbManager.bulkInsertReferenceTypeEntries(globalInstances, contentType);
                this.setProgress(100);
                return null;
            } finally {
                hashSetParser.close();
            }
        }

        private void deleteIncompleteSet() {
            if (referenceSetID.get() >= 0) {

                // This can be slow on large reference sets
                Executors.newSingleThreadExecutor().execute(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            EamDb.getInstance().deleteReferenceSet(idToDelete);
                            EamDb.getInstance().deleteReferenceSet(referenceSetID.get());
                        } catch (EamDbException ex2) {
                            Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex2);
                        }
@@ -267,12 +302,12 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
            }
        }

        @NbBundle.Messages({"ImportCentralRepoDbProgressDialog.addDbError.message=Error adding new hash set"})
        @Override
        protected void done() {
        synchronized protected void done() {

            if (isCancelled()) {
                // If the user hit cancel, delete this incomplete hash set from the central repo
                deleteIncompleteSet(referenceSetID);
                deleteIncompleteSet();
                return;
            }

@@ -280,23 +315,19 @@ class ImportCentralRepoDbProgressDialog extends javax.swing.JDialog implements P
                get();
                try {
                    newHashDb = HashDbManager.getInstance().addExistingCentralRepoHashSet(hashSetName, version,
                            referenceSetID,
                            referenceSetID.get(),
                            searchDuringIngest, sendIngestMessages, knownFilesType, readOnly);
                    importSuccess.set(true);
                } catch (TskCoreException ex) {
                    JOptionPane.showMessageDialog(null, Bundle.ImportCentralRepoDbProgressDialog_addDbError_message());
                    Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error adding imported hash set", ex);
                }
            } catch (Exception ex) {
                // Delete this incomplete hash set from the central repo
                if(referenceSetID >= 0){
                    try{
                        EamDb.getInstance().deleteReferenceSet(referenceSetID);
                    } catch (EamDbException ex2){
                        Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error deleting incomplete hash set from central repository", ex);
                    }
                }
                deleteIncompleteSet();
                Logger.getLogger(ImportCentralRepoDbProgressDialog.class.getName()).log(Level.SEVERE, "Error importing hash set", ex);
            }
        }

    }

    /**
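The dialog wiring above follows the standard SwingWorker pattern: the worker calls setProgress() from its background thread, and the listener reacts to the bound "progress" property and to the DONE state change. A stripped-down sketch of just that mechanism; the CountingWorker name and timings are invented for illustration:

import java.beans.PropertyChangeEvent;
import javax.swing.SwingWorker;

class CountingWorker extends SwingWorker<Void, Void> {
    @Override
    protected Void doInBackground() throws Exception {
        for (int i = 0; i <= 100; i += 10) {
            setProgress(i);   // fires a "progress" PropertyChangeEvent on the EDT
            Thread.sleep(50);
        }
        return null;
    }

    public static void main(String[] args) throws Exception {
        CountingWorker worker = new CountingWorker();
        worker.addPropertyChangeListener((PropertyChangeEvent evt) -> {
            if ("progress".equals(evt.getPropertyName())) {
                System.out.println("progress: " + evt.getNewValue() + "%");
            } else if ("state".equals(evt.getPropertyName())
                    && SwingWorker.StateValue.DONE.equals(evt.getNewValue())) {
                System.out.println("done"); // where the dialog flips Cancel/OK buttons
            }
        });
        worker.execute();
        Thread.sleep(2000); // crude wait so the demo JVM doesn't exit early
    }
}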
@@ -0,0 +1,160 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2011 - 2017 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.modules.hashdatabase;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Parser for Autopsy/TSK-created databases (*.kdb)
 */
public class KdbHashSetParser implements HashSetParser {

    private final String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
    private final String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS

    private final String filename;   // Name of the input file (saved for logging)
    private final long totalHashes;  // Estimated number of hashes
    private int totalHashesRead = 0; // Number of hashes that have been read
    private Connection conn;
    private Statement statement;
    private ResultSet resultSet;

    KdbHashSetParser(String filename) throws TskCoreException {
        this.filename = filename;

        conn = null;
        statement = null;
        resultSet = null;

        try {
            // Open the database
            StringBuilder connectionURL = new StringBuilder();
            connectionURL.append(JDBC_BASE_URI);
            connectionURL.append(filename);
            Class.forName(JDBC_DRIVER);
            conn = DriverManager.getConnection(connectionURL.toString());

            // Get the number of hashes in the table
            statement = conn.createStatement();
            resultSet = statement.executeQuery("SELECT count(*) AS count FROM hashes");
            if (resultSet.next()) {
                totalHashes = resultSet.getLong("count");
            } else {
                close();
                throw new TskCoreException("Error getting hash count from database " + filename);
            }

            // Get the hashes
            resultSet = statement.executeQuery("SELECT md5 FROM hashes");

            // At this point, getNextHash can read each hash from the result set
        } catch (ClassNotFoundException | SQLException ex) {
            throw new TskCoreException("Error opening/reading database " + filename, ex);
        }

    }

    /**
     * Get the next hash to import
     *
     * @return The hash as a string
     * @throws TskCoreException
     */
    @Override
    public String getNextHash() throws TskCoreException {

        try {
            if (resultSet.next()) {
                byte[] hashBytes = resultSet.getBytes("md5");
                StringBuilder sb = new StringBuilder();
                for (byte b : hashBytes) {
                    sb.append(String.format("%02x", b));
                }

                if (sb.toString().length() != 32) {
                    throw new TskCoreException("Hash has incorrect length: " + sb.toString());
                }

                totalHashesRead++;
                return sb.toString();
            } else {
                throw new TskCoreException("Could not read expected number of hashes from database " + filename);
            }
        } catch (SQLException ex) {
            throw new TskCoreException("Error reading hash from result set for database " + filename, ex);
        }
    }

    /**
     * Check if there are more hashes to read
     *
     * @return true if we've read all expected hash values, false otherwise
     */
    @Override
    public boolean doneReading() {
        return (totalHashesRead >= totalHashes);
    }

    /**
     * Get the expected number of hashes in the file.
     *
     * @return The expected hash count
     */
    @Override
    public long getExpectedHashCount() {
        return totalHashes;
    }

    /**
     * Closes the import file
     */
    @Override
    public final void close() {
        if (statement != null) {
            try {
                statement.close();
            } catch (SQLException ex) {
                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing prepared statement.", ex);
            }
        }

        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException ex) {
                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing result set.", ex);
            }
        }

        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException ex) {
                Logger.getLogger(KdbHashSetParser.class.getName()).log(Level.SEVERE, "Error closing connection.", ex);
            }
        }
    }
}
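A quick way to sanity-check the hex conversion and the schema assumption baked into this parser (a hashes table with an md5 BLOB column, read through the sqlite-jdbc driver) is an in-memory round trip. A sketch only; the table layout below is just what KdbHashSetParser itself queries, not the full kdb schema:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

class KdbRoundTripDemo {
    public static void main(String[] args) throws Exception {
        // In-memory SQLite database (requires the sqlite-jdbc driver on the classpath)
        try (Connection conn = DriverManager.getConnection("jdbc:sqlite::memory:")) {
            try (Statement stmt = conn.createStatement()) {
                stmt.execute("CREATE TABLE hashes (md5 BLOB)"); // minimal stand-in table
            }

            // Store one 16-byte MD5 value as a BLOB
            byte[] md5 = new byte[16];
            md5[0] = (byte) 0xab;
            md5[15] = (byte) 0x01;
            try (PreparedStatement ps = conn.prepareStatement("INSERT INTO hashes (md5) VALUES (?)")) {
                ps.setBytes(1, md5);
                ps.executeUpdate();
            }

            // Read it back and hex-encode exactly as KdbHashSetParser.getNextHash() does
            try (Statement stmt = conn.createStatement();
                    ResultSet rs = stmt.executeQuery("SELECT md5 FROM hashes")) {
                while (rs.next()) {
                    StringBuilder sb = new StringBuilder();
                    for (byte b : rs.getBytes("md5")) {
                        sb.append(String.format("%02x", b));
                    }
                    System.out.println(sb); // 32 hex characters: ab000...001
                }
            }
        }
    }
}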