Add verification after data processing completes; fix cancellation bug.

This commit is contained in:
Jeff Wallace 2013-10-16 13:09:55 -04:00
parent d71dd5f9ad
commit 82a43bdf03

View File

@ -23,6 +23,8 @@ import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException; import java.security.NoSuchAlgorithmException;
import java.util.logging.Level; import java.util.logging.Level;
import java.util.logging.Logger; import java.util.logging.Logger;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.StopWatch; import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController; import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
@ -33,6 +35,7 @@ import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.PipelineContext; import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
@ -44,82 +47,117 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource {
private static final String MODULE_NAME = "ewf Verify";
private static final String MODULE_VERSION = "1.0";
private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files.";
// Fallback read size when the image reports a sector size of 0.
private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
private IngestServices services;
// True while process() is hashing; reported via hasBackgroundJobsRunning().
private volatile boolean running = false;
private Image img;
private String imgName;
private MessageDigest md;
private static Logger logger = null;
private static int messageId = 0;
private volatile boolean cancelled = false;
// Result of the last verification: stored E01 hash == computed MD5.
private boolean verified = false;
private SleuthkitCase skCase;

public EwfVerifyIngestModule() {
}
@Override @Override
public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) { public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
imgName = dataSource.getName();
try { try {
img = dataSource.getImage(); img = dataSource.getImage();
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
img = null; img = null;
logger.log(Level.SEVERE, "Failed to get image from Content.", ex); logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + dataSource.getName())); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error processing " + imgName));
return;
} }
// Skip images that are not E01
if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) { if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
img = null; img = null;
// TODO notify?
logger.log(Level.INFO, "Skipping non-ewf image " + img.getName()); logger.log(Level.INFO, "Skipping non-ewf image " + img.getName());
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
"Skipping non-ewf image " + imgName));
return;
}
// Get the hash stored in the E01 file from the database
String storedHash = "";
if (skCase.imageHasHash(img)) {
try {
storedHash = skCase.getImageHash(img).toLowerCase();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to get stored hash from image " + imgName, ex);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error retrieving stored hash value from " + imgName));
return;
}
} else {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Image " + imgName + " does not have stored hash."));
return; return;
} }
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Starting " + dataSource.getName())); logger.log(Level.INFO, "Starting ewf verification of " + img.getName());
long size = img.getSize(); // size of the image services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
"Starting " + imgName));
// TODO handle size = 0 long size = img.getSize();
if (size == 0) {
logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried.");
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error getting size of " + imgName + ". Image will not be processed."));
}
int totalChunks = (int) Math.ceil(size / CHUNK_SIZE); // Libewf uses a sector size of 64 times the sector size, which is the
System.out.println("TOTAL CHUNKS = " + totalChunks); // motivation for using it here.
long chunkSize = 64 * img.getSsize();
chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
int totalChunks = (int) Math.ceil(size / chunkSize);
logger.log(Level.INFO, "Total chunks = " + totalChunks);
int read; int read;
// TODO find an appropriate size for this.
byte[] data; byte[] data;
controller.switchToDeterminate(totalChunks); controller.switchToDeterminate(totalChunks);
running = true; running = true;
StopWatch timer = new StopWatch(); // Read in byte size chunks and update the hash value with the data.
timer.start();
for (int i = 0; i < totalChunks; i++) { for (int i = 0; i < totalChunks; i++) {
if (cancelled) { if (controller.isCancelled()) {
timer.stop();
running = false; running = false;
return; return;
} }
data = new byte[ (int) CHUNK_SIZE ]; data = new byte[ (int) chunkSize ];
try { try {
read = img.read(data, i * CHUNK_SIZE, CHUNK_SIZE); read = img.read(data, i * chunkSize, chunkSize);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + img.getName())); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, "Error processing " + img.getName()));
logger.log(Level.SEVERE, "Error reading from image: " + img.getName(), ex); logger.log(Level.SEVERE, "Error reading from image: " + imgName, ex);
return;
} }
md.update(data); md.update(data);
controller.progress(i); controller.progress(i);
} }
timer.stop();
byte[] byteHash = md.digest(); // Finish generating the hash and get it as a string value
String hash = bytesToString(byteHash); String hash = DatatypeConverter.printHexBinary(md.digest()).toLowerCase();
System.out.println("MD5 HASH: " + hash); // String hash = bytesToString(md.digest());
System.out.println("GENERATING HASH TOOK " + timer.getElapsedTimeSecs() + " SECONDS"); verified = hash.equals(storedHash);
logger.log(Level.INFO, "Calculated MD5 hash: " + hash);
running = false; running = false;
// TODO logic to check if it is verified.
verified = true;
} }
@Override @Override
public void init(IngestModuleInit initContext) { public void init(IngestModuleInit initContext) {
services = IngestServices.getDefault(); services = IngestServices.getDefault();
logger = services.getLogger(this); skCase = Case.getCurrentCase().getSleuthkitCase();
if (logger == null) {
logger = services.getLogger(this);
}
try { try {
md = MessageDigest.getInstance("MD5"); md = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException ex) { } catch (NoSuchAlgorithmException ex) {
@ -129,18 +167,19 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource {
cancelled = false; cancelled = false;
running = false; running = false;
img = null; img = null;
imgName = "";
} }
@Override @Override
public void complete() { public void complete() {
logger.info("complete() " + this.getName()); logger.info("complete() " + this.getName());
String msg = verified ? " verified." : " not verified."; String msg = verified ? " verified." : " not verified.";
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, img.getName() + msg)); services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg));
logger.info(imgName + msg);
} }
@Override @Override
public void stop() { public void stop() {
cancelled = true;
} }
@Override @Override
@ -162,12 +201,4 @@ public class EwfVerifyIngestModule extends IngestModuleDataSource {
public boolean hasBackgroundJobsRunning() { public boolean hasBackgroundJobsRunning() {
return running; return running;
} }
/**
 * Converts a raw hash digest into its lowercase hexadecimal string form.
 */
private String bytesToString(byte[] byteHash) {
    StringBuilder hex = new StringBuilder(byteHash.length * 2);
    for (int i = 0; i < byteHash.length; i++) {
        hex.append(String.format("%02x", byteHash[i] & 0xff));
    }
    return hex.toString();
}
} }