Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-17 10:17:41 +00:00)

commit e11ce2c3f1

Merge remote-tracking branch 'upstream/develop' into fix_sub_cluster_loading
E01VerifyIngestModule.java:

@@ -23,6 +23,7 @@ import java.security.NoSuchAlgorithmException;
 import java.util.logging.Level;
 import javax.xml.bind.DatatypeConverter;
 import org.openide.util.NbBundle;
+import org.python.bouncycastle.util.Arrays;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
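The added Arrays import is the Bouncy Castle utility class that ships repackaged inside the Jython jar (hence the org.python prefix); it supplies the copyOfRange() call used in the last hunk of this file. The JDK's java.util.Arrays.copyOfRange(byte[], int, int) would be a drop-in equivalent if avoiding the repackaged class were preferred.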
@@ -48,7 +49,6 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
 
     private MessageDigest messageDigest;
     private boolean verified = false;
-    private boolean skipped = false;
     private String calculatedHash = "";
     private String storedHash = "";
     private IngestJobContext context;
@@ -75,8 +75,10 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
     @Override
     public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
         String imgName = dataSource.getName();
+
+        // Skip non-images
         if (!(dataSource instanceof Image)) {
-            logger.log(Level.INFO, "Skipping disk image image {0}", imgName); //NON-NLS
+            logger.log(Level.INFO, "Skipping non-image {0}", imgName); //NON-NLS
             services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(),
                     NbBundle.getMessage(this.getClass(),
                             "EwfVerifyIngestModule.process.skipNonEwf",
@@ -95,16 +97,17 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
             return ProcessResult.OK;
         }
 
-        // Report an error for null or empty MD5
-        if ((img.getMd5() == null) || img.getMd5().isEmpty()) {
+        if ((img.getMd5() != null) && !img.getMd5().isEmpty()) {
+            storedHash = img.getMd5().toLowerCase();
+            logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash}); //NON-NLS
+        } else {
+            // Report an error for null or empty MD5
             services.postMessage(IngestMessage.createMessage(MessageType.ERROR, E01VerifierModuleFactory.getModuleName(),
                     NbBundle.getMessage(this.getClass(),
                             "EwfVerifyIngestModule.process.noStoredHash",
                             imgName)));
             return ProcessResult.ERROR;
         }
-        storedHash = img.getMd5().toLowerCase();
-        logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash}); //NON-NLS
 
         logger.log(Level.INFO, "Starting hash verification of {0}", img.getName()); //NON-NLS
         services.postMessage(IngestMessage.createMessage(MessageType.INFO, E01VerifierModuleFactory.getModuleName(),
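For orientation: the loop patched below feeds every chunk of the image into messageDigest, and the module then compares the finished digest against the storedHash captured above. A minimal sketch of that final comparison, assuming the hex conversion goes through the DatatypeConverter imported in the first hunk (the helper class here is illustrative, not the module's actual code):

    import java.security.MessageDigest;
    import javax.xml.bind.DatatypeConverter;

    final class HashCheck {
        // Compares an accumulated digest against a stored lower-case hex MD5.
        static boolean matches(MessageDigest messageDigest, String storedHash) {
            // printHexBinary() returns upper-case hex, so normalize to match
            // storedHash = img.getMd5().toLowerCase() from the hunk above.
            String calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
            return calculatedHash.equals(storedHash);
        }
    }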
@@ -126,11 +129,12 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
         long chunkSize = 64 * img.getSsize();
         chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
 
-        int totalChunks = (int) Math.ceil(size / chunkSize);
+        // Casting to double to capture decimals
+        int totalChunks = (int) Math.ceil((double)size / (double)chunkSize);
         logger.log(Level.INFO, "Total chunks = {0}", totalChunks); //NON-NLS
         int read;
 
-        byte[] data;
+        byte[] data = new byte[(int) chunkSize];
         statusHelper.switchToDeterminate(totalChunks);
 
         // Read in byte size chunks and update the hash value with the data.
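The cast is the heart of this fix: size and chunkSize are longs, so size / chunkSize is integer division and Math.ceil() receives an already-truncated quotient; any image whose size is not an exact multiple of the chunk size loses its final partial chunk from the count. A self-contained illustration with made-up sizes:

    public class ChunkMath {
        public static void main(String[] args) {
            long size = 1000;      // hypothetical image size in bytes
            long chunkSize = 512;  // hypothetical chunk size

            // Integer division truncates first: 1000 / 512 == 1, so ceil() sees 1.0.
            int truncated = (int) Math.ceil(size / chunkSize);

            // Casting to double keeps the remainder: ceil(1.953...) == 2.0.
            int correct = (int) Math.ceil((double) size / (double) chunkSize);

            System.out.println(truncated + " vs " + correct); // prints "1 vs 2"
        }
    }

Hoisting the buffer allocation out of the loop (the byte[] data change here, paired with the deletion in the next hunk) also avoids reallocating the chunk buffer on every iteration.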
@@ -138,7 +142,6 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
             if (context.dataSourceIngestIsCancelled()) {
                 return ProcessResult.OK;
             }
-            data = new byte[(int) chunkSize];
             try {
                 read = img.read(data, i * chunkSize, chunkSize);
             } catch (TskCoreException ex) {
@@ -148,7 +151,14 @@ public class E01VerifyIngestModule implements DataSourceIngestModule {
                 logger.log(Level.SEVERE, msg, ex);
                 return ProcessResult.ERROR;
             }
-            messageDigest.update(data);
+
+            // Only update with the read bytes.
+            if(read == chunkSize) {
+                messageDigest.update(data);
+            } else {
+                byte[] subData = Arrays.copyOfRange(data, 0, read);
+                messageDigest.update(subData);
+            }
             statusHelper.progress(i);
         }
 
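This hunk is the companion to the chunk-count fix: a read of the final, partial chunk returns fewer than chunkSize bytes, and since the buffer is now reused across iterations, hashing all of data would fold stale bytes from the previous read into the digest. The same pattern as a standalone sketch using the JDK's java.util.Arrays (the stream source and names are illustrative):

    import java.io.InputStream;
    import java.security.MessageDigest;
    import java.util.Arrays;

    final class StreamHasher {
        // Hashes a stream chunk by chunk, feeding the digest only bytes actually read.
        static byte[] md5Of(InputStream in, int chunkSize) throws Exception {
            MessageDigest md = MessageDigest.getInstance("MD5");
            byte[] buf = new byte[chunkSize];
            int read;
            while ((read = in.read(buf)) != -1) {
                if (read == chunkSize) {
                    md.update(buf);
                } else {
                    // Short read: hash only buf[0..read), not the stale tail.
                    md.update(Arrays.copyOfRange(buf, 0, read));
                }
            }
            return md.digest();
        }
    }

The three-argument MessageDigest.update(buf, 0, read) would achieve the same without the intermediate copy.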
HashDbIngestModule.java:

@@ -87,6 +87,9 @@ public class HashDbIngestModule implements FileIngestModule {
         updateEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets);
 
         if (refCounter.incrementAndGet(jobId) == 1) {
+            // initialize job totals
+            getTotalsForIngestJobs(jobId);
+
             // if first module for this job then post error msgs if needed
 
             if (knownBadHashSets.isEmpty()) {
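In the second file, the refCounter.incrementAndGet(jobId) == 1 guard makes the per-job setup run exactly once: several instances of a file ingest module can be started for the same ingest job, and only the first should initialize the job totals and post the missing-hash-set warnings. A generic sketch of such a guard built on ConcurrentHashMap (Autopsy uses its own reference-counter class; this one is illustrative):

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicLong;

    final class JobRefCounter {
        private final ConcurrentHashMap<Long, AtomicLong> counts = new ConcurrentHashMap<>();

        // Increments the counter for jobId; returns 1 only for the first caller.
        long incrementAndGet(long jobId) {
            return counts.computeIfAbsent(jobId, id -> new AtomicLong()).incrementAndGet();
        }
    }

    // Usage mirroring the hunk above:
    // if (refCounter.incrementAndGet(jobId) == 1) { /* one-time per-job setup */ }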