diff --git a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java index a40d0f4910..7ee7d7c47e 100644 --- a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java +++ b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java @@ -28,6 +28,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; @@ -56,10 +57,50 @@ import org.sleuthkit.datamodel.TskCoreException; /** * A runnable that - copy the logical image folder to a destination folder - add - * SearchResults.txt and *_users.txt files to report - add an image data source to the - * case database. + * SearchResults.txt and *_users.txt files to report - add an image data source + * to the case database. */ final class AddLogicalImageTask implements Runnable { + + /** + * Information about a file including the object id of the file as well as + * the object id of the data source. + */ + private static class FileId { + + private final long dataSourceId; + private final long fileId; + + /** + * Main constructor. + * + * @param dataSourceId Object Id of the data source. + * @param fileId Object Id of the file. + */ + FileId(long dataSourceId, long fileId) { + this.dataSourceId = dataSourceId; + this.fileId = fileId; + } + + /** + * Returns the data source id of the file. + * + * @return The data source id of the file. + */ + long getDataSourceId() { + return dataSourceId; + } + + /** + * Returns the object id of the file. + * + * @return The object id of the file. + */ + long getFileId() { + return fileId; + } + } + private final static BlackboardArtifact.Type INTERESTING_FILE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT); private final static Logger LOGGER = Logger.getLogger(AddLogicalImageTask.class.getName()); @@ -108,8 +149,8 @@ final class AddLogicalImageTask implements Runnable { } /** - * Add SearchResults.txt and *_users.txt to the case - * report Adds the image to the case database. + * Add SearchResults.txt and *_users.txt to the case report Adds the image + * to the case database. 
*/ @Messages({ "# {0} - src", "# {1} - dest", "AddLogicalImageTask.copyingImageFromTo=Copying image from {0} to {1}", @@ -180,7 +221,7 @@ final class AddLogicalImageTask implements Runnable { return name.endsWith(USERS_TXT); } }); - + for (File userFile : userFiles) { progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(userFile.getName())); status = addReport(userFile.toPath(), userFile.getName() + " " + src.getName()); @@ -188,10 +229,10 @@ final class AddLogicalImageTask implements Runnable { errorList.add(status); callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources); return; - } + } progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(userFile.getName())); } - + // Get all VHD files in the dest directory List imagePaths = new ArrayList<>(); for (File f : dest.listFiles()) { @@ -217,7 +258,7 @@ final class AddLogicalImageTask implements Runnable { } List newDataSources = new ArrayList<>(); - Map> interestingFileMap = new HashMap<>(); + Map> interestingFileMap = new HashMap<>(); if (imagePaths.isEmpty()) { createVHD = false; @@ -361,11 +402,11 @@ final class AddLogicalImageTask implements Runnable { "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})", "AddLogicalImageTask.logicalImagerResults=Logical Imager results" }) - private void addInterestingFiles(Map> interestingFileMap) throws IOException, TskCoreException { + private void addInterestingFiles(Map> interestingFileMap) throws IOException, TskCoreException { int lineNumber = 0; List artifacts = new ArrayList<>(); - Iterator>> iterator = interestingFileMap.entrySet().iterator(); + Iterator>> iterator = interestingFileMap.entrySet().iterator(); while (iterator.hasNext()) { if (cancelled) { @@ -374,14 +415,14 @@ final class AddLogicalImageTask implements Runnable { break; } - Map.Entry> entry = iterator.next(); + Map.Entry> entry = iterator.next(); String key = entry.getKey(); String ruleName; String[] split = key.split("\t"); ruleName = split[1]; - List fileIds = entry.getValue(); - for (Long fileId: fileIds) { + List fileIds = entry.getValue(); + for (FileId fileId : fileIds) { if (cancelled) { postArtifacts(artifacts); return; @@ -393,7 +434,7 @@ final class AddLogicalImageTask implements Runnable { postArtifacts(artifacts); artifacts.clear(); } - addInterestingFileToArtifacts(fileId, Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts); + addInterestingFileToArtifacts(fileId.getFileId(), fileId.getDataSourceId(), Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts); lineNumber++; } iterator.remove(); @@ -401,34 +442,33 @@ final class AddLogicalImageTask implements Runnable { postArtifacts(artifacts); } - private void addInterestingFileToArtifacts(long fileId, String ruleSetName, String ruleName, List artifacts) throws TskCoreException { - switch (INTERESTING_FILE_TYPE.getCategory()) { - case DATA_ARTIFACT: - case ANALYSIS_RESULT: - return this.currentCase.getSleuthkitCase().getBlackboard().newAnalysisResult(INTERESTING_FILE_TYPE, fileId, fileId, Score.SCORE_UNKNOWN, null, null, null); - default: - throw new TskCoreException("Unknown category: " + INTERESTING_FILE_TYPE.getCategory().getDisplayName()); - } - Collection attributes = new ArrayList<>(); - BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName); - 
attributes.add(setNameAttribute); - BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName); - attributes.add(ruleNameAttribute); - //BlackboardArtifact artifact = this.currentCase.getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, fileId); - artifact.addAttributes(attributes); + private void addInterestingFileToArtifacts(long fileId, long dataSourceId, String ruleSetName, String ruleName, List artifacts) throws TskCoreException { + BlackboardArtifact artifact = this.blackboard.newAnalysisResult( + INTERESTING_FILE_TYPE, + fileId, + dataSourceId, + Score.SCORE_UNKNOWN, + null, + null, + null, + Arrays.asList( + new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName), + new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName) + )); + artifacts.add(artifact); } @Messages({ "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})" }) - private Map> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException { + private Map> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException { Map> objIdToimagePathsMap = currentCase.getSleuthkitCase().getImagePaths(); imagePathToObjIdMap = imagePathsToDataSourceObjId(objIdToimagePathsMap); - Map> interestingFileMap = new HashMap<>(); + Map> interestingFileMap = new HashMap<>(); try (BufferedReader br = new BufferedReader(new InputStreamReader( - new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS + new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS String line; br.readLine(); // skip the header line int lineNumber = 2; @@ -458,16 +498,14 @@ final class AddLogicalImageTask implements Runnable { String query = makeQuery(vhdFilename, fileMetaAddressStr, parentPath, filename); List matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query); - List fileIds = new ArrayList<>(); + List fileIds = new ArrayList<>(); for (AbstractFile file : matchedFiles) { - fileIds.add(file.getId()); + fileIds.add(new FileId(file.getDataSourceObjectId(), file.getId())); } String key = String.format("%s\t%s", ruleSetName, ruleName); - if (interestingFileMap.containsKey(key)) { - interestingFileMap.get(key).addAll(fileIds); - } else { - interestingFileMap.put(key, fileIds); - } + interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>()) + .addAll(fileIds); + lineNumber++; } // end reading file } @@ -486,10 +524,10 @@ final class AddLogicalImageTask implements Runnable { @Messages({ "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingExtractedFile=Adding extracted files ({0}/{1})" }) - private Map> addExtractedFiles(File src, Path resultsPath, Host host, List newDataSources) throws TskCoreException, IOException { + private Map> addExtractedFiles(File src, Path resultsPath, Host host, List newDataSources) throws TskCoreException, IOException { SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase(); SleuthkitCase.CaseDbTransaction trans = null; - Map> interestingFileMap = new HashMap<>(); + Map> interestingFileMap = new HashMap<>(); try { trans = skCase.beginTransaction(); @@ -533,21 +571,20 @@ final class AddLogicalImageTask implements Runnable { //addLocalFile here AbstractFile fileAdded = fileImporter.addLocalFile( - Paths.get(src.toString(), 
extractedFilePath).toFile(), - filename, - parentPath, - Long.parseLong(ctime), - Long.parseLong(crtime), - Long.parseLong(atime), - Long.parseLong(mtime), - localFilesDataSource); + Paths.get(src.toString(), extractedFilePath).toFile(), + filename, + parentPath, + Long.parseLong(ctime), + Long.parseLong(crtime), + Long.parseLong(atime), + Long.parseLong(mtime), + localFilesDataSource); String key = String.format("%s\t%s", ruleSetName, ruleName); - List value = new ArrayList<>(); - if (interestingFileMap.containsKey(key)) { - value = interestingFileMap.get(key); - } - value.add(fileAdded.getId()); - interestingFileMap.put(key, value); + + long dataSourceId = fileAdded.getDataSourceObjectId(); + long fileId = fileAdded.getId(); + interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>()) + .add(new FileId(dataSourceId, fileId)); lineNumber++; } // end reading file } diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java index 8fd2bc331a..58553711c2 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java @@ -40,13 +40,14 @@ import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardAttribute; +import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.TskDataException; /** * Data source ingest module that verifies the integrity of an Expert Witness * Format (EWF) E01 image file by generating a hash of the file and comparing it - * to the value stored in the image. Will also generate hashes for any image-type - * data source that has none. + * to the value stored in the image. Will also generate hashes for any + * image-type data source that has none. */ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { @@ -56,11 +57,11 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { private final boolean computeHashes; private final boolean verifyHashes; - + private final List hashDataList = new ArrayList<>(); - + private IngestJobContext context; - + DataSourceIntegrityIngestModule(DataSourceIntegrityIngestSettings settings) { computeHashes = settings.shouldComputeHashes(); verifyHashes = settings.shouldVerifyHashes(); @@ -72,13 +73,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { @Override public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; - + // It's an error if the module is run without either option selected if (!(computeHashes || verifyHashes)) { throw new IngestModuleException(Bundle.DataSourceIntegrityIngestModule_startup_noCheckboxesSelected()); } } - + @NbBundle.Messages({ "# {0} - imageName", "DataSourceIntegrityIngestModule.process.skipCompute=Not computing new hashes for {0} since the option was disabled", @@ -97,21 +98,20 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { "# {1} - calculatedHashValue", "DataSourceIntegrityIngestModule.process.calcHashWithType=
<li>Calculated {0} hash: {1} </li>", "# {0} - imageName", - "DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>", - "DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated", + "DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>
    ", + "DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated", "# {0} - imageName", - "DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database", + "DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database", "# {0} - imageName", - "DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database", + "DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database", "# {0} - hashAlgorithm", "# {1} - calculatedHashValue", "# {2} - storedHashValue", - "DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n", + "DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n", "# {0} - imageName", "DataSourceIntegrityIngestModule.process.verificationSuccess=Integrity of {0} verified", "# {0} - imageName", - "DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification", - }) + "DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",}) @Override public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { String imgName = dataSource.getName(); @@ -132,29 +132,28 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { if (size == 0) { logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS } - + // Determine which mode we're in. // - If there are any preset hashes, then we'll verify them (assuming the verify checkbox is selected) // - Otherwise we'll calculate and store all three hashes (assuming the compute checkbox is selected) - // First get a list of all stored hash types try { - if (img.getMd5() != null && ! img.getMd5().isEmpty()) { + if (img.getMd5() != null && !img.getMd5().isEmpty()) { hashDataList.add(new HashData(HashType.MD5, img.getMd5())); } - if (img.getSha1() != null && ! img.getSha1().isEmpty()) { + if (img.getSha1() != null && !img.getSha1().isEmpty()) { hashDataList.add(new HashData(HashType.SHA1, img.getSha1())); } - if (img.getSha256() != null && ! img.getSha256().isEmpty()) { + if (img.getSha256() != null && !img.getSha256().isEmpty()) { hashDataList.add(new HashData(HashType.SHA256, img.getSha256())); } } catch (TskCoreException ex) { - String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName); - services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg)); - logger.log(Level.SEVERE, msg, ex); - return ProcessResult.ERROR; + String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName); + services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg)); + logger.log(Level.SEVERE, msg, ex); + return ProcessResult.ERROR; } - + // Figure out which mode we should be in Mode mode; if (hashDataList.isEmpty()) { @@ -162,30 +161,30 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { } else { mode = Mode.VERIFY; } - + // If that mode was not enabled by the user, exit - if (mode.equals(Mode.COMPUTE) && ! 
this.computeHashes) { + if (mode.equals(Mode.COMPUTE) && !this.computeHashes) { logger.log(Level.INFO, "Not computing hashes for {0} since the option was disabled", imgName); //NON-NLS services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), Bundle.DataSourceIntegrityIngestModule_process_skipCompute(imgName))); return ProcessResult.OK; - } else if (mode.equals(Mode.VERIFY) && ! this.verifyHashes) { + } else if (mode.equals(Mode.VERIFY) && !this.verifyHashes) { logger.log(Level.INFO, "Not verifying hashes for {0} since the option was disabled", imgName); //NON-NLS services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), Bundle.DataSourceIntegrityIngestModule_process_skipVerify(imgName))); return ProcessResult.OK; } - + // If we're in compute mode (i.e., the hash list is empty), add all hash algorithms // to the list. if (mode.equals(Mode.COMPUTE)) { - for(HashType type : HashType.values()) { + for (HashType type : HashType.values()) { hashDataList.add(new HashData(type, "")); } } - + // Set up the digests - for (HashData hashData:hashDataList) { + for (HashData hashData : hashDataList) { try { hashData.digest = MessageDigest.getInstance(hashData.type.getName()); } catch (NoSuchAlgorithmException ex) { @@ -195,7 +194,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { return ProcessResult.ERROR; } } - + // Libewf uses a chunk size of 64 times the sector size, which is the // motivation for using it here. For other images it shouldn't matter, // so they can use this chunk size as well. @@ -212,13 +211,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS } services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "DataSourceIntegrityIngestModule.process.startingImg", - imgName))); - + NbBundle.getMessage(this.getClass(), + "DataSourceIntegrityIngestModule.process.startingImg", + imgName))); + // Set up the progress bar statusHelper.switchToDeterminate(totalChunks); - + // Read in byte size chunks and update the hash value with the data. byte[] data = new byte[(int) chunkSize]; int read; @@ -238,33 +237,33 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { // Only update with the read bytes. 
if (read == chunkSize) { - for (HashData struct:hashDataList) { + for (HashData struct : hashDataList) { struct.digest.update(data); } } else { byte[] subData = Arrays.copyOfRange(data, 0, read); - for (HashData struct:hashDataList) { + for (HashData struct : hashDataList) { struct.digest.update(subData); } } statusHelper.progress(i); } - + // Produce the final hashes - for(HashData hashData:hashDataList) { + for (HashData hashData : hashDataList) { hashData.calculatedHash = DatatypeConverter.printHexBinary(hashData.digest.digest()).toLowerCase(); logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, hashData.calculatedHash}); //NON-NLS } - + if (mode.equals(Mode.VERIFY)) { // Check that each hash matches boolean verified = true; String detailedResults = NbBundle - .getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName); + .getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName); String hashResults = ""; String artifactComment = ""; - - for (HashData hashData:hashDataList) { + + for (HashData hashData : hashDataList) { if (hashData.storedHash.equals(hashData.calculatedHash)) { hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashMatch(hashData.type.name) + " "; } else { @@ -272,10 +271,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashNonMatch(hashData.type.name) + " "; artifactComment += Bundle.DataSourceIntegrityIngestModule_process_hashFailedForArtifact(hashData.type.name, hashData.calculatedHash, hashData.storedHash) + " "; - } + } hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashList(hashData.calculatedHash, hashData.storedHash); } - + String verificationResultStr; String messageResultStr; MessageType messageType; @@ -288,64 +287,70 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.notVerified"); messageResultStr = Bundle.DataSourceIntegrityIngestModule_process_verificationFailure(imgName); } - + detailedResults += NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.resultLi", verificationResultStr); detailedResults += hashResults; - + if (!verified) { try { - BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId()); - verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, - DataSourceIntegrityModuleFactory.getModuleName(), artifactComment)); - Case.getCurrentCase().getServices().getArtifactsBlackboard().postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName()); + BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().getBlackboard().newAnalysisResult( + new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED), + img.getId(), img.getId(), + Score.SCORE_UNKNOWN, + null, null, null, + Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, + DataSourceIntegrityModuleFactory.getModuleName(), artifactComment))); + + Case.getCurrentCase().getServices().getArtifactsBlackboard() + .postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName()); } catch 
(TskCoreException ex) { logger.log(Level.SEVERE, "Error creating verification failed artifact", ex); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Error posting verification failed artifact", ex); - } + } } - services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(), + services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(), messageResultStr, detailedResults)); - + } else { // Store the hashes in the database and update the image try { String results = Bundle.DataSourceIntegrityIngestModule_process_calculateHashDone(imgName); - - for (HashData hashData:hashDataList) { + + for (HashData hashData : hashDataList) { switch (hashData.type) { case MD5: try { - img.setMD5(hashData.calculatedHash); - } catch (TskDataException ex) { - logger.log(Level.SEVERE, "Error setting calculated hash", ex); - } - break; + img.setMD5(hashData.calculatedHash); + } catch (TskDataException ex) { + logger.log(Level.SEVERE, "Error setting calculated hash", ex); + } + break; case SHA1: try { - img.setSha1(hashData.calculatedHash); - } catch (TskDataException ex) { - logger.log(Level.SEVERE, "Error setting calculated hash", ex); - } - break; + img.setSha1(hashData.calculatedHash); + } catch (TskDataException ex) { + logger.log(Level.SEVERE, "Error setting calculated hash", ex); + } + break; case SHA256: try { - img.setSha256(hashData.calculatedHash); - } catch (TskDataException ex) { - logger.log(Level.SEVERE, "Error setting calculated hash", ex); - } - break; + img.setSha256(hashData.calculatedHash); + } catch (TskDataException ex) { + logger.log(Level.SEVERE, "Error setting calculated hash", ex); + } + break; default: break; } results += Bundle.DataSourceIntegrityIngestModule_process_calcHashWithType(hashData.type.name, hashData.calculatedHash); } - + // Write the inbox message - services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), + services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), imgName + Bundle.DataSourceIntegrityIngestModule_process_hashesCalculated(), results)); - + } catch (TskCoreException ex) { String msg = Bundle.DataSourceIntegrityIngestModule_process_errorSavingHashes(imgName); services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg)); @@ -353,10 +358,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { return ProcessResult.ERROR; } } - + return ProcessResult.OK; } - + /** * Enum to track whether we're in computer or verify mode */ @@ -364,36 +369,37 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { COMPUTE, VERIFY; } - + /** - * Enum to hold the type of hash. - * The value in the "name" field should be compatible with MessageDigest + * Enum to hold the type of hash. The value in the "name" field should be + * compatible with MessageDigest */ private enum HashType { - MD5("MD5"), + MD5("MD5"), SHA1("SHA-1"), SHA256("SHA-256"); - + private final String name; // This should be the string expected by MessageDigest - + HashType(String name) { this.name = name; } - + String getName() { return name; } } - + /** * Utility class to hold data for a specific hash algorithm. 
*/ private class HashData { + private HashType type; private MessageDigest digest; private String storedHash; private String calculatedHash; - + HashData(HashType type, String storedHash) { this.type = type; this.storedHash = storedHash; diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java index f8dbebe224..d01fe0a10e 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java @@ -64,6 +64,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; +import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifactTag; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.CaseDbAccessManager; @@ -74,7 +75,9 @@ import org.sleuthkit.datamodel.FileSystem; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.LocalFilesDataSource; +import org.sleuthkit.datamodel.OsAccount; import org.sleuthkit.datamodel.Pool; +import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction; import org.sleuthkit.datamodel.TagName; @@ -103,7 +106,7 @@ public class PortableCaseReportModule implements ReportModule { // These are the types for the exported file subfolders private static final List FILE_TYPE_CATEGORIES = Arrays.asList(FileTypeCategory.AUDIO, FileTypeCategory.DOCUMENTS, FileTypeCategory.EXECUTABLE, FileTypeCategory.IMAGE, FileTypeCategory.VIDEO); - + // These are attribute types that have special handling and should not be copied // into the new artifact directly. private static final List SPECIALLY_HANDLED_ATTRS = Arrays.asList(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT.getTypeID(), @@ -452,7 +455,7 @@ public class PortableCaseReportModule implements ReportModule { if (options.shouldCompress()) { progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase()); - if(!compressCase(progressPanel, options.includeApplication() ? outputDir.getAbsolutePath() : caseFolder.getAbsolutePath())){ + if (!compressCase(progressPanel, options.includeApplication() ? outputDir.getAbsolutePath() : caseFolder.getAbsolutePath())) { // Errors have been handled already return; } @@ -870,8 +873,9 @@ public class PortableCaseReportModule implements ReportModule { /** * Add all artifacts with a given tag to the portable case. * - * @param oldTagName The TagName object from the current case - * @param progressPanel The progress panel + * @param dataSourceId The data source id. + * @param oldTagName The TagName object from the current case. + * @param progressPanel The progress panel. * * @throws TskCoreException */ @@ -892,11 +896,14 @@ public class PortableCaseReportModule implements ReportModule { long newContentId = copyContentToPortableCase(content, progressPanel); // Copy the artifact - BlackboardArtifact newArtifact = copyArtifact(newContentId, tag.getArtifact()); - + Long dataSourceObjId = content == null || content.getDataSource() == null + ? 
null + : content.getDataSource().getId(); + BlackboardArtifact newArtifact = copyArtifact(dataSourceObjId, newContentId, tag.getArtifact()); + // Copy any attachments copyAttachments(newArtifact, tag.getArtifact(), portableSkCase.getAbstractFileById(newContentId)); - + // Copy any files associated with this artifact through the TSK_PATH_ID attribute copyPathID(newArtifact, tag.getArtifact()); @@ -912,15 +919,16 @@ public class PortableCaseReportModule implements ReportModule { * Copy an artifact into the new case. Will also copy any associated * artifacts * - * @param newContentId The content ID (in the portable case) of the source - * content - * @param artifactToCopy The artifact to copy + * @param newDataSourceId The data source ID (in the portable case). + * @param newContentId The content ID (in the portable case) of the + * source content. + * @param artifactToCopy The artifact to copy. * - * @return The new artifact in the portable case + * @return The new artifact in the portable case. * * @throws TskCoreException */ - private BlackboardArtifact copyArtifact(long newContentId, BlackboardArtifact artifactToCopy) throws TskCoreException { + private BlackboardArtifact copyArtifact(Long newDataSourceId, long newContentId, BlackboardArtifact artifactToCopy) throws TskCoreException { if (oldArtifactIdToNewArtifact.containsKey(artifactToCopy.getArtifactID())) { return oldArtifactIdToNewArtifact.get(artifactToCopy.getArtifactID()); @@ -931,14 +939,11 @@ public class PortableCaseReportModule implements ReportModule { List newAttrs = new ArrayList<>(); if (oldAssociatedAttribute != null) { BlackboardArtifact oldAssociatedArtifact = currentCase.getSleuthkitCase().getBlackboardArtifact(oldAssociatedAttribute.getValueLong()); - BlackboardArtifact newAssociatedArtifact = copyArtifact(newContentId, oldAssociatedArtifact); + BlackboardArtifact newAssociatedArtifact = copyArtifact(newDataSourceId, newContentId, oldAssociatedArtifact); newAttrs.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, String.join(",", oldAssociatedAttribute.getSources()), newAssociatedArtifact.getArtifactID())); } - - // Create the new artifact - int newArtifactTypeId = getNewArtifactTypeId(artifactToCopy); - BlackboardArtifact newArtifact = portableSkCase.newBlackboardArtifact(newArtifactTypeId, newContentId); + List oldAttrs = artifactToCopy.getAttributes(); // Copy over each attribute, making sure the type is in the new case. 
@@ -978,8 +983,21 @@ public class PortableCaseReportModule implements ReportModule { } } - newArtifact.addAttributes(newAttrs); - + // Create the new artifact + int newArtifactTypeId = getNewArtifactTypeId(artifactToCopy); + BlackboardArtifact.Type type = new BlackboardArtifact.Type(ARTIFACT_TYPE.fromID(newArtifactTypeId)); + BlackboardArtifact newArtifact = null; + switch (type.getCategory()) { + case ANALYSIS_RESULT: + newArtifact = portableSkCase.getBlackboard().newAnalysisResult(type, newContentId, newDataSourceId, Score.SCORE_UNKNOWN, null, null, null, newAttrs); + break; + case DATA_ARTIFACT: + newArtifact = portableSkCase.getBlackboard().newDataArtifact(type, newContentId, newDataSourceId, newAttrs, null); + break; + default: + throw new TskCoreException("Unknown category: " + type.getCategory()); + } + oldArtifactIdToNewArtifact.put(artifactToCopy.getArtifactID(), newArtifact); return newArtifact; } @@ -1075,19 +1093,21 @@ public class PortableCaseReportModule implements ReportModule { parentId = copyContent(content.getParent()); } + Long dataSourceObjId = content.getDataSource() == null ? null : content.getDataSource().getId(); + Content newContent; if (content instanceof BlackboardArtifact) { BlackboardArtifact artifactToCopy = (BlackboardArtifact) content; - newContent = copyArtifact(parentId, artifactToCopy); + newContent = copyArtifact(dataSourceObjId, parentId, artifactToCopy); } else { - + // Get or create the host (if needed) before beginning transaction. Host newHost = null; if (content instanceof DataSource) { - Host oldHost = ((DataSource)content).getHost(); + Host oldHost = ((DataSource) content).getHost(); newHost = portableSkCase.getHostManager().newHost(oldHost.getName()); } - + CaseDbTransaction trans = portableSkCase.beginTransaction(); try { if (content instanceof Image) { @@ -1111,7 +1131,7 @@ public class PortableCaseReportModule implements ReportModule { fs.getName(), trans); } else if (content instanceof BlackboardArtifact) { BlackboardArtifact artifactToCopy = (BlackboardArtifact) content; - newContent = copyArtifact(parentId, artifactToCopy); + newContent = copyArtifact(dataSourceObjId, parentId, artifactToCopy); } else if (content instanceof AbstractFile) { AbstractFile abstractFile = (AbstractFile) content; @@ -1169,12 +1189,13 @@ public class PortableCaseReportModule implements ReportModule { /** * Copy path ID attribute to new case along with the referenced file. - * - * @param newArtifact The new artifact in the portable case. Should not have a TSK_PATH_ID attribute. + * + * @param newArtifact The new artifact in the portable case. Should not have + * a TSK_PATH_ID attribute. * @param oldArtifact The old artifact. - * - * @throws TskCoreException - */ + * + * @throws TskCoreException + */ private void copyPathID(BlackboardArtifact newArtifact, BlackboardArtifact oldArtifact) throws TskCoreException { // Get the path ID attribute BlackboardAttribute oldPathIdAttr = oldArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID)); @@ -1189,15 +1210,17 @@ public class PortableCaseReportModule implements ReportModule { } } } - + /** * Copy attachments to the portable case. - * - * @param newArtifact The new artifact in the portable case. Should not have a TSK_ATTACHMENTS attribute. + * + * @param newArtifact The new artifact in the portable case. Should not have + * a TSK_ATTACHMENTS attribute. * @param oldArtifact The old artifact. - * @param newFile The new file in the portable case associated with the artifact. 
- * - * @throws TskCoreException + * @param newFile The new file in the portable case associated with the + * artifact. + * + * @throws TskCoreException */ private void copyAttachments(BlackboardArtifact newArtifact, BlackboardArtifact oldArtifact, AbstractFile newFile) throws TskCoreException { // Get the attachments from TSK_ATTACHMENTS attribute. @@ -1220,20 +1243,19 @@ public class PortableCaseReportModule implements ReportModule { newFileAttachments.add(new MessageAttachments.FileAttachment(portableSkCase.getAbstractFileById(newFileID))); } } - + // Get the name of the module(s) that created the attachment String newSourceStr = ""; List oldSources = attachmentsAttr.getSources(); - if (! oldSources.isEmpty()) { + if (!oldSources.isEmpty()) { newSourceStr = String.join(",", oldSources); } - + // Add the attachment. The account type specified in the constructor will not be used. CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(), newSourceStr, newFile, Account.Type.EMAIL); communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments())); - } - catch (BlackboardJsonAttrUtil.InvalidJsonException ex) { + } catch (BlackboardJsonAttrUtil.InvalidJsonException ex) { throw new TskCoreException(String.format("Unable to parse json for MessageAttachments object in artifact: %s", oldArtifact.getName()), ex); } } else { // backward compatibility - email message attachments are derived files, children of the message. @@ -1404,7 +1426,7 @@ public class PortableCaseReportModule implements ReportModule { "PortableCaseReportModule.compressCase.errorCompressingCase=Error compressing case", "PortableCaseReportModule.compressCase.canceled=Compression canceled by user",}) private boolean compressCase(ReportProgressPanel progressPanel, String folderToCompress) { - + closePortableCaseDatabase(); // Make a temporary folder for the compressed case
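
For reviewers, the snippet below is a minimal sketch (plain Java, outside the diff) of the artifact-creation pattern these three files converge on: choose the factory method from the artifact type's category, pass the owning data source object id explicitly, and post the result to the blackboard. The class and method names (ArtifactCreationExample, createAndPost) are hypothetical, and the exact return type of newAnalysisResult differs between Sleuth Kit releases; the calls simply mirror those used in the changes above.

import java.util.Collection;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Hypothetical helper illustrating the pattern used in this changeset:
 * artifacts are created through the Blackboard factory methods rather than
 * the older newBlackboardArtifact()/addAttributes() calls.
 */
final class ArtifactCreationExample {

    private ArtifactCreationExample() {
    }

    static BlackboardArtifact createAndPost(Blackboard blackboard, BlackboardArtifact.Type type,
            long objId, Long dataSourceObjId, Collection<BlackboardAttribute> attrs,
            String moduleName) throws TskCoreException, Blackboard.BlackboardException {

        BlackboardArtifact artifact;
        switch (type.getCategory()) {
            case ANALYSIS_RESULT:
                // Analysis results carry a score; SCORE_UNKNOWN with null
                // conclusion/configuration/justification matches the usage in the diff.
                artifact = blackboard.newAnalysisResult(type, objId, dataSourceObjId,
                        Score.SCORE_UNKNOWN, null, null, null, attrs);
                break;
            case DATA_ARTIFACT:
                // Data artifacts take an optional OS account reference (none here).
                artifact = blackboard.newDataArtifact(type, objId, dataSourceObjId, attrs, null);
                break;
            default:
                throw new TskCoreException("Unknown category: " + type.getCategory().getDisplayName());
        }

        // Publish the new artifact so listeners (e.g. the UI) are notified.
        blackboard.postArtifact(artifact, moduleName);
        return artifact;
    }
}

The key point is the category-to-factory mapping: ANALYSIS_RESULT types go through newAnalysisResult with a Score, DATA_ARTIFACT types go through newDataArtifact, and both take the data source object id explicitly instead of deriving it from the content.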