still need to work on portable case and user custom events

Greg DiCristofaro 2021-04-26 14:31:02 -04:00
parent 217da17a18
commit 6fb71a03e8
3 changed files with 248 additions and 183 deletions

AddLogicalImageTask.java

@ -28,6 +28,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
@ -56,10 +57,50 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* A runnable that - copy the logical image folder to a destination folder - add
* SearchResults.txt and *_users.txt files to report - add an image data source
* to the case database.
*/
final class AddLogicalImageTask implements Runnable {
/**
* Information about a file including the object id of the file as well as
* the object id of the data source.
*/
private static class FileId {
private final long dataSourceId;
private final long fileId;
/**
* Main constructor.
*
* @param dataSourceId Object Id of the data source.
* @param fileId Object Id of the file.
*/
FileId(long dataSourceId, long fileId) {
this.dataSourceId = dataSourceId;
this.fileId = fileId;
}
/**
* Returns the data source id of the file.
*
* @return The data source id of the file.
*/
long getDataSourceId() {
return dataSourceId;
}
/**
* Returns the object id of the file.
*
* @return The object id of the file.
*/
long getFileId() {
return fileId;
}
}
private final static BlackboardArtifact.Type INTERESTING_FILE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
private final static Logger LOGGER = Logger.getLogger(AddLogicalImageTask.class.getName());
@ -108,8 +149,8 @@ final class AddLogicalImageTask implements Runnable {
}
/**
* Add SearchResults.txt and *_users.txt to the case report. Adds the image
* to the case database.
*/
@Messages({
"# {0} - src", "# {1} - dest", "AddLogicalImageTask.copyingImageFromTo=Copying image from {0} to {1}",
@ -180,7 +221,7 @@ final class AddLogicalImageTask implements Runnable {
return name.endsWith(USERS_TXT);
}
});
for (File userFile : userFiles) {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(userFile.getName()));
status = addReport(userFile.toPath(), userFile.getName() + " " + src.getName());
@ -188,10 +229,10 @@ final class AddLogicalImageTask implements Runnable {
errorList.add(status);
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(userFile.getName()));
}
// Get all VHD files in the dest directory
List<String> imagePaths = new ArrayList<>();
for (File f : dest.listFiles()) {
@ -217,7 +258,7 @@ final class AddLogicalImageTask implements Runnable {
}
List<Content> newDataSources = new ArrayList<>();
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
if (imagePaths.isEmpty()) {
createVHD = false;
@ -361,11 +402,11 @@ final class AddLogicalImageTask implements Runnable {
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})", "# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})",
"AddLogicalImageTask.logicalImagerResults=Logical Imager results" "AddLogicalImageTask.logicalImagerResults=Logical Imager results"
}) })
private void addInterestingFiles(Map<String, List<Long>> interestingFileMap) throws IOException, TskCoreException { private void addInterestingFiles(Map<String, List<FileId>> interestingFileMap) throws IOException, TskCoreException {
int lineNumber = 0; int lineNumber = 0;
List<BlackboardArtifact> artifacts = new ArrayList<>(); List<BlackboardArtifact> artifacts = new ArrayList<>();
Iterator<Map.Entry<String, List<Long>>> iterator = interestingFileMap.entrySet().iterator(); Iterator<Map.Entry<String, List<FileId>>> iterator = interestingFileMap.entrySet().iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
if (cancelled) { if (cancelled) {
@ -374,14 +415,14 @@ final class AddLogicalImageTask implements Runnable {
break;
}
Map.Entry<String, List<FileId>> entry = iterator.next();
String key = entry.getKey();
String ruleName;
String[] split = key.split("\t");
ruleName = split[1];
List<FileId> fileIds = entry.getValue();
for (FileId fileId : fileIds) {
if (cancelled) {
postArtifacts(artifacts);
return;
@ -393,7 +434,7 @@ final class AddLogicalImageTask implements Runnable {
postArtifacts(artifacts);
artifacts.clear();
}
addInterestingFileToArtifacts(fileId.getFileId(), fileId.getDataSourceId(), Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts);
lineNumber++;
}
iterator.remove();
@ -401,34 +442,33 @@ final class AddLogicalImageTask implements Runnable {
postArtifacts(artifacts);
}
private void addInterestingFileToArtifacts(long fileId, long dataSourceId, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
BlackboardArtifact artifact = this.blackboard.newAnalysisResult(
INTERESTING_FILE_TYPE,
fileId,
dataSourceId,
Score.SCORE_UNKNOWN,
null,
null,
null,
Arrays.asList(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName),
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName)
));
artifacts.add(artifact);
}
@Messages({
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})"
})
private Map<String, List<FileId>> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException {
Map<Long, List<String>> objIdToimagePathsMap = currentCase.getSleuthkitCase().getImagePaths();
imagePathToObjIdMap = imagePathsToDataSourceObjId(objIdToimagePathsMap);
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new InputStreamReader(
new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
String line;
br.readLine(); // skip the header line
int lineNumber = 2;
@ -458,16 +498,14 @@ final class AddLogicalImageTask implements Runnable {
String query = makeQuery(vhdFilename, fileMetaAddressStr, parentPath, filename);
List<AbstractFile> matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query);
List<FileId> fileIds = new ArrayList<>();
for (AbstractFile file : matchedFiles) {
fileIds.add(new FileId(file.getDataSourceObjectId(), file.getId()));
}
String key = String.format("%s\t%s", ruleSetName, ruleName);
interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
.addAll(fileIds);
lineNumber++;
} // end reading file
}
@ -486,10 +524,10 @@ final class AddLogicalImageTask implements Runnable {
@Messages({
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingExtractedFile=Adding extracted files ({0}/{1})"
})
private Map<String, List<FileId>> addExtractedFiles(File src, Path resultsPath, Host host, List<Content> newDataSources) throws TskCoreException, IOException {
SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
SleuthkitCase.CaseDbTransaction trans = null;
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
try {
trans = skCase.beginTransaction();
@ -533,21 +571,20 @@ final class AddLogicalImageTask implements Runnable {
//addLocalFile here
AbstractFile fileAdded = fileImporter.addLocalFile(
Paths.get(src.toString(), extractedFilePath).toFile(),
filename,
parentPath,
Long.parseLong(ctime),
Long.parseLong(crtime),
Long.parseLong(atime),
Long.parseLong(mtime),
localFilesDataSource);
String key = String.format("%s\t%s", ruleSetName, ruleName);
long dataSourceId = fileAdded.getDataSourceObjectId();
long fileId = fileAdded.getId();
interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
.add(new FileId(dataSourceId, fileId));
lineNumber++;
} // end reading file
}
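A note on the refactor in the hunks above: the containsKey/get/put bookkeeping is replaced with Map.computeIfAbsent, and each interesting-file match is now recorded as a (data source id, file id) pair rather than a bare file id. The following is a minimal, self-contained sketch of that grouping idiom; the FileRef class, key, and values are illustrative stand-ins, not the commit's own FileId or report keys.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class GroupingSketch {

    // Illustrative stand-in for the FileId pair introduced in this commit.
    static final class FileRef {
        final long dataSourceId;
        final long fileId;

        FileRef(long dataSourceId, long fileId) {
            this.dataSourceId = dataSourceId;
            this.fileId = fileId;
        }
    }

    public static void main(String[] args) {
        Map<String, List<FileRef>> matchesByRule = new HashMap<>();

        // computeIfAbsent creates the list on first use, so no containsKey/put dance is needed.
        matchesByRule.computeIfAbsent("ruleSet\truleName", k -> new ArrayList<>()).add(new FileRef(1L, 42L));
        matchesByRule.computeIfAbsent("ruleSet\truleName", k -> new ArrayList<>()).add(new FileRef(1L, 43L));

        System.out.println(matchesByRule.get("ruleSet\truleName").size()); // prints 2
    }
}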

DataSourceIntegrityIngestModule.java

@ -40,13 +40,14 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskDataException;
/**
* Data source ingest module that verifies the integrity of an Expert Witness
* Format (EWF) E01 image file by generating a hash of the file and comparing it
* to the value stored in the image. Will also generate hashes for any
* image-type data source that has none.
*/
public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
@ -56,11 +57,11 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
private final boolean computeHashes;
private final boolean verifyHashes;
private final List<HashData> hashDataList = new ArrayList<>();
private IngestJobContext context;
DataSourceIntegrityIngestModule(DataSourceIntegrityIngestSettings settings) {
computeHashes = settings.shouldComputeHashes();
verifyHashes = settings.shouldVerifyHashes();
@ -72,13 +73,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
// It's an error if the module is run without either option selected
if (!(computeHashes || verifyHashes)) {
throw new IngestModuleException(Bundle.DataSourceIntegrityIngestModule_startup_noCheckboxesSelected());
}
}
@NbBundle.Messages({
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.skipCompute=Not computing new hashes for {0} since the option was disabled",
@ -97,21 +98,20 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
"# {1} - calculatedHashValue", "# {1} - calculatedHashValue",
"DataSourceIntegrityIngestModule.process.calcHashWithType=<li>Calculated {0} hash: {1} </li>", "DataSourceIntegrityIngestModule.process.calcHashWithType=<li>Calculated {0} hash: {1} </li>",
"# {0} - imageName", "# {0} - imageName",
"DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>", "DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>",
"DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated", "DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated",
"# {0} - imageName", "# {0} - imageName",
"DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database", "DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database",
"# {0} - imageName", "# {0} - imageName",
"DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database", "DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database",
"# {0} - hashAlgorithm", "# {0} - hashAlgorithm",
"# {1} - calculatedHashValue", "# {1} - calculatedHashValue",
"# {2} - storedHashValue", "# {2} - storedHashValue",
"DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n", "DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n",
"# {0} - imageName", "# {0} - imageName",
"DataSourceIntegrityIngestModule.process.verificationSuccess=Integrity of {0} verified", "DataSourceIntegrityIngestModule.process.verificationSuccess=Integrity of {0} verified",
"# {0} - imageName", "# {0} - imageName",
"DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification", "DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",})
})
@Override @Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
String imgName = dataSource.getName(); String imgName = dataSource.getName();
@ -132,29 +132,28 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
if (size == 0) {
logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS
}
// Determine which mode we're in.
// - If there are any preset hashes, then we'll verify them (assuming the verify checkbox is selected)
// - Otherwise we'll calculate and store all three hashes (assuming the compute checkbox is selected)
// First get a list of all stored hash types
try {
if (img.getMd5() != null && !img.getMd5().isEmpty()) {
hashDataList.add(new HashData(HashType.MD5, img.getMd5()));
}
if (img.getSha1() != null && !img.getSha1().isEmpty()) {
hashDataList.add(new HashData(HashType.SHA1, img.getSha1()));
}
if (img.getSha256() != null && !img.getSha256().isEmpty()) {
hashDataList.add(new HashData(HashType.SHA256, img.getSha256()));
}
} catch (TskCoreException ex) {
String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName);
services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
logger.log(Level.SEVERE, msg, ex);
return ProcessResult.ERROR;
}
// Figure out which mode we should be in
Mode mode;
if (hashDataList.isEmpty()) {
@ -162,30 +161,30 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
} else {
mode = Mode.VERIFY;
}
// If that mode was not enabled by the user, exit
if (mode.equals(Mode.COMPUTE) && !this.computeHashes) {
logger.log(Level.INFO, "Not computing hashes for {0} since the option was disabled", imgName); //NON-NLS
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
Bundle.DataSourceIntegrityIngestModule_process_skipCompute(imgName)));
return ProcessResult.OK;
} else if (mode.equals(Mode.VERIFY) && !this.verifyHashes) {
logger.log(Level.INFO, "Not verifying hashes for {0} since the option was disabled", imgName); //NON-NLS
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
Bundle.DataSourceIntegrityIngestModule_process_skipVerify(imgName)));
return ProcessResult.OK;
}
// If we're in compute mode (i.e., the hash list is empty), add all hash algorithms
// to the list.
if (mode.equals(Mode.COMPUTE)) {
for (HashType type : HashType.values()) {
hashDataList.add(new HashData(type, ""));
}
}
// Set up the digests
for (HashData hashData : hashDataList) {
try {
hashData.digest = MessageDigest.getInstance(hashData.type.getName());
} catch (NoSuchAlgorithmException ex) {
@ -195,7 +194,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
return ProcessResult.ERROR;
}
}
// Libewf uses a chunk size of 64 times the sector size, which is the
// motivation for using it here. For other images it shouldn't matter,
// so they can use this chunk size as well.
@ -212,13 +211,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS
} }
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(), services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(), NbBundle.getMessage(this.getClass(),
"DataSourceIntegrityIngestModule.process.startingImg", "DataSourceIntegrityIngestModule.process.startingImg",
imgName))); imgName)));
// Set up the progress bar // Set up the progress bar
statusHelper.switchToDeterminate(totalChunks); statusHelper.switchToDeterminate(totalChunks);
// Read in byte size chunks and update the hash value with the data. // Read in byte size chunks and update the hash value with the data.
byte[] data = new byte[(int) chunkSize]; byte[] data = new byte[(int) chunkSize];
int read; int read;
@ -238,33 +237,33 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
// Only update with the read bytes.
if (read == chunkSize) {
for (HashData struct : hashDataList) {
struct.digest.update(data);
}
} else {
byte[] subData = Arrays.copyOfRange(data, 0, read);
for (HashData struct : hashDataList) {
struct.digest.update(subData);
}
}
statusHelper.progress(i);
}
// Produce the final hashes
for (HashData hashData : hashDataList) {
hashData.calculatedHash = DatatypeConverter.printHexBinary(hashData.digest.digest()).toLowerCase();
logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, hashData.calculatedHash}); //NON-NLS
}
if (mode.equals(Mode.VERIFY)) {
// Check that each hash matches
boolean verified = true;
String detailedResults = NbBundle
.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName);
String hashResults = "";
String artifactComment = "";
for (HashData hashData : hashDataList) {
if (hashData.storedHash.equals(hashData.calculatedHash)) {
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashMatch(hashData.type.name) + " ";
} else {
@ -272,10 +271,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashNonMatch(hashData.type.name) + " ";
artifactComment += Bundle.DataSourceIntegrityIngestModule_process_hashFailedForArtifact(hashData.type.name,
hashData.calculatedHash, hashData.storedHash) + " ";
}
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashList(hashData.calculatedHash, hashData.storedHash);
}
String verificationResultStr;
String messageResultStr;
MessageType messageType;
@ -288,64 +287,70 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.notVerified");
messageResultStr = Bundle.DataSourceIntegrityIngestModule_process_verificationFailure(imgName);
}
detailedResults += NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.resultLi", verificationResultStr);
detailedResults += hashResults;
if (!verified) {
try {
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().getBlackboard().newAnalysisResult(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED),
img.getId(), img.getId(),
Score.SCORE_UNKNOWN,
null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment)));
Case.getCurrentCase().getServices().getArtifactsBlackboard()
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting verification failed artifact", ex);
}
}
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
messageResultStr, detailedResults));
} else {
// Store the hashes in the database and update the image
try {
String results = Bundle.DataSourceIntegrityIngestModule_process_calculateHashDone(imgName);
for (HashData hashData : hashDataList) {
switch (hashData.type) {
case MD5:
try {
img.setMD5(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
case SHA1:
try {
img.setSha1(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
case SHA256:
try {
img.setSha256(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
default:
break;
}
results += Bundle.DataSourceIntegrityIngestModule_process_calcHashWithType(hashData.type.name, hashData.calculatedHash);
}
// Write the inbox message
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
imgName + Bundle.DataSourceIntegrityIngestModule_process_hashesCalculated(), results));
} catch (TskCoreException ex) {
String msg = Bundle.DataSourceIntegrityIngestModule_process_errorSavingHashes(imgName);
services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
@ -353,10 +358,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
return ProcessResult.ERROR;
}
}
return ProcessResult.OK;
}
/**
* Enum to track whether we're in compute or verify mode
*/
@ -364,36 +369,37 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
COMPUTE,
VERIFY;
}
/**
* Enum to hold the type of hash. The value in the "name" field should be
* compatible with MessageDigest
*/
private enum HashType {
MD5("MD5"),
SHA1("SHA-1"),
SHA256("SHA-256");
private final String name; // This should be the string expected by MessageDigest
HashType(String name) {
this.name = name;
}
String getName() {
return name;
}
}
/**
* Utility class to hold data for a specific hash algorithm.
*/
private class HashData {
private HashType type;
private MessageDigest digest;
private String storedHash;
private String calculatedHash;
HashData(HashType type, String storedHash) {
this.type = type;
this.storedHash = storedHash;
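A note on the HashType enum above: its name values must be algorithm strings that java.security.MessageDigest accepts ("MD5", "SHA-1", "SHA-256"), as the inline comment says. The following standalone sketch shows that lookup plus a plain hex encoding; it is independent of the module code, and its input string is purely illustrative.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class DigestSketch {
    public static void main(String[] args) throws NoSuchAlgorithmException {
        byte[] data = "example".getBytes(StandardCharsets.UTF_8);
        for (String algorithm : new String[]{"MD5", "SHA-1", "SHA-256"}) {
            // These algorithm names are required to be supported on every Java platform.
            MessageDigest digest = MessageDigest.getInstance(algorithm);
            digest.update(data);
            StringBuilder hex = new StringBuilder();
            for (byte b : digest.digest()) {
                hex.append(String.format("%02x", b & 0xff)); // hex-encode each byte
            }
            System.out.println(algorithm + ": " + hex);
        }
    }
}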

PortableCaseReportModule.java

@ -64,6 +64,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.CaseDbAccessManager;
@ -74,7 +75,9 @@ import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.Pool;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.TagName;
@ -103,7 +106,7 @@ public class PortableCaseReportModule implements ReportModule {
// These are the types for the exported file subfolders
private static final List<FileTypeCategory> FILE_TYPE_CATEGORIES = Arrays.asList(FileTypeCategory.AUDIO, FileTypeCategory.DOCUMENTS,
FileTypeCategory.EXECUTABLE, FileTypeCategory.IMAGE, FileTypeCategory.VIDEO);
// These are attribute types that have special handling and should not be copied
// into the new artifact directly.
private static final List<Integer> SPECIALLY_HANDLED_ATTRS = Arrays.asList(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT.getTypeID(),
@ -452,7 +455,7 @@ public class PortableCaseReportModule implements ReportModule {
if (options.shouldCompress()) {
progressPanel.updateStatusLabel(Bundle.PortableCaseReportModule_generateReport_compressingCase());
if (!compressCase(progressPanel, options.includeApplication() ? outputDir.getAbsolutePath() : caseFolder.getAbsolutePath())) {
// Errors have been handled already
return;
}
@ -870,8 +873,9 @@ public class PortableCaseReportModule implements ReportModule {
/**
* Add all artifacts with a given tag to the portable case.
*
* @param dataSourceId The data source id.
* @param oldTagName The TagName object from the current case.
* @param progressPanel The progress panel.
*
* @throws TskCoreException
*/
@ -892,11 +896,14 @@ public class PortableCaseReportModule implements ReportModule {
long newContentId = copyContentToPortableCase(content, progressPanel);
// Copy the artifact
Long dataSourceObjId = content == null || content.getDataSource() == null
? null
: content.getDataSource().getId();
BlackboardArtifact newArtifact = copyArtifact(dataSourceObjId, newContentId, tag.getArtifact());
// Copy any attachments
copyAttachments(newArtifact, tag.getArtifact(), portableSkCase.getAbstractFileById(newContentId));
// Copy any files associated with this artifact through the TSK_PATH_ID attribute
copyPathID(newArtifact, tag.getArtifact());
@ -912,15 +919,16 @@ public class PortableCaseReportModule implements ReportModule {
* Copy an artifact into the new case. Will also copy any associated
* artifacts
*
* @param newDataSourceId The data source ID (in the portable case).
* @param newContentId The content ID (in the portable case) of the
* source content.
* @param artifactToCopy The artifact to copy.
*
* @return The new artifact in the portable case.
*
* @throws TskCoreException
*/
private BlackboardArtifact copyArtifact(Long newDataSourceId, long newContentId, BlackboardArtifact artifactToCopy) throws TskCoreException {
if (oldArtifactIdToNewArtifact.containsKey(artifactToCopy.getArtifactID())) {
return oldArtifactIdToNewArtifact.get(artifactToCopy.getArtifactID());
@ -931,14 +939,11 @@ public class PortableCaseReportModule implements ReportModule {
List<BlackboardAttribute> newAttrs = new ArrayList<>();
if (oldAssociatedAttribute != null) {
BlackboardArtifact oldAssociatedArtifact = currentCase.getSleuthkitCase().getBlackboardArtifact(oldAssociatedAttribute.getValueLong());
BlackboardArtifact newAssociatedArtifact = copyArtifact(newDataSourceId, newContentId, oldAssociatedArtifact);
newAttrs.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
String.join(",", oldAssociatedAttribute.getSources()), newAssociatedArtifact.getArtifactID()));
}
List<BlackboardAttribute> oldAttrs = artifactToCopy.getAttributes();
// Copy over each attribute, making sure the type is in the new case.
@ -978,8 +983,21 @@ public class PortableCaseReportModule implements ReportModule {
}
}
// Create the new artifact
int newArtifactTypeId = getNewArtifactTypeId(artifactToCopy);
BlackboardArtifact.Type type = new BlackboardArtifact.Type(ARTIFACT_TYPE.fromID(newArtifactTypeId));
BlackboardArtifact newArtifact = null;
switch (type.getCategory()) {
case ANALYSIS_RESULT:
newArtifact = portableSkCase.getBlackboard().newAnalysisResult(type, newContentId, newDataSourceId, Score.SCORE_UNKNOWN, null, null, null, newAttrs);
break;
case DATA_ARTIFACT:
newArtifact = portableSkCase.getBlackboard().newDataArtifact(type, newContentId, newDataSourceId, newAttrs, osAccount);
break;
default:
throw new TskCoreException("Unknown category: " + type.getCategory());
}
oldArtifactIdToNewArtifact.put(artifactToCopy.getArtifactID(), newArtifact);
return newArtifact;
}
@ -1075,19 +1093,21 @@ public class PortableCaseReportModule implements ReportModule {
parentId = copyContent(content.getParent());
}
Long dataSourceObjId = content.getDataSource() == null ? null : content.getDataSource().getId();
Content newContent;
if (content instanceof BlackboardArtifact) {
BlackboardArtifact artifactToCopy = (BlackboardArtifact) content;
newContent = copyArtifact(dataSourceObjId, parentId, artifactToCopy);
} else {
// Get or create the host (if needed) before beginning transaction.
Host newHost = null;
if (content instanceof DataSource) {
Host oldHost = ((DataSource) content).getHost();
newHost = portableSkCase.getHostManager().newHost(oldHost.getName());
}
CaseDbTransaction trans = portableSkCase.beginTransaction();
try {
if (content instanceof Image) {
@ -1111,7 +1131,7 @@ public class PortableCaseReportModule implements ReportModule {
fs.getName(), trans);
} else if (content instanceof BlackboardArtifact) {
BlackboardArtifact artifactToCopy = (BlackboardArtifact) content;
newContent = copyArtifact(dataSourceObjId, parentId, artifactToCopy);
} else if (content instanceof AbstractFile) {
AbstractFile abstractFile = (AbstractFile) content;
@ -1169,12 +1189,13 @@ public class PortableCaseReportModule implements ReportModule {
/**
* Copy path ID attribute to new case along with the referenced file.
*
* @param newArtifact The new artifact in the portable case. Should not have
* a TSK_PATH_ID attribute.
* @param oldArtifact The old artifact.
*
* @throws TskCoreException
*/
private void copyPathID(BlackboardArtifact newArtifact, BlackboardArtifact oldArtifact) throws TskCoreException {
// Get the path ID attribute
BlackboardAttribute oldPathIdAttr = oldArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
@ -1189,15 +1210,17 @@ public class PortableCaseReportModule implements ReportModule {
}
}
}
/**
* Copy attachments to the portable case.
*
* @param newArtifact The new artifact in the portable case. Should not have
* a TSK_ATTACHMENTS attribute.
* @param oldArtifact The old artifact.
* @param newFile The new file in the portable case associated with the
* artifact.
*
* @throws TskCoreException
*/
private void copyAttachments(BlackboardArtifact newArtifact, BlackboardArtifact oldArtifact, AbstractFile newFile) throws TskCoreException {
// Get the attachments from TSK_ATTACHMENTS attribute.
@ -1220,20 +1243,19 @@ public class PortableCaseReportModule implements ReportModule {
newFileAttachments.add(new MessageAttachments.FileAttachment(portableSkCase.getAbstractFileById(newFileID)));
}
}
// Get the name of the module(s) that created the attachment
String newSourceStr = "";
List<String> oldSources = attachmentsAttr.getSources();
if (!oldSources.isEmpty()) {
newSourceStr = String.join(",", oldSources);
}
// Add the attachment. The account type specified in the constructor will not be used.
CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(),
newSourceStr, newFile, Account.Type.EMAIL);
communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments()));
} catch (BlackboardJsonAttrUtil.InvalidJsonException ex) {
throw new TskCoreException(String.format("Unable to parse json for MessageAttachments object in artifact: %s", oldArtifact.getName()), ex);
}
} else { // backward compatibility - email message attachments are derived files, children of the message.
@ -1404,7 +1426,7 @@ public class PortableCaseReportModule implements ReportModule {
"PortableCaseReportModule.compressCase.errorCompressingCase=Error compressing case", "PortableCaseReportModule.compressCase.errorCompressingCase=Error compressing case",
"PortableCaseReportModule.compressCase.canceled=Compression canceled by user",}) "PortableCaseReportModule.compressCase.canceled=Compression canceled by user",})
private boolean compressCase(ReportProgressPanel progressPanel, String folderToCompress) { private boolean compressCase(ReportProgressPanel progressPanel, String folderToCompress) {
closePortableCaseDatabase(); closePortableCaseDatabase();
// Make a temporary folder for the compressed case // Make a temporary folder for the compressed case