merge from 7317

Greg DiCristofaro 2021-05-13 08:10:02 -04:00
commit 2cbfea0d8f
43 changed files with 523 additions and 385 deletions

View File

@ -68,6 +68,7 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.PersonaAccount;
import org.sleuthkit.datamodel.Account;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT;
import org.sleuthkit.datamodel.CommunicationsUtils;
import org.sleuthkit.datamodel.Score;
/**
* Listen for ingest events and update entries in the Central Repository
@ -205,17 +206,20 @@ public class IngestEventsListener {
}
/**
* Make an Interesting Item artifact based on a new artifact being previously seen.
* Make an Interesting Item artifact based on a new artifact being
* previously seen.
*
* @param originalArtifact Original artifact that we want to flag
* @param caseDisplayNames List of case names artifact was previously seen in
* @param caseDisplayNames List of case names artifact was previously seen
* in
*/
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
"IngestEventsListener.prevCaseComment.text=Previous Case: "})
static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevTaggedSet_text()),
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_COMMENT, MODULE_NAME,
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
@ -230,7 +234,8 @@ public class IngestEventsListener {
* in the central repository.
*
* @param originalArtifact the artifact to create the interesting item for
* @param caseDisplayNames the case names the artifact was previously seen in
* @param caseDisplayNames the case names the artifact was previously seen
* in
*/
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
"# {0} - typeName",
@ -238,8 +243,8 @@ public class IngestEventsListener {
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevExists_text()),
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevExists_text()),
new BlackboardAttribute(
TSK_COMMENT, MODULE_NAME,
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
@ -250,9 +255,11 @@ public class IngestEventsListener {
}
/**
* Make an interesting item artifact to flag the passed in artifact.
* @param originalArtifact Artifact in current case we want to flag
* @param attributesForNewArtifact Attributes to assign to the new Interesting items artifact
* Make an interesting item artifact to flag the passed in artifact.
*
* @param originalArtifact Artifact in current case we want to flag
* @param attributesForNewArtifact Attributes to assign to the new
* Interesting items artifact
*/
private static void makeAndPostInterestingArtifact(BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact) {
try {
@ -261,8 +268,10 @@ public class IngestEventsListener {
Blackboard blackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_ARTIFACT_HIT, attributesForNewArtifact)) {
BlackboardArtifact newInterestingArtifact = abstractFile.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
newInterestingArtifact.addAttributes(attributesForNewArtifact);
BlackboardArtifact newInterestingArtifact = abstractFile.newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT),
Score.SCORE_UNKNOWN, null, null, null, attributesForNewArtifact)
.getAnalysisResult();
try {
// index the artifact for keyword search
@ -320,7 +329,7 @@ public class IngestEventsListener {
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
return;
}
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
case DATA_SOURCE_ANALYSIS_COMPLETED: {
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
@ -334,15 +343,15 @@ public class IngestEventsListener {
}
private final class AnalysisCompleteTask implements Runnable {
private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private AnalysisCompleteTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}
@Override
public void run() {
// clear the tracker to reduce memory usage
@ -370,7 +379,7 @@ public class IngestEventsListener {
if (!(dataSource instanceof Image)) {
return;
}
dataSourceName = dataSource.getName();
dataSourceObjectId = dataSource.getId();
@ -398,7 +407,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
correlationDataSource.setMd5(imageMd5Hash);
}
String imageSha1Hash = image.getSha1();
if (imageSha1Hash == null) {
imageSha1Hash = "";
@ -407,7 +416,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
correlationDataSource.setSha1(imageSha1Hash);
}
String imageSha256Hash = image.getSha256();
if (imageSha256Hash == null) {
imageSha256Hash = "";
@ -484,7 +493,7 @@ public class IngestEventsListener {
}
}
if (flagPreviousItemsEnabled
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID

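Editor's note: the recurring change in this commit, shown first in the IngestEventsListener hunks above, replaces the two-step newArtifact(...) / addAttributes(...) calls with a single newAnalysisResult(...) call. A minimal sketch of that pattern, using only names that appear in the diff (the wrapper class and method are illustrative, not part of the commit):

    import java.util.Collection;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Score;
    import org.sleuthkit.datamodel.TskCoreException;

    class AnalysisResultSketch {
        /**
         * Creates and posts an interesting-artifact analysis result, guarding
         * against duplicates the same way the hunk above does.
         */
        static void flag(Blackboard blackboard, AbstractFile file, String moduleName,
                Collection<BlackboardAttribute> attributes) throws TskCoreException, Blackboard.BlackboardException {
            if (!blackboard.artifactExists(file, TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
                // Score, conclusion, configuration and justification are left unknown/null,
                // matching the calls in the hunks above.
                BlackboardArtifact artifact = file.newAnalysisResult(
                        new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT),
                        Score.SCORE_UNKNOWN, null, null, null, attributes)
                        .getAnalysisResult();
                // Posting indexes the artifact for keyword search and notifies listeners.
                blackboard.postArtifact(artifact, moduleName);
            }
        }
    }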
View File

@ -56,6 +56,7 @@ import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.datamodel.Score;
/**
* Ingest module for inserting entries into the Central Repository database on
@ -345,8 +346,10 @@ final class CentralRepoIngestModule implements FileIngestModule {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
tifArtifact.addAttributes(attributes);
BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_FILE_HIT),
Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(tifArtifact, MODULE_NAME);

View File

@ -378,19 +378,23 @@ public final class FileTypes implements AutopsyVisitableItem {
return content.getChildrenIds();
}
@Deprecated
@SuppressWarnings("deprecation")
@Override
public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
return content.newArtifact(artifactTypeID);
}
@Deprecated
@SuppressWarnings("deprecation")
@Override
public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
return content.newArtifact(type);
}
@Override
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount) throws TskCoreException {
return content.newDataArtifact(artifactType, attributesList, osAccount);
public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
return content.newDataArtifact(artifactType, attributesList, osAccountId);
}
@Override

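Editor's note: the FileTypes change above swaps the OsAccount parameter for a Long object id in the delegating newDataArtifact override. A minimal sketch of such a forwarding wrapper, assuming an arbitrary wrapped Content (the wrapper class itself is illustrative):

    import java.util.Collection;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.DataArtifact;
    import org.sleuthkit.datamodel.TskCoreException;

    class ForwardingContentSketch {
        private final Content content;

        ForwardingContentSketch(Content content) {
            this.content = content;
        }

        // New signature: the OS account is referenced by object id, which may be null.
        public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType,
                Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
            return content.newDataArtifact(artifactType, attributesList, osAccountId);
        }
    }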
View File

@ -24,6 +24,7 @@ import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -313,9 +314,8 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
}
if (!otherAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG);
artifact.addAttributes(otherAttributes);
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), otherAttributes);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
}
} else {

View File

@ -24,6 +24,7 @@ import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import static org.sleuthkit.autopsy.datasourceprocessors.xry.AbstractSingleEntityParser.PARSER_NAME;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -142,9 +143,8 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
} else {
// Just create an artifact with the attributes that we do have.
if (!additionalAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT);
artifact.addAttributes(additionalAttributes);
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), additionalAttributes);
currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
}
}

View File

@ -25,6 +25,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
@ -105,9 +106,11 @@ final class XRYDeviceGenInfoFileParser extends AbstractSingleEntityParser {
}
}
if(!attributes.isEmpty()) {
BlackboardArtifact artifact = parent.newArtifact(
BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_INFO);
artifact.addAttributes(attributes);
if (parent instanceof AbstractFile) {
parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_INFO), attributes);
} else {
parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_INFO), attributes, null);
}
}
}
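
Editor's note: a short sketch of the branch the XRY parsers above now take. AbstractFile offers a two-argument newDataArtifact overload, while other Content implementations take an OS account object id, passed as null here. Names come from the hunks; the helper method and the explicit cast are added for illustration:

    import java.util.Collection;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.TskCoreException;

    class XryDataArtifactSketch {
        static void addDeviceInfo(Content parent, Collection<BlackboardAttribute> attributes) throws TskCoreException {
            BlackboardArtifact.Type type =
                    new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_INFO);
            if (parent instanceof AbstractFile) {
                // Files get the short overload, as in the hunk above.
                ((AbstractFile) parent).newDataArtifact(type, attributes);
            } else {
                // Other content types supply an OS account object id; none is known here.
                parent.newDataArtifact(type, attributes, null);
            }
        }
    }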

View File

@ -23,6 +23,7 @@ import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -78,8 +79,11 @@ final class XRYWebBookmarksFileParser extends AbstractSingleEntityParser {
}
}
if(!attributes.isEmpty()) {
BlackboardArtifact artifact = parent.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
artifact.addAttributes(attributes);
if (parent instanceof AbstractFile) {
parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK), attributes);
} else {
parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK), attributes, null);
}
}
}
}

View File

@ -28,6 +28,7 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
@ -36,6 +37,7 @@ import java.util.Map;
import java.util.logging.Level;
import javax.annotation.concurrent.GuardedBy;
import org.apache.commons.io.FileUtils;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@ -50,16 +52,58 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.LocalFilesDataSource;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
* A runnable that - copy the logical image folder to a destination folder - add
* SearchResults.txt and *_users.txt files to report - add an image data source to the
* case database.
* SearchResults.txt and *_users.txt files to report - add an image data source
* to the case database.
*/
final class AddLogicalImageTask implements Runnable {
/**
* Information about a file including the object id of the file as well as
* the object id of the data source.
*/
private static class FileId {
private final long dataSourceId;
private final long fileId;
/**
* Main constructor.
*
* @param dataSourceId Object Id of the data source.
* @param fileId Object Id of the file.
*/
FileId(long dataSourceId, long fileId) {
this.dataSourceId = dataSourceId;
this.fileId = fileId;
}
/**
* Returns the data source id of the file.
*
* @return The data source id of the file.
*/
long getDataSourceId() {
return dataSourceId;
}
/**
* Returns the object id of the file.
*
* @return The object id of the file.
*/
long getFileId() {
return fileId;
}
}
private final static BlackboardArtifact.Type INTERESTING_FILE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
private final static Logger LOGGER = Logger.getLogger(AddLogicalImageTask.class.getName());
private final static String SEARCH_RESULTS_TXT = "SearchResults.txt"; //NON-NLS
private final static String USERS_TXT = "_users.txt"; //NON-NLS
@ -106,8 +150,8 @@ final class AddLogicalImageTask implements Runnable {
}
/**
* Add SearchResults.txt and *_users.txt to the case
* report Adds the image to the case database.
* Adds SearchResults.txt and *_users.txt to the case report and adds the image
* to the case database.
*/
@Messages({
"# {0} - src", "# {1} - dest", "AddLogicalImageTask.copyingImageFromTo=Copying image from {0} to {1}",
@ -178,7 +222,7 @@ final class AddLogicalImageTask implements Runnable {
return name.endsWith(USERS_TXT);
}
});
for (File userFile : userFiles) {
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_addingToReport(userFile.getName()));
status = addReport(userFile.toPath(), userFile.getName() + " " + src.getName());
@ -186,10 +230,10 @@ final class AddLogicalImageTask implements Runnable {
errorList.add(status);
callback.done(DataSourceProcessorCallback.DataSourceProcessorResult.CRITICAL_ERRORS, errorList, emptyDataSources);
return;
}
}
progressMonitor.setProgressText(Bundle.AddLogicalImageTask_doneAddingToReport(userFile.getName()));
}
// Get all VHD files in the dest directory
List<String> imagePaths = new ArrayList<>();
for (File f : dest.listFiles()) {
@ -215,7 +259,7 @@ final class AddLogicalImageTask implements Runnable {
}
List<Content> newDataSources = new ArrayList<>();
Map<String, List<Long>> interestingFileMap = new HashMap<>();
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
if (imagePaths.isEmpty()) {
createVHD = false;
@ -359,11 +403,11 @@ final class AddLogicalImageTask implements Runnable {
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingInterestingFile=Adding interesting files ({0}/{1})",
"AddLogicalImageTask.logicalImagerResults=Logical Imager results"
})
private void addInterestingFiles(Map<String, List<Long>> interestingFileMap) throws IOException, TskCoreException {
private void addInterestingFiles(Map<String, List<FileId>> interestingFileMap) throws IOException, TskCoreException {
int lineNumber = 0;
List<BlackboardArtifact> artifacts = new ArrayList<>();
Iterator<Map.Entry<String, List<Long>>> iterator = interestingFileMap.entrySet().iterator();
Iterator<Map.Entry<String, List<FileId>>> iterator = interestingFileMap.entrySet().iterator();
while (iterator.hasNext()) {
if (cancelled) {
@ -372,14 +416,14 @@ final class AddLogicalImageTask implements Runnable {
break;
}
Map.Entry<String, List<Long>> entry = iterator.next();
Map.Entry<String, List<FileId>> entry = iterator.next();
String key = entry.getKey();
String ruleName;
String[] split = key.split("\t");
ruleName = split[1];
List<Long> fileIds = entry.getValue();
for (Long fileId: fileIds) {
List<FileId> fileIds = entry.getValue();
for (FileId fileId : fileIds) {
if (cancelled) {
postArtifacts(artifacts);
return;
@ -391,7 +435,7 @@ final class AddLogicalImageTask implements Runnable {
postArtifacts(artifacts);
artifacts.clear();
}
addInterestingFileToArtifacts(fileId, Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts);
addInterestingFileToArtifacts(fileId.getFileId(), fileId.getDataSourceId(), Bundle.AddLogicalImageTask_logicalImagerResults(), ruleName, artifacts);
lineNumber++;
}
iterator.remove();
@ -399,27 +443,39 @@ final class AddLogicalImageTask implements Runnable {
postArtifacts(artifacts);
}
private void addInterestingFileToArtifacts(long fileId, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName);
attributes.add(setNameAttribute);
BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName);
attributes.add(ruleNameAttribute);
BlackboardArtifact artifact = this.currentCase.getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, fileId);
artifact.addAttributes(attributes);
private void addInterestingFileToArtifacts(long fileId, long dataSourceId, String ruleSetName, String ruleName, List<BlackboardArtifact> artifacts) throws TskCoreException {
BlackboardArtifact artifact;
try {
artifact = this.blackboard.newAnalysisResult(
INTERESTING_FILE_TYPE,
fileId,
dataSourceId,
Score.SCORE_UNKNOWN,
null,
null,
null,
Arrays.asList(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, ruleSetName),
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, MODULE_NAME, ruleName)
))
.getAnalysisResult();
} catch (Blackboard.BlackboardException ex) {
throw new TskCoreException("Unable to create analysis result.", ex);
}
artifacts.add(artifact);
}
@Messages({
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.searchingInterestingFile=Searching for interesting files ({0}/{1})"
})
private Map<String, List<Long>> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException {
private Map<String, List<FileId>> getInterestingFileMapForVHD(Path resultsPath) throws TskCoreException, IOException {
Map<Long, List<String>> objIdToimagePathsMap = currentCase.getSleuthkitCase().getImagePaths();
imagePathToObjIdMap = imagePathsToDataSourceObjId(objIdToimagePathsMap);
Map<String, List<Long>> interestingFileMap = new HashMap<>();
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
try (BufferedReader br = new BufferedReader(new InputStreamReader(
new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
new FileInputStream(resultsPath.toFile()), "UTF8"))) { // NON-NLS
String line;
br.readLine(); // skip the header line
int lineNumber = 2;
@ -449,16 +505,14 @@ final class AddLogicalImageTask implements Runnable {
String query = makeQuery(vhdFilename, fileMetaAddressStr, parentPath, filename);
List<AbstractFile> matchedFiles = Case.getCurrentCase().getSleuthkitCase().findAllFilesWhere(query);
List<Long> fileIds = new ArrayList<>();
List<FileId> fileIds = new ArrayList<>();
for (AbstractFile file : matchedFiles) {
fileIds.add(file.getId());
fileIds.add(new FileId(file.getDataSourceObjectId(), file.getId()));
}
String key = String.format("%s\t%s", ruleSetName, ruleName);
if (interestingFileMap.containsKey(key)) {
interestingFileMap.get(key).addAll(fileIds);
} else {
interestingFileMap.put(key, fileIds);
}
interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
.addAll(fileIds);
lineNumber++;
} // end reading file
}
@ -477,10 +531,10 @@ final class AddLogicalImageTask implements Runnable {
@Messages({
"# {0} - file number", "# {1} - total files", "AddLogicalImageTask.addingExtractedFile=Adding extracted files ({0}/{1})"
})
private Map<String, List<Long>> addExtractedFiles(File src, Path resultsPath, Host host, List<Content> newDataSources) throws TskCoreException, IOException {
private Map<String, List<FileId>> addExtractedFiles(File src, Path resultsPath, Host host, List<Content> newDataSources) throws TskCoreException, IOException {
SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
SleuthkitCase.CaseDbTransaction trans = null;
Map<String, List<Long>> interestingFileMap = new HashMap<>();
Map<String, List<FileId>> interestingFileMap = new HashMap<>();
try {
trans = skCase.beginTransaction();
@ -524,21 +578,20 @@ final class AddLogicalImageTask implements Runnable {
//addLocalFile here
AbstractFile fileAdded = fileImporter.addLocalFile(
Paths.get(src.toString(), extractedFilePath).toFile(),
filename,
parentPath,
Long.parseLong(ctime),
Long.parseLong(crtime),
Long.parseLong(atime),
Long.parseLong(mtime),
localFilesDataSource);
Paths.get(src.toString(), extractedFilePath).toFile(),
filename,
parentPath,
Long.parseLong(ctime),
Long.parseLong(crtime),
Long.parseLong(atime),
Long.parseLong(mtime),
localFilesDataSource);
String key = String.format("%s\t%s", ruleSetName, ruleName);
List<Long> value = new ArrayList<>();
if (interestingFileMap.containsKey(key)) {
value = interestingFileMap.get(key);
}
value.add(fileAdded.getId());
interestingFileMap.put(key, value);
long dataSourceId = fileAdded.getDataSourceObjectId();
long fileId = fileAdded.getId();
interestingFileMap.computeIfAbsent(key, (k) -> new ArrayList<>())
.add(new FileId(dataSourceId, fileId));
lineNumber++;
} // end reading file
}
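
Editor's note: the AddLogicalImageTask changes above key interesting-file hits by a tab-separated "ruleSetName\truleName" string and record both the file's object id and its data source's object id, so the analysis result can later be created through Blackboard.newAnalysisResult. A compact, standalone sketch of the grouping step (FileId is simplified and the values are made up):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class FileIdGroupingSketch {
        // Simplified stand-in for the private FileId class added in the hunk above.
        static class FileId {
            final long dataSourceId;
            final long fileId;

            FileId(long dataSourceId, long fileId) {
                this.dataSourceId = dataSourceId;
                this.fileId = fileId;
            }
        }

        public static void main(String[] args) {
            Map<String, List<FileId>> interestingFileMap = new HashMap<>();
            String key = String.format("%s\t%s", "ExampleRuleSet", "ExampleRule");
            // computeIfAbsent replaces the old containsKey/put bookkeeping.
            interestingFileMap.computeIfAbsent(key, k -> new ArrayList<>())
                    .add(new FileId(1L, 42L));
            System.out.println(interestingFileMap.get(key).size()); // prints 1
        }
    }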

View File

@ -40,13 +40,14 @@ import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskDataException;
/**
* Data source ingest module that verifies the integrity of an Expert Witness
* Format (EWF) E01 image file by generating a hash of the file and comparing it
* to the value stored in the image. Will also generate hashes for any image-type
* data source that has none.
* to the value stored in the image. Will also generate hashes for any
* image-type data source that has none.
*/
public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
@ -56,11 +57,11 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
private final boolean computeHashes;
private final boolean verifyHashes;
private final List<HashData> hashDataList = new ArrayList<>();
private IngestJobContext context;
DataSourceIntegrityIngestModule(DataSourceIntegrityIngestSettings settings) {
computeHashes = settings.shouldComputeHashes();
verifyHashes = settings.shouldVerifyHashes();
@ -72,13 +73,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
// It's an error if the module is run without either option selected
if (!(computeHashes || verifyHashes)) {
throw new IngestModuleException(Bundle.DataSourceIntegrityIngestModule_startup_noCheckboxesSelected());
}
}
@NbBundle.Messages({
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.skipCompute=Not computing new hashes for {0} since the option was disabled",
@ -97,21 +98,20 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
"# {1} - calculatedHashValue",
"DataSourceIntegrityIngestModule.process.calcHashWithType=<li>Calculated {0} hash: {1} </li>",
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>",
"DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated",
"DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>",
"DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated",
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database",
"DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database",
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database",
"DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database",
"# {0} - hashAlgorithm",
"# {1} - calculatedHashValue",
"# {2} - storedHashValue",
"DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n",
"DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n",
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.verificationSuccess=Integrity of {0} verified",
"# {0} - imageName",
"DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",
})
"DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",})
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
String imgName = dataSource.getName();
@ -132,29 +132,28 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
if (size == 0) {
logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS
}
// Determine which mode we're in.
// - If there are any preset hashes, then we'll verify them (assuming the verify checkbox is selected)
// - Otherwise we'll calculate and store all three hashes (assuming the compute checkbox is selected)
// First get a list of all stored hash types
try {
if (img.getMd5() != null && ! img.getMd5().isEmpty()) {
if (img.getMd5() != null && !img.getMd5().isEmpty()) {
hashDataList.add(new HashData(HashType.MD5, img.getMd5()));
}
if (img.getSha1() != null && ! img.getSha1().isEmpty()) {
if (img.getSha1() != null && !img.getSha1().isEmpty()) {
hashDataList.add(new HashData(HashType.SHA1, img.getSha1()));
}
if (img.getSha256() != null && ! img.getSha256().isEmpty()) {
if (img.getSha256() != null && !img.getSha256().isEmpty()) {
hashDataList.add(new HashData(HashType.SHA256, img.getSha256()));
}
} catch (TskCoreException ex) {
String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName);
services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
logger.log(Level.SEVERE, msg, ex);
return ProcessResult.ERROR;
String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName);
services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
logger.log(Level.SEVERE, msg, ex);
return ProcessResult.ERROR;
}
// Figure out which mode we should be in
Mode mode;
if (hashDataList.isEmpty()) {
@ -162,30 +161,30 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
} else {
mode = Mode.VERIFY;
}
// If that mode was not enabled by the user, exit
if (mode.equals(Mode.COMPUTE) && ! this.computeHashes) {
if (mode.equals(Mode.COMPUTE) && !this.computeHashes) {
logger.log(Level.INFO, "Not computing hashes for {0} since the option was disabled", imgName); //NON-NLS
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
Bundle.DataSourceIntegrityIngestModule_process_skipCompute(imgName)));
return ProcessResult.OK;
} else if (mode.equals(Mode.VERIFY) && ! this.verifyHashes) {
} else if (mode.equals(Mode.VERIFY) && !this.verifyHashes) {
logger.log(Level.INFO, "Not verifying hashes for {0} since the option was disabled", imgName); //NON-NLS
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
Bundle.DataSourceIntegrityIngestModule_process_skipVerify(imgName)));
return ProcessResult.OK;
}
// If we're in compute mode (i.e., the hash list is empty), add all hash algorithms
// to the list.
if (mode.equals(Mode.COMPUTE)) {
for(HashType type : HashType.values()) {
for (HashType type : HashType.values()) {
hashDataList.add(new HashData(type, ""));
}
}
// Set up the digests
for (HashData hashData:hashDataList) {
for (HashData hashData : hashDataList) {
try {
hashData.digest = MessageDigest.getInstance(hashData.type.getName());
} catch (NoSuchAlgorithmException ex) {
@ -195,7 +194,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
return ProcessResult.ERROR;
}
}
// Libewf uses a chunk size of 64 times the sector size, which is the
// motivation for using it here. For other images it shouldn't matter,
// so they can use this chunk size as well.
@ -212,13 +211,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS
}
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"DataSourceIntegrityIngestModule.process.startingImg",
imgName)));
NbBundle.getMessage(this.getClass(),
"DataSourceIntegrityIngestModule.process.startingImg",
imgName)));
// Set up the progress bar
statusHelper.switchToDeterminate(totalChunks);
// Read in byte size chunks and update the hash value with the data.
byte[] data = new byte[(int) chunkSize];
int read;
@ -238,33 +237,33 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
// Only update with the read bytes.
if (read == chunkSize) {
for (HashData struct:hashDataList) {
for (HashData struct : hashDataList) {
struct.digest.update(data);
}
} else {
byte[] subData = Arrays.copyOfRange(data, 0, read);
for (HashData struct:hashDataList) {
for (HashData struct : hashDataList) {
struct.digest.update(subData);
}
}
statusHelper.progress(i);
}
// Produce the final hashes
for(HashData hashData:hashDataList) {
for (HashData hashData : hashDataList) {
hashData.calculatedHash = DatatypeConverter.printHexBinary(hashData.digest.digest()).toLowerCase();
logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, hashData.calculatedHash}); //NON-NLS
}
if (mode.equals(Mode.VERIFY)) {
// Check that each hash matches
boolean verified = true;
String detailedResults = NbBundle
.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName);
.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName);
String hashResults = "";
String artifactComment = "";
for (HashData hashData:hashDataList) {
for (HashData hashData : hashDataList) {
if (hashData.storedHash.equals(hashData.calculatedHash)) {
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashMatch(hashData.type.name) + " ";
} else {
@ -272,10 +271,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashNonMatch(hashData.type.name) + " ";
artifactComment += Bundle.DataSourceIntegrityIngestModule_process_hashFailedForArtifact(hashData.type.name,
hashData.calculatedHash, hashData.storedHash) + " ";
}
}
hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashList(hashData.calculatedHash, hashData.storedHash);
}
String verificationResultStr;
String messageResultStr;
MessageType messageType;
@ -288,64 +287,71 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.notVerified");
messageResultStr = Bundle.DataSourceIntegrityIngestModule_process_verificationFailure(imgName);
}
detailedResults += NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.resultLi", verificationResultStr);
detailedResults += hashResults;
if (!verified) {
try {
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId());
verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment));
Case.getCurrentCase().getServices().getArtifactsBlackboard().postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().getBlackboard().newAnalysisResult(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED),
img.getId(), img.getId(),
Score.SCORE_UNKNOWN,
null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment)))
.getAnalysisResult();
Case.getCurrentCase().getServices().getArtifactsBlackboard()
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting verification failed artifact", ex);
}
}
}
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
messageResultStr, detailedResults));
} else {
// Store the hashes in the database and update the image
try {
String results = Bundle.DataSourceIntegrityIngestModule_process_calculateHashDone(imgName);
for (HashData hashData:hashDataList) {
for (HashData hashData : hashDataList) {
switch (hashData.type) {
case MD5:
try {
img.setMD5(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
img.setMD5(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
case SHA1:
try {
img.setSha1(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
img.setSha1(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
case SHA256:
try {
img.setSha256(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
img.setSha256(hashData.calculatedHash);
} catch (TskDataException ex) {
logger.log(Level.SEVERE, "Error setting calculated hash", ex);
}
break;
default:
break;
}
results += Bundle.DataSourceIntegrityIngestModule_process_calcHashWithType(hashData.type.name, hashData.calculatedHash);
}
// Write the inbox message
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
imgName + Bundle.DataSourceIntegrityIngestModule_process_hashesCalculated(), results));
} catch (TskCoreException ex) {
String msg = Bundle.DataSourceIntegrityIngestModule_process_errorSavingHashes(imgName);
services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
@ -353,10 +359,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
return ProcessResult.ERROR;
}
}
return ProcessResult.OK;
}
/**
* Enum to track whether we're in compute or verify mode
*/
@ -364,36 +370,37 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
COMPUTE,
VERIFY;
}
/**
* Enum to hold the type of hash.
* The value in the "name" field should be compatible with MessageDigest
* Enum to hold the type of hash. The value in the "name" field should be
* compatible with MessageDigest
*/
private enum HashType {
MD5("MD5"),
MD5("MD5"),
SHA1("SHA-1"),
SHA256("SHA-256");
private final String name; // This should be the string expected by MessageDigest
HashType(String name) {
this.name = name;
}
String getName() {
return name;
}
}
/**
* Utility class to hold data for a specific hash algorithm.
*/
private class HashData {
private HashType type;
private MessageDigest digest;
private String storedHash;
private String calculatedHash;
HashData(HashType type, String storedHash) {
this.type = type;
this.storedHash = storedHash;

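Editor's note: the module above reads the image in fixed-size chunks (64 times the sector size, per the comment in the diff) and feeds each chunk to one MessageDigest per hash type. A standalone sketch of that loop, with an in-memory byte array standing in for Image.read(...):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.Arrays;

    class ChunkedHashSketch {
        static String[] hash(byte[] data, int chunkSize, String... algorithms) throws NoSuchAlgorithmException {
            MessageDigest[] digests = new MessageDigest[algorithms.length];
            for (int i = 0; i < algorithms.length; i++) {
                digests[i] = MessageDigest.getInstance(algorithms[i]); // e.g. "MD5", "SHA-1", "SHA-256"
            }
            for (int offset = 0; offset < data.length; offset += chunkSize) {
                // Only update with the bytes actually read, as the module does for a short final read.
                byte[] chunk = Arrays.copyOfRange(data, offset, Math.min(offset + chunkSize, data.length));
                for (MessageDigest digest : digests) {
                    digest.update(chunk);
                }
            }
            String[] results = new String[algorithms.length];
            for (int i = 0; i < digests.length; i++) {
                StringBuilder hex = new StringBuilder();
                for (byte b : digests[i].digest()) {
                    hex.append(String.format("%02x", b));
                }
                results[i] = hex.toString();
            }
            return results;
        }

        public static void main(String[] args) throws NoSuchAlgorithmException {
            System.out.println(Arrays.toString(hash("example".getBytes(), 4, "MD5", "SHA-256")));
        }
    }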
View File

@ -76,6 +76,7 @@ import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.EncodedFileOutputStream;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -314,8 +315,9 @@ class SevenZipExtractor {
details));
if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
BlackboardArtifact artifact = rootArchive.getArchiveFile().newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_FILE_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
/*
* post the artifact which will index the artifact for
@ -852,8 +854,11 @@ class SevenZipExtractor {
if (hasEncrypted) {
String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
try {
BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType));
BlackboardArtifact artifact = archiveFile.newAnalysisResult(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED), Score.SCORE_UNKNOWN,
null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType)))
.getAnalysisResult();
try {
/*

View File

@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.modules.encryptiondetection;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
@ -36,6 +37,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
@ -80,31 +82,31 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
try {
if (dataSource instanceof Image) {
if (((Image) dataSource).getPaths().length == 0) {
logger.log(Level.SEVERE, String.format("Unable to process data source '%s' - image has no paths", dataSource.getName()));
return IngestModule.ProcessResult.ERROR;
}
List<VolumeSystem> volumeSystems = ((Image) dataSource).getVolumeSystems();
progressBar.switchToDeterminate(volumeSystems.size());
int numVolSystemsChecked = 0;
progressBar.progress(Bundle.EncryptionDetectionDataSourceIngestModule_processing_message(), 0);
for (VolumeSystem volumeSystem : volumeSystems) {
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}
for (Volume volume : volumeSystem.getVolumes()) {
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}
if (BitlockerDetection.isBitlockerVolume(volume)) {
return flagVolume(volume, BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED, Bundle.EncryptionDetectionDataSourceIngestModule_artifactComment_bitlocker());
}
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}
@ -144,22 +146,24 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
* Create a blackboard artifact.
*
* @param volume The volume to be processed.
* @param artifactType The type of artifact to create.
* @param artifactType The type of artifact to create. This is assumed to be
* an analysis result type.
* @param comment A comment to be attached to the artifact.
*
* @return 'OK' if the volume was processed successfully, or 'ERROR' if
* there was a problem.
*/
private IngestModule.ProcessResult flagVolume(Volume volume, BlackboardArtifact.ARTIFACT_TYPE artifactType, String comment) {
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}
try {
BlackboardArtifact artifact = volume.newArtifact(artifactType);
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EncryptionDetectionModuleFactory.getModuleName(), comment));
try {
BlackboardArtifact artifact = volume.newAnalysisResult(new BlackboardArtifact.Type(artifactType), Score.SCORE_UNKNOWN, null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EncryptionDetectionModuleFactory.getModuleName(), comment)))
.getAnalysisResult();
try {
/*
* post the artifact which will index the artifact for keyword

View File

@ -29,6 +29,7 @@ import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.BufferUnderflowException;
import java.util.Arrays;
import java.util.logging.Level;
import org.apache.tika.exception.EncryptedDocumentException;
import org.apache.tika.exception.TikaException;
@ -52,6 +53,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.xml.sax.ContentHandler;
@ -106,7 +108,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
try {
validateSettings();
this.context = context;
this.context = context;
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
fileTypeDetector = new FileTypeDetector();
@ -130,12 +132,12 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* verify the file hasn't been deleted.
*/
if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
/*
* Is the file in FILE_IGNORE_LIST?
*/
@ -187,7 +189,8 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* Create a blackboard artifact.
*
* @param file The file to be processed.
* @param artifactType The type of artifact to create.
* @param artifactType The type of artifact to create. Assumed to be an
* analysis result type.
* @param comment A comment to be attached to the artifact.
*
* @return 'OK' if the file was processed successfully, or 'ERROR' if there
@ -198,10 +201,11 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
if (context.fileIngestIsCancelled()) {
return IngestModule.ProcessResult.OK;
}
BlackboardArtifact artifact = file.newArtifact(artifactType);
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
EncryptionDetectionModuleFactory.getModuleName(), comment));
BlackboardArtifact artifact = file.newAnalysisResult(new BlackboardArtifact.Type(artifactType), Score.SCORE_UNKNOWN, null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
EncryptionDetectionModuleFactory.getModuleName(), comment)))
.getAnalysisResult();
try {
/*
@ -326,14 +330,14 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
accessDatabase = databaseBuilder.open();
} catch (InvalidCredentialsException ex) {
logger.log(Level.INFO, String.format(
"Jackcess throws invalid credentials exception for file (name: %s, id: %s). It will be assumed to be password protected.",
"Jackcess throws invalid credentials exception for file (name: %s, id: %s). It will be assumed to be password protected.",
file.getName(), file.getId()));
return true;
} catch (Exception ex) { // Firewall, see JIRA-7097
logger.log(Level.WARNING, String.format("Unexpected exception "
+ "trying to open msaccess database using Jackcess "
+ "(name: %s, id: %d)", file.getName(), file.getId()), ex);
return passwordProtected;
return passwordProtected;
}
/*
* No exception has been thrown at this point, so the file

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.modules.fileextmismatch;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
import java.util.logging.Level;
@ -38,6 +39,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskException;
@ -119,15 +121,15 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|| (abstractFile.isFile() == false)) {
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|| (abstractFile.isFile() == false)) {
return ProcessResult.OK;
}
// deleted files often have content that was not theirs and therefore causes a mismatch
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC))
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
return ProcessResult.OK;
}
@ -140,7 +142,9 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
if (mismatchDetected) {
// add artifact
BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED);
BlackboardArtifact bart = abstractFile.newAnalysisResult(
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED), Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList())
.getAnalysisResult();
try {
/*

View File

@ -40,6 +40,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTER
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -162,8 +163,10 @@ public class FileTypeIdIngestModule implements FileIngestModule {
Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
BlackboardArtifact artifact = file.newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_FILE_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
/*
* post the artifact which will index the artifact for

View File

@ -49,7 +49,10 @@ import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -534,24 +537,16 @@ public class HashDbIngestModule implements FileIngestModule {
try {
String moduleName = HashLookupModuleFactory.getModuleName();
List<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, db.getDisplayName()),
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash),
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment)
);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
//TODO Revisit usage of deprecated constructor as per TSK-583
//BlackboardAttribute att2 = new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(), MODULE_NAME, "Known Bad", hashSetName);
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, hashSetName));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HASH_MD5, moduleName, md5Hash));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, moduleName, comment));
String conclusion = TBD;
String configuration = TBD;
String justification = TBD;
// BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList
BlackboardArtifact badFile = abstractFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_HASHSET_HIT, getScore(db.getKnownFilesType()),
conclusion, configuration, justification,
attributes
).getAnalysisResult();
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_HASHSET_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
/*
* post the artifact which will index the artifact for keyword

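Editor's note: the merged code above records hash-set hits with Score.SCORE_UNKNOWN, while the branch it replaces derived a score from the hash database's known-files type. A hedged sketch of what such a mapping could look like; SCORE_NOTABLE and the notable flag are assumptions, only SCORE_UNKNOWN appears in this diff:

    import org.sleuthkit.datamodel.Score;

    class HashScoreSketch {
        // Hypothetical mapping: notable (known-bad) hash sets get a notable score,
        // everything else stays unknown.
        static Score scoreFor(boolean knownBadHashSet) {
            return knownBadHashSet ? Score.SCORE_NOTABLE : Score.SCORE_UNKNOWN;
        }
    }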
View File

@ -43,6 +43,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTER
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -142,7 +143,10 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
BlackboardArtifact artifact = file.newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_FILE_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {

View File

@ -71,6 +71,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskException;
import org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper;
@ -392,7 +393,7 @@ public final class LeappFileProcessor {
geoAbstractFile = createTrackpoint(bbattributes, dataSource, fileName, trackpointSegmentName, pointList);
break;
default: // There is no relationship defined so just process the artifact normally
BlackboardArtifact bbartifact = createArtifactWithAttributes(artifactType.getTypeID(), dataSource, bbattributes);
BlackboardArtifact bbartifact = createArtifactWithAttributes(artifactType, dataSource, bbattributes);
if (bbartifact != null) {
bbartifacts.add(bbartifact);
}
@ -1208,8 +1209,7 @@ public final class LeappFileProcessor {
/**
* Generic method for creating a blackboard artifact with attributes
*
* @param type is a blackboard.artifact_type enum to determine which type
* the artifact should be
* @param artType The artifact type.
* @param dataSource is the Content object that needs to have the artifact
* added for it
* @param bbattributes is the collection of blackboard attributes that need
@ -1217,11 +1217,17 @@ public final class LeappFileProcessor {
*
* @return The newly-created artifact, or null on error
*/
private BlackboardArtifact createArtifactWithAttributes(int type, Content dataSource, Collection<BlackboardAttribute> bbattributes) {
private BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type artType, Content dataSource, Collection<BlackboardAttribute> bbattributes) {
try {
BlackboardArtifact bbart = dataSource.newArtifact(type);
bbart.addAttributes(bbattributes);
return bbart;
switch (artType.getCategory()) {
case DATA_ARTIFACT:
return dataSource.newDataArtifact(artType, bbattributes);
case ANALYSIS_RESULT:
return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult();
default:
logger.log(Level.SEVERE, "Unknown category type: " + artType.getCategory().getDisplayName());
return null;
}
} catch (TskException ex) {
logger.log(Level.WARNING, Bundle.LeappFileProcessor_error_creating_new_artifacts(), ex); //NON-NLS
}
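
Editor's note: createArtifactWithAttributes above now takes a BlackboardArtifact.Type and chooses the creation call from the type's category. A standalone restatement of that dispatch, using only names that appear in the hunk (the wrapper class is illustrative):

    import java.util.Collection;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.Score;
    import org.sleuthkit.datamodel.TskCoreException;

    class CategoryDispatchSketch {
        static BlackboardArtifact create(Content content, BlackboardArtifact.Type artType,
                Collection<BlackboardAttribute> attributes) throws TskCoreException {
            switch (artType.getCategory()) {
                case DATA_ARTIFACT:
                    return content.newDataArtifact(artType, attributes);
                case ANALYSIS_RESULT:
                    return content.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, attributes)
                            .getAnalysisResult();
                default:
                    throw new IllegalArgumentException("Unknown category: " + artType.getCategory());
            }
        }
    }

Centralizing the choice keeps per-artifact parsing code from caring whether a given type is a data artifact or an analysis result.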

View File

@ -29,6 +29,7 @@ import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Set;
@ -56,11 +57,12 @@ import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.modules.pictureanalyzer.spi.PictureProcessor;
import org.sleuthkit.datamodel.Score;
/**
* Extracts EXIF metadata from JPEG, TIFF, and WAV files. Currently only date,
* latitude, longitude, altitude, device model, and device make are extracted.
*
*
* User content suspected artifacts are also created by this processor.
*/
@ServiceProvider(service = PictureProcessor.class)
@ -143,16 +145,18 @@ public class EXIFProcessor implements PictureProcessor {
if (context.fileIngestIsCancelled()) {
return;
}
final Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
if (!attributes.isEmpty() && !blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) {
final BlackboardArtifact exifArtifact = file.newDataArtifact(new BlackboardArtifact.Type(TSK_METADATA_EXIF), attributes);
final BlackboardArtifact userSuspectedArtifact = file.newAnalysisResult(
new BlackboardArtifact.Type(TSK_USER_CONTENT_SUSPECTED), Score.SCORE_UNKNOWN, null, null, null,
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, Bundle.ExifProcessor_userContent_description())))
.getAnalysisResult();
final BlackboardArtifact exifArtifact = file.newArtifact(TSK_METADATA_EXIF);
final BlackboardArtifact userSuspectedArtifact = file.newArtifact(TSK_USER_CONTENT_SUSPECTED);
exifArtifact.addAttributes(attributes);
userSuspectedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
MODULE_NAME, Bundle.ExifProcessor_userContent_description()));
try {
// index the artifact for keyword search
blackboard.postArtifact(exifArtifact, MODULE_NAME);

View File

@ -345,8 +345,7 @@ public class PlasoIngestModule implements DataSourceIngestModule {
eventType.getTypeID()));
try {
BlackboardArtifact bbart = resolvedFile.newArtifact(TSK_TL_EVENT);
bbart.addAttributes(bbattributes);
BlackboardArtifact bbart = resolvedFile.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), bbattributes);
try {
/*
* Post the artifact which will index the artifact for

View File

@ -39,6 +39,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_YARA_
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_RULE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -112,7 +113,7 @@ final class YaraIngestHelper {
* Scan the given AbstractFile for yara rule matches from the rule sets in
* the given directory, creating a blackboard artifact for each matching
* rule.
*
*
* @param file The Abstract File being processed.
* @param baseRuleSetDirectory Base directory of the compiled rule sets.
* @param localFile Local copy of file.
@ -141,7 +142,7 @@ final class YaraIngestHelper {
* Scan the given file byte array for rule matches using the YaraJNIWrapper
* API.
*
* @param fileBytes An array of the file data.
* @param fileBytes An array of the file data.
* @param ruleSetDirectory Base directory of the compiled rule sets.
*
* @return List of rules that match from the given file from the given rule
@ -162,15 +163,17 @@ final class YaraIngestHelper {
}
/**
* Scan the given file for rules that match from the given rule set directory.
*
* @param scanFile Locally stored file to scan.
* Scan the given file for rules that match from the given rule set
* directory.
*
* @param scanFile Locally stored file to scan.
* @param ruleSetDirectory Base directory of the compiled rule sets.
* @param timeout YARA Scanner timeout value.
*
* @return List of matching rules, if none were found the list will be empty.
*
* @throws YaraWrapperException
* @param timeout YARA Scanner timeout value.
*
* @return List of matching rules, if none were found the list will be
* empty.
*
* @throws YaraWrapperException
*/
private static List<String> scanFileForMatch(File scanFile, File ruleSetDirectory, int timeout) throws YaraWrapperException {
List<String> matchingRules = new ArrayList<>();
@ -198,13 +201,15 @@ final class YaraIngestHelper {
private static List<BlackboardArtifact> createArtifact(AbstractFile abstractFile, String ruleSetName, List<String> matchingRules) throws TskCoreException {
List<BlackboardArtifact> artifacts = new ArrayList<>();
for (String rule : matchingRules) {
BlackboardArtifact artifact = abstractFile.newArtifact(TSK_YARA_HIT);
List<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, ruleSetName));
attributes.add(new BlackboardAttribute(TSK_RULE, MODULE_NAME, rule));
artifact.addAttributes(attributes);
BlackboardArtifact artifact = abstractFile.newAnalysisResult(new BlackboardArtifact.Type(TSK_YARA_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
artifacts.add(artifact);
}
return artifacts;

View File

@ -34,6 +34,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -87,8 +88,9 @@ class StixArtifactData {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact bba = file.newArtifact(TSK_INTERESTING_FILE_HIT);
bba.addAttributes(attributes);
BlackboardArtifact bba = file.newAnalysisResult(
new BlackboardArtifact.Type(TSK_INTERESTING_FILE_HIT),
Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult();
try {
/*

View File

@ -23,10 +23,13 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter;
import org.joda.time.DateTime;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -93,7 +96,6 @@ final class CustomArtifactType {
* artifact to the blackboard.
*/
static BlackboardArtifact createAndPostInstance(Content source) throws TskCoreException, Blackboard.BlackboardException {
BlackboardArtifact artifact = source.newArtifact(artifactType.getTypeID());
List<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0));
attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0));
@ -102,7 +104,6 @@ final class CustomArtifactType {
attributes.add(new BlackboardAttribute(bytesAttrType, MODULE_NAME, DatatypeConverter.parseHexBinary("ABCD")));
attributes.add(new BlackboardAttribute(stringAttrType, MODULE_NAME, "Zero"));
attributes.add(new BlackboardAttribute(jsonAttrType, MODULE_NAME, "{\"fruit\": \"Apple\",\"size\": \"Large\",\"color\": \"Red\"}"));
artifact.addAttributes(attributes);
/*
* Add a second source module to the attributes. Try to do it twice. The
@ -113,6 +114,28 @@ final class CustomArtifactType {
attr.addSource(ADDITIONAL_MODULE_NAME);
}
BlackboardArtifact artifact;
if (artifactType.getCategory() == null) {
throw new TskCoreException(String.format("Artifact type: %s has no category.",
artifactType.getDisplayName(), artifactType.getCategory().getDisplayName()));
}
switch (artifactType.getCategory()) {
case DATA_ARTIFACT:
artifact = source.newDataArtifact(artifactType, attributes);
break;
case ANALYSIS_RESULT:
artifact = source.newAnalysisResult(artifactType, Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
break;
default:
throw new TskCoreException(String.format("Artifact type: %s has no known category: %s",
artifactType.getDisplayName(), artifactType.getCategory().getDisplayName()));
}
Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard();
blackboard.postArtifact(artifact, MODULE_NAME);

View File

@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
@ -31,6 +32,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -77,11 +79,11 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
int randomArtIndex = (int) (Math.random() * 3);
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getArtifactsBlackboard();
BlackboardArtifact.Type artifactTypeBase = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAMES[randomArtIndex], ARTIFACT_DISPLAY_NAMES[randomArtIndex]);
BlackboardArtifact artifactBase = file.newArtifact(artifactTypeBase.getTypeID());
Collection<BlackboardAttribute> baseAttributes = new ArrayList<>();
String commentTxt;
BlackboardAttribute baseAttr;
switch (artifactBase.getArtifactTypeID()) {
switch (artifactTypeBase.getTypeID()) {
case 2:
commentTxt = "www.placeholderWebsiteDOTCOM";
baseAttr = new BlackboardAttribute(
@ -110,8 +112,20 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
commentTxt = "DEPENDENT ON ARTIFACT TYPE";
break;
}
artifactBase.addAttributes(baseAttributes);
BlackboardArtifact artifact = file.newArtifact(artifactType.getTypeID());
BlackboardArtifact artifactBase;
switch (artifactTypeBase.getCategory()) {
case DATA_ARTIFACT:
artifactBase = file.newDataArtifact(artifactTypeBase, baseAttributes);
break;
case ANALYSIS_RESULT:
artifactBase = file.newAnalysisResult(artifactTypeBase, Score.SCORE_UNKNOWN, null, null, null, baseAttributes)
.getAnalysisResult();
break;
default:
throw new IllegalArgumentException("Unknown category type: " + artifactTypeBase.getCategory().getDisplayName());
}
Collection<BlackboardAttribute> attributes = new ArrayList<>();
BlackboardAttribute att = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, "ArtifactsAndTxt");
@ -121,7 +135,19 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
attributes.add(att2);
attributes.add(att3);
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, artifactBase.getArtifactID()));
artifact.addAttributes(attributes);
switch (artifactType.getCategory()) {
case DATA_ARTIFACT:
file.newDataArtifact(artifactType, attributes);
break;
case ANALYSIS_RESULT:
file.newAnalysisResult(artifactType, Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
break;
default:
throw new IllegalArgumentException("Unknown category type: " + artifactType.getCategory().getDisplayName());
}
} catch (TskCoreException | NoCurrentCaseException ex) {
logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex);
return ProcessResult.ERROR;

View File

@ -22,7 +22,7 @@ import java.awt.Dialog;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import static java.util.Arrays.asList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import java.util.logging.Level;
@ -142,9 +142,7 @@ public class AddManualEvent extends Action {
try {
//Use the current examiner's name plus a fixed string as the source / module name.
String source = MANUAL_CREATION + ": " + sleuthkitCase.getCurrentExaminer().getLoginName();
BlackboardArtifact artifact = sleuthkitCase.newBlackboardArtifact(TSK_TL_EVENT, eventInfo.datasource.getId());
artifact.addAttributes(asList(
List<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_TL_EVENT_TYPE, source,
TimelineEventType.USER_CREATED.getTypeID()),
@ -154,7 +152,10 @@ public class AddManualEvent extends Action {
new BlackboardAttribute(
TSK_DATETIME, source,
eventInfo.time)
));
);
BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null);
try {
sleuthkitCase.getBlackboard().postArtifact(artifact, source);
} catch (Blackboard.BlackboardException ex) {
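AddManualEvent now builds the attribute list up front and passes it to the three-argument newDataArtifact overload, supplying null for the OS account. A small sketch of the same call with an account supplied, as ChromeCacheExtractor did before this commit; the class and method names below are illustrative:

    import java.util.Collection;

    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.OsAccount;
    import org.sleuthkit.datamodel.TskCoreException;

    class TimelineEventSketch {

        /** Sketch only: pass null for the account when none applies, as AddManualEvent does. */
        static BlackboardArtifact addTimelineEvent(Content dataSource,
                Collection<BlackboardAttribute> attributes, OsAccount account) throws TskCoreException {
            return dataSource.newDataArtifact(
                    new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT),
                    attributes, account);
        }
    }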

View File

@ -19,8 +19,9 @@
package org.sleuthkit.autopsy.experimental.objectdetection;
import java.io.File;
import java.util.Collections;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import org.apache.commons.io.FilenameUtils;
@ -43,12 +44,12 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_OBJECT_DETECTED;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -163,14 +164,16 @@ public class ObjectDetectectionFileIngestModule extends FileIngestModuleAdapter
if (!detectionRectangles.empty()) {
//if any detections occurred, create an artifact for this classifier and file combination
try {
BlackboardArtifact artifact = file.newArtifact(TSK_OBJECT_DETECTED);
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
MODULE_NAME,
classifierKey));
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
MODULE_NAME,
Bundle.ObjectDetectionFileIngestModule_classifierDetection_text((int) detectionRectangles.size().height)));
List<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION, MODULE_NAME, classifierKey),
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
Bundle.ObjectDetectionFileIngestModule_classifierDetection_text((int) detectionRectangles.size().height))
);
BlackboardArtifact artifact = file.newAnalysisResult(
new BlackboardArtifact.Type(TSK_OBJECT_DETECTED), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
/*
* Index the artifact for keyword search.

View File

@ -46,6 +46,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.EncodingType;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@ -384,8 +385,10 @@ class VolatilityProcessor {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(resolvedFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact volArtifact = resolvedFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
volArtifact.addAttributes(attributes);
BlackboardArtifact volArtifact = resolvedFile.newAnalysisResult(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT),
Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search

View File

@ -199,9 +199,6 @@ class GPXParserFileIngestModule(FileIngestModule):
for waypoint in gpx.waypoints:
try:
art = file.newArtifact(
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
attributes = ArrayList()
attributes.add(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), self.moduleName, waypoint.latitude))
@ -213,7 +210,8 @@ class GPXParserFileIngestModule(FileIngestModule):
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self.moduleName, waypoint.name))
attributes.add(BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), self.moduleName, "GPXParser"))
art.addAttributes(attributes)
art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
self.blackboard.postArtifact(art, self.moduleName)

View File

@ -95,11 +95,11 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
longitude = Double.valueOf(resultSet.getString("longitude"))
attributes = ArrayList()
artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME, "Browser Location History"))
artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(),moduleName, accuracy))
# NOTE: originally commented out

View File

@ -91,14 +91,13 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
i = i + 1
attributes = ArrayList()
artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, general.MODULE_NAME, latitude))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, general.MODULE_NAME, longitude))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, general.MODULE_NAME, timestamp))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, general.MODULE_NAME,
abstractFile.getName() + " Location History"))
artifact.addAttributes(attributes)
artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
#Not storing these for now.
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))

View File

@ -86,7 +86,6 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
altitude = poisResultSet.getDouble("poialt")
attributes = ArrayList()
artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK)
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, self._MODULE_NAME, time))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, self._MODULE_NAME, latitude))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, self._MODULE_NAME, longitude))
@ -94,6 +93,8 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, self._MODULE_NAME, name))
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, self._MODULE_NAME, self._PROGRAM_NAME))
artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
try:
# index the artifact for keyword search

View File

@ -129,9 +129,8 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
elif (not(not contacts_parser.get_contact_name() or contacts_parser.get_contact_name().isspace())):
current_case = Case.getCurrentCase().getSleuthkitCase()
attributes = ArrayList()
artifact = contacts_db.getDBFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT)
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name()))
artifact.addAttributes(attributes)
artifact = contacts_db.getDBFile().newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes)
# Post the artifact to blackboard
current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME)

View File

@ -569,8 +569,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
}
if (!attributes.isEmpty()) {
try {
BlackboardArtifact bbart = aFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA);
bbart.addAttributes(attributes);
BlackboardArtifact bbart = aFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA), attributes);
bbartifacts.add(bbart);
} catch (TskCoreException ex) {
// Log error and return to continue processing

View File

@ -40,6 +40,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskException;
@ -236,14 +237,6 @@ class LuceneQuery implements KeywordSearchQuery {
final String MODULE_NAME = KeywordSearchModuleFactory.getModuleName();
Collection<BlackboardAttribute> attributes = new ArrayList<>();
BlackboardArtifact bba;
try {
bba = content.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
} catch (TskCoreException e) {
logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e); //NON-NLS
return null;
}
if (snippet != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_PREVIEW, MODULE_NAME, snippet));
}
@ -270,10 +263,10 @@ class LuceneQuery implements KeywordSearchQuery {
);
try {
bba.addAttributes(attributes); //write out to bb
return bba;
return content.newAnalysisResult(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_KEYWORD_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
} catch (TskCoreException e) {
logger.log(Level.WARNING, "Error adding bb attributes to artifact", e); //NON-NLS
logger.log(Level.WARNING, "Error adding bb artifact for keyword hit", e); //NON-NLS
return null;
}
}

View File

@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -590,19 +591,11 @@ final class RegexQuery implements KeywordSearchQuery {
* Create a "plain vanilla" keyword hit artifact with keyword and regex
* attributes
*/
BlackboardArtifact newArtifact;
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, foundKeyword.getSearchTerm()));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP, MODULE_NAME, getQueryString()));
try {
newArtifact = content.newArtifact(ARTIFACT_TYPE.TSK_KEYWORD_HIT);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Error adding artifact for keyword hit to blackboard", ex); //NON-NLS
return null;
}
if (StringUtils.isNotBlank(listName)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, listName));
}
@ -621,8 +614,8 @@ final class RegexQuery implements KeywordSearchQuery {
}
try {
newArtifact.addAttributes(attributes);
return newArtifact;
return content.newAnalysisResult(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_KEYWORD_HIT), Score.SCORE_UNKNOWN, null, null, null, attributes)
.getAnalysisResult();
} catch (TskCoreException e) {
LOGGER.log(Level.SEVERE, "Error adding bb attributes for terms search artifact", e); //NON-NLS
return null;

View File

@ -31,6 +31,7 @@ import java.nio.charset.Charset;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@ -540,22 +541,16 @@ final class ChromeCacheExtractor {
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID,
moduleName, cachedItemFile.getId()));
Optional<Long> optional = cacheEntryFile.getOsAccountObjectId();
OsAccount account = null;
if(optional.isPresent()) {
account = currentCase.getSleuthkitCase().getOsAccountManager().getOsAccountByObjectId(optional.get());
}
BlackboardArtifact webCacheArtifact = cacheEntryFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_CACHE), webAttr, account);
BlackboardArtifact webCacheArtifact = cacheEntryFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WEB_CACHE), webAttr);
artifactsAdded.add(webCacheArtifact);
// Create a TSK_ASSOCIATED_OBJECT on the f_XXX or derived file back to the CACHE entry
BlackboardArtifact associatedObjectArtifact = cachedItemFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
if (associatedObjectArtifact != null) {
associatedObjectArtifact.addAttribute(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
moduleName, webCacheArtifact.getArtifactID()));
artifactsAdded.add(associatedObjectArtifact);
}
BlackboardArtifact associatedObjectArtifact = cachedItemFile.newDataArtifact(
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT),
Arrays.asList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
moduleName, webCacheArtifact.getArtifactID())));
artifactsAdded.add(associatedObjectArtifact);
}
/**

View File

@ -52,6 +52,7 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOC
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -159,13 +160,13 @@ abstract class Extract {
* @throws TskCoreException
*/
BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException {
Optional<OsAccount> optional = getOsAccount(content);
if (optional.isPresent() && type.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) {
return content.newDataArtifact(type, attributes, optional.get());
} else {
BlackboardArtifact bbart = content.newArtifact(type.getTypeID());
bbart.addAttributes(attributes);
return bbart;
switch (type.getCategory()) {
case DATA_ARTIFACT:
return content.newDataArtifact(type, attributes);
case ANALYSIS_RESULT:
return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult();
default:
throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName());
}
}
@ -537,28 +538,4 @@ abstract class Extract {
return tempFile;
}
/**
* Return the appropriate OsAccount for the given file.
*
* @param file
*
* @return An Optional OsACcount object.
*
* @throws TskCoreException
*/
Optional<OsAccount> getOsAccount(Content content) throws TskCoreException {
if(content instanceof AbstractFile) {
if(osAccountCache == null) {
Optional<Long> accountId = ((AbstractFile)content).getOsAccountObjectId();
if(accountId.isPresent()) {
return Optional.ofNullable(tskCase.getOsAccountManager().getOsAccountByObjectId(accountId.get()));
}
return Optional.empty();
}
return osAccountCache.getOsAccount(((AbstractFile)content));
}
return Optional.empty();
}
}
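With the OS-account lookup removed, Extract.createArtifactWithAttributes now needs only the type, the content, and the attributes, and it chooses the factory method from the type's category. A hypothetical call site inside one of the Extract subclasses might look like the fragment below; the module name, attribute value, and someFile variable are illustrative and not taken from any particular extractor:

    // Fragment inside an Extract subclass method; someFile is an AbstractFile from the data source.
    // Arrays and the datamodel classes are assumed to be imported as in the diffs above.
    Collection<BlackboardAttribute> bbattributes = Arrays.asList(
            new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
                    "ExampleModule", "ExampleProgram"));
    BlackboardArtifact bbart = createArtifactWithAttributes(
            new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_RECENT_OBJECT),
            someFile, bbattributes);
    // The artifact would then be posted to the blackboard, as in the other modules touched by this commit.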

View File

@ -742,11 +742,11 @@ class ExtractRegistry extends Extract {
} else {
results.get(0).addAttributes(bbattributes);
}
for (Map.Entry userMap : getUserNameMap().entrySet()) {
for (Map.Entry<String, String> userMap : getUserNameMap().entrySet()) {
String sid = "";
try{
sid = (String)userMap.getKey();
String userName = (String)userMap.getValue();
sid = userMap.getKey();
String userName = userMap.getValue();
createOrUpdateOsAccount(regFile, sid, userName, null);
} catch(TskCoreException | TskDataException | NotUserSIDException ex) {
logger.log(Level.WARNING, String.format("Failed to update Domain for existing OsAccount: %s, sid: %s", regFile.getId(), sid), ex);
@ -815,9 +815,7 @@ class ExtractRegistry extends Extract {
try {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, parentModuleName, value));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, parentModuleName, itemMtime));
BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_DELETED_PROG);
bbart.addAttributes(bbattributes);
BlackboardArtifact bbart = regFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_DELETED_PROG), bbattributes);
newArtifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding installed program artifact to blackboard.", ex); //NON-NLS
@ -827,7 +825,6 @@ class ExtractRegistry extends Extract {
String officeName = artnode.getAttribute("name"); //NON-NLS
try {
BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_RECENT_OBJECT);
// @@@ BC: Consider removing this after some more testing. It looks like an Mtime associated with the root key and not the individual item
if (mtime != null) {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, parentModuleName, mtime));
@ -835,8 +832,8 @@ class ExtractRegistry extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, parentModuleName, officeName));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, parentModuleName, value));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, parentModuleName, artnode.getNodeName()));
bbart.addAttributes(bbattributes);
BlackboardArtifact bbart = regFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_RECENT_OBJECT), bbattributes);
newArtifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard.", ex); //NON-NLS
@ -874,12 +871,12 @@ class ExtractRegistry extends Extract {
try {
String localPath = artnode.getAttribute("localPath"); //NON-NLS
String remoteName = value;
BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_REMOTE_DRIVE);
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LOCAL_PATH,
parentModuleName, localPath));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REMOTE_PATH,
parentModuleName, remoteName));
bbart.addAttributes(bbattributes);
BlackboardArtifact bbart = regFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_REMOTE_DRIVE), bbattributes);
newArtifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding network artifact to blackboard.", ex); //NON-NLS
@ -893,8 +890,7 @@ class ExtractRegistry extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SSID, parentModuleName, value));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, parentModuleName, lastWriteTime));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID, parentModuleName, adapter));
BlackboardArtifact bbart = regFile.newArtifact(ARTIFACT_TYPE.TSK_WIFI_NETWORK);
bbart.addAttributes(bbattributes);
BlackboardArtifact bbart = regFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_WIFI_NETWORK), bbattributes);
newArtifacts.add(bbart);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error adding SSID artifact to blackboard.", ex); //NON-NLS

View File

@ -1,5 +1,5 @@
#Updated by build script
#Mon, 25 Jan 2021 12:41:22 -0500
#Wed, 28 Apr 2021 08:03:47 -0400
LBL_splash_window_title=Starting Autopsy
SPLASH_HEIGHT=314
SPLASH_WIDTH=538

View File

@ -1,4 +1,4 @@
#Updated by build script
#Mon, 25 Jan 2021 12:41:22 -0500
#Wed, 28 Apr 2021 08:03:47 -0400
CTL_MainWindow_Title=Autopsy 4.18.0
CTL_MainWindow_Title_No_Project=Autopsy 4.18.0

View File

@ -52,7 +52,9 @@ from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule.services import Services
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.casemodule.services import Blackboard
from org.sleuthkit.datamodel import Score
from java.util import ArrayList
# Factory that defines the name and details of the module and allows Autopsy
# to create instances of the modules that will do the analysis.
@ -138,9 +140,9 @@ class SampleJythonDataSourceIngestModule(DataSourceIngestModule):
# Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
# artifact. Refer to the developer docs for other examples.
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file")
art.addAttribute(att)
attrs = ArrayList()
attrs.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, SampleJythonDataSourceIngestModuleFactory.moduleName, "Test file"))
art = file.newAnalysisResult(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT), Score.SCORE_UNKNOWN, None, None, None, attrs).getAnalysisResult()
try:
# index the artifact for keyword search

View File

@ -54,6 +54,8 @@ from org.sleuthkit.autopsy.casemodule import Case
from org.sleuthkit.autopsy.casemodule.services import Services
from org.sleuthkit.autopsy.casemodule.services import FileManager
from org.sleuthkit.autopsy.casemodule.services import Blackboard
from org.sleuthkit.datamodel import Score
from java.util import ArrayList
# Factory that defines the name and details of the module and allows Autopsy
# to create instances of the modules that will do the analysis.
@ -125,10 +127,11 @@ class SampleJythonFileIngestModule(FileIngestModule):
# Make an artifact on the blackboard. TSK_INTERESTING_FILE_HIT is a generic type of
# artifact. Refer to the developer docs for other examples.
art = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)
att = BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
SampleJythonFileIngestModuleFactory.moduleName, "Text Files")
art.addAttribute(att)
attrs = ArrayList()
attrs.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
SampleJythonFileIngestModuleFactory.moduleName, "Text Files"))
art = file.newAnalysisResult(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT), Score.SCORE_UNKNOWN, None, None, None, attrs).getAnalysisResult()
try:
# index the artifact for keyword search

View File

@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.thunderbirdparser;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -59,6 +60,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.Relationship;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskDataException;
@ -240,8 +242,15 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
// encrypted pst: Add encrypted file artifact
try {
BlackboardArtifact artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
artifact.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, EmailParserModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "ThunderbirdMboxFileIngestModule.encryptionFileLevel")));
BlackboardArtifact artifact = abstractFile.newAnalysisResult(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED),
Score.SCORE_UNKNOWN, null, null, null, Arrays.asList(
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME,
EmailParserModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"ThunderbirdMboxFileIngestModule.encryptionFileLevel"))
))
.getAnalysisResult();
try {
// index the artifact for keyword search
@ -759,8 +768,9 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
return null;
}
bbart = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG);
bbart.addAttributes(bbattributes);
bbart = abstractFile.newDataArtifact(
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG),
bbattributes);
if (context.fileIngestIsCancelled()) {
return null;

View File

@ -223,10 +223,9 @@ final class VcardParser {
try {
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT, attributes)) {
artifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT);
artifact.addAttributes(attributes);
artifact = abstractFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes);
extractPhotos(vcard, abstractFile, artifact);
// Add account relationships.
if (deviceAccountInstance != null) {