Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-06 21:00:22 +00:00)

Merge pull request #7395 from rcordovano/7895-cr-data-artifact-ingest-module

7895 CR data artifact ingest module

Commit 75223b89f9
@@ -12,6 +12,7 @@ CentralRepoDbChoice.PostgreSQL.Text=Custom PostgreSQL
CentralRepoDbChoice.PostgreSQL_Multiuser.Text=PostgreSQL using multi-user settings
CentralRepoDbChoice.Sqlite.Text=SQLite
CentralRepoDbManager.connectionErrorMsg.text=Failed to connect to central repository database.
CentralRepositoryService.progressMsg.startingListener=Starting events listener...
CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...
CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database....
CentralRepositoryService.serviceName=Central Repository Service
@@ -310,18 +310,25 @@ public interface CentralRepository {
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValues(CorrelationAttributeInstance.Type aType, List<String> values) throws CentralRepoException, CorrelationAttributeNormalizationException;

/**
* Retrieves eamArtifact instances from the database that are associated
* with the eamArtifactType and eamArtifactValue of the given eamArtifact.
* Retrieves correlation attribute instances from the central repository
* that match a given attribute type and value.
*
* @param aType The type of the artifact
* @param value The correlation value
* @param type The correlation attribute type.
* @param value The correlation attribute value.
*
* @return List of artifact instances for a given type/value
* @return The matching correlation attribute instances.
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
* @throws CorrelationAttributeNormalizationException The exception is
* thrown if the supplied
* correlation attribute
* value cannot be
* normalized.
* @throws CentralRepoException The exception is
* thrown if there is an
* error querying the
* central repository.
*/
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type aType, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;
List<CorrelationAttributeInstance> getArtifactInstancesByTypeValue(CorrelationAttributeInstance.Type type, String value) throws CentralRepoException, CorrelationAttributeNormalizationException;

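Editor's note, not part of the diff: a minimal caller-side sketch of the method documented above. It assumes the central repository is enabled; the SID literal is a made-up example value, while OSACCOUNT_TYPE_ID, getCorrelationTypeById(), and getCorrelationCase() all appear elsewhere in this changeset.

    try {
        CentralRepository centralRepo = CentralRepository.getInstance();
        CorrelationAttributeInstance.Type osAcctType =
                centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID);
        List<CorrelationAttributeInstance> hits =
                centralRepo.getArtifactInstancesByTypeValue(osAcctType, "S-1-5-21-1111-2222-3333-1001");
        for (CorrelationAttributeInstance hit : hits) {
            // Each hit carries the case and data source in which the value was previously seen.
            System.out.println(hit.getCorrelationCase().getCaseUUID());
        }
    } catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
        // Thrown if the value cannot be normalized or the repository cannot be queried.
    }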
/**
* Retrieves eamArtifact instances from the database that are associated
@@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2018-2020 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import org.sleuthkit.autopsy.appservices.AutopsyService;
import org.sleuthkit.autopsy.progress.ProgressIndicator;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.CaseEventListener;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskCoreException;
@@ -36,8 +35,7 @@ import org.sleuthkit.datamodel.TskCoreException;
public class CentralRepositoryService implements AutopsyService {

private CaseEventListener caseEventListener = new CaseEventListener();
private IngestEventsListener ingestEventListener = new IngestEventsListener();

@Override
@NbBundle.Messages({
"CentralRepositoryService.serviceName=Central Repository Service"
@@ -47,7 +45,8 @@ public class CentralRepositoryService implements AutopsyService {
}

@NbBundle.Messages({
"CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates..."
"CentralRepositoryService.progressMsg.updatingSchema=Checking for schema updates...",
"CentralRepositoryService.progressMsg.startingListener=Starting events listener..."
})
@Override
public void openCaseResources(CaseContext context) throws AutopsyServiceException {
@@ -58,21 +57,20 @@ public class CentralRepositoryService implements AutopsyService {
ProgressIndicator progress = context.getProgressIndicator();
progress.progress(Bundle.CentralRepositoryService_progressMsg_updatingSchema());
updateSchema();

if (context.cancelRequested()) {
return;
}

dataUpgradeForVersion1dot2(context.getCase());

if (context.cancelRequested()) {
return;
}

progress.progress(Bundle.CentralRepositoryService_progressMsg_startingListener());
caseEventListener = new CaseEventListener();
caseEventListener.installListeners();

ingestEventListener = new IngestEventsListener();
ingestEventListener.installListeners();

caseEventListener.startUp();
}

@NbBundle.Messages({
"CentralRepositoryService.progressMsg.waitingForListeners=Finishing adding data to central repository database...."
})
@@ -80,22 +78,16 @@ public class CentralRepositoryService implements AutopsyService {
public void closeCaseResources(CaseContext context) throws AutopsyServiceException {
ProgressIndicator progress = context.getProgressIndicator();
progress.progress(Bundle.CentralRepositoryService_progressMsg_waitingForListeners());

if (caseEventListener != null) {
caseEventListener.uninstallListeners();
caseEventListener.shutdown();
}

if (ingestEventListener != null) {
ingestEventListener.uninstallListeners();
ingestEventListener.shutdown();
}
}

/**
* Updates the central repository schema to the latest version.
* Updates the central repository database schema to the latest version.
*
* @throws AutopsyServiceException
* @throws AutopsyServiceException The exception is thrown if there is an
* error updating the database schema.
*/
private void updateSchema() throws AutopsyServiceException {
try {
@@ -107,10 +99,11 @@ public class CentralRepositoryService implements AutopsyService {

/**
* Adds missing data source object IDs from data sources in this case to the
* corresponding records in the central repository. This is a data update to
* go with the v1.2 schema update.
* corresponding records in the central repository database. This is a data
* update to go with the v1.2 schema update.
*
* @throws AutopsyServiceException
* @throws AutopsyServiceException The exception is thrown if there is an
* error updating the database.
*/
private void dataUpgradeForVersion1dot2(Case currentCase) throws AutopsyServiceException {
try {
@@ -40,7 +40,6 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.InvalidAccountIDException;
import org.sleuthkit.datamodel.OsAccount;
@@ -114,8 +113,54 @@ public class CorrelationAttributeUtil {
return Collections.emptyList();
}

public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccountInstance osAccountInstance) {
return makeCorrAttrsForSearch(osAccountInstance);
/**
* Gets the correlation attributes for an OS account instance represented as
* an OS account plus a data source.
*
* @param account The OS account.
* @param dataSource The data source.
*
* @return The correlation attributes.
*/
public static List<CorrelationAttributeInstance> makeCorrAttrsToSave(OsAccount account, Content dataSource) {
List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
if (CentralRepository.isEnabled()) {
Optional<String> accountAddr = account.getAddr();
if (accountAddr.isPresent() && !isSystemOsAccount(accountAddr.get())) {
try {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
dataSource.getName(),
"",
TskData.FileKnown.KNOWN,
account.getId());
correlationAttrs.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Error querying central repository for OS account '%s'", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, String.format("Error getting current case for OS account '%s'", accountAddr.get()), ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, String.format("Error normalizing correlation attribute for OS account '%s'", accountAddr.get()), ex); //NON-NLS
}
}
}
return correlationAttrs;
}

/**
* Determines whether or not a given OS account address is a system account
* address.
*
* @param accountAddr The OS account address.
*
* @return True or false.
*/
private static boolean isSystemOsAccount(String accountAddr) {
return accountAddr.equals("S-1-5-18") || accountAddr.equals("S-1-5-19") || accountAddr.equals("S-1-5-20");
}

/**
@@ -787,43 +832,11 @@ public class CorrelationAttributeUtil {

public static List<CorrelationAttributeInstance> makeCorrAttrsForSearch(OsAccountInstance osAccountInst) {
List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
if (CentralRepository.isEnabled()) {
OsAccount account = null;
DataSource dataSource = null;
if (osAccountInst != null) {
try {
account = osAccountInst.getOsAccount();
dataSource = osAccountInst.getDataSource();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting information from OsAccountInstance.", ex);
}
}
if (account != null && dataSource != null) {
Optional<String> accountAddr = account.getAddr();
// Check address if it is null or one of the ones below we want to ignore it since they will always be one a windows system
// and they are not unique
if (accountAddr.isPresent() && !accountAddr.get().equals("S-1-5-18") && !accountAddr.get().equals("S-1-5-19") && !accountAddr.get().equals("S-1-5-20")) {
try {

CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
CorrelationAttributeInstance correlationAttributeInstance = new CorrelationAttributeInstance(
CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.OSACCOUNT_TYPE_ID),
accountAddr.get(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource),
dataSource.getName(),
"",
TskData.FileKnown.KNOWN,
account.getId());
correlationAttrs.add(correlationAttributeInstance);
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
} catch (CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
}
}
if (CentralRepository.isEnabled() && osAccountInst != null) {
try {
correlationAttrs.addAll(makeCorrAttrsToSave(osAccountInst.getOsAccount(), osAccountInst.getDataSource()));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error getting OS account from OS account instance '%s'", osAccountInst), ex);
}
}
return correlationAttrs;
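Editor's sketch, not part of the commit: the hunk above shows the updated makeCorrAttrsForSearch(OsAccountInstance) delegating to the new makeCorrAttrsToSave(OsAccount, Content) overload. A caller holding an OsAccountInstance can do the same thing directly; getOsAccount() and getDataSource() can throw TskCoreException, exactly as handled above.

    static List<CorrelationAttributeInstance> corrAttrsFor(OsAccountInstance osAcctInstance) throws TskCoreException {
        // Resolve the account and its data source, then build the correlation attributes to save.
        return CorrelationAttributeUtil.makeCorrAttrsToSave(osAcctInstance.getOsAccount(), osAcctInstance.getDataSource());
    }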
@@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2019 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@@ -98,32 +98,33 @@ public class CorrelationDataSource implements Serializable {
}

/**
* Create a CorrelationDataSource object from a TSK Content object. This
* will add it to the central repository.
* Creates a central repository data source object from a case database data
* source. If the data source is not already present in the central
* repository, it is added.
*
* @param correlationCase the current CorrelationCase used for ensuring
* uniqueness of DataSource
* @param dataSource the sleuthkit datasource that is being added to
* the central repository
* @param correlationCase The central repository case associated with the
* data source.
* @param dataSource The case database data source.
*
* @return
* @return The central repository data source.
*
* @throws CentralRepoException
* @throws CentralRepoException This exception is thrown if there is an
* error creating the central repository data
* source.
*/
public static CorrelationDataSource fromTSKDataSource(CorrelationCase correlationCase, Content dataSource) throws CentralRepoException {
if (!CentralRepository.isEnabled()) {
throw new CentralRepoException(String.format("Central repository is not enabled, cannot create central repository data source for '%s'", dataSource));
}

Case curCase;
try {
curCase = Case.getCurrentCaseThrows();
} catch (NoCurrentCaseException ex) {
throw new CentralRepoException("Autopsy case is closed");
}

CorrelationDataSource correlationDataSource = null;
boolean useCR = CentralRepository.isEnabled();
if (useCR) {
correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
throw new CentralRepoException("Error getting current case", ex);
}

CorrelationDataSource correlationDataSource = CentralRepository.getInstance().getDataSource(correlationCase, dataSource.getId());
if (correlationDataSource == null) {
String deviceId;
String md5 = null;
@@ -131,7 +132,7 @@ public class CorrelationDataSource implements Serializable {
String sha256 = null;
try {
deviceId = curCase.getSleuthkitCase().getDataSource(dataSource.getId()).getDeviceId();

if (dataSource instanceof Image) {
Image image = (Image) dataSource;
md5 = image.getMd5();
@@ -139,15 +140,12 @@ public class CorrelationDataSource implements Serializable {
sha256 = image.getSha256();
}
} catch (TskDataException | TskCoreException ex) {
throw new CentralRepoException("Error getting data source info: " + ex.getMessage());
throw new CentralRepoException("Error getting data source info from case database", ex);
}

correlationDataSource = new CorrelationDataSource(correlationCase, deviceId, dataSource.getName(), dataSource.getId(), md5, sha1, sha256);
if (useCR) {
//add the correlation data source to the central repository and fill in the Central repository data source id in the object
correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
}
correlationDataSource = CentralRepository.getInstance().newDataSource(correlationDataSource);
}

return correlationDataSource;
}

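Editor's sketch, not part of the commit: how fromTSKDataSource() is reached from an open case, mirroring the call made in CorrelationAttributeUtil earlier in this changeset. The dataSource variable is a hypothetical Content object taken from the current case.

    try {
        CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
        // Looks up the data source in the central repository, adding it first if it is not there yet.
        CorrelationDataSource crDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource);
    } catch (CentralRepoException | NoCurrentCaseException ex) {
        // Thrown if the repository is disabled or unreachable, or if no case is open.
    }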
@@ -205,66 +203,68 @@ public class CorrelationDataSource implements Serializable {
public String getName() {
return name;
}

/**
* @return the MD5 hash value
*/
public String getMd5() {
return (md5Hash == null ? "" : md5Hash);
}

/**
* Set the MD5 hash value and persist to the Central Repository if available.
*
* Set the MD5 hash value and persist to the Central Repository if
* available.
*
* @param md5Hash The MD5 hash value.
*
* @throws CentralRepoException If there's an issue updating the Central
Repository.
* Repository.
*/
public void setMd5(String md5Hash) throws CentralRepoException {
this.md5Hash = md5Hash;

if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceMd5Hash(this);
}
}

/**
* @return the SHA-1 hash value
*/
public String getSha1() {
return (sha1Hash == null ? "" : sha1Hash);
}

/**
* Set the SHA-1 hash value and persist to the Central Repository if
* available.
*
*
* @param sha1Hash The SHA-1 hash value.
*/
public void setSha1(String sha1Hash) throws CentralRepoException {
this.sha1Hash = sha1Hash;

if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceSha1Hash(this);
}
}

/**
* @return the SHA-256 hash value
*/
public String getSha256() {
return (sha256Hash == null ? "" : sha256Hash);
}

/**
* Set the SHA-256 hash value and persist to the Central Repository if
* available.
*
*
* @param sha256Hash The SHA-256 hash value.
*/
public void setSha256(String sha256Hash) throws CentralRepoException {
this.sha256Hash = sha256Hash;

if (dataSourceObjectID != -1) {
CentralRepository.getInstance().updateDataSourceSha256Hash(this);
}
@@ -1289,7 +1289,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
if (values == null || values.isEmpty()) {
throw new CorrelationAttributeNormalizationException("Cannot get artifact instances without specified values");
}
return getArtifactInstances(prepareGetInstancesSql(aType, values), aType);
return getCorrAttrInstances(prepareGetInstancesSql(aType, values), aType);
}

@Override
@@ -1312,7 +1312,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
inValuesBuilder.append(sql);
inValuesBuilder.append(caseIds.stream().map(String::valueOf).collect(Collectors.joining("', '")));
inValuesBuilder.append("')");
return getArtifactInstances(inValuesBuilder.toString(), aType);
return getCorrAttrInstances(inValuesBuilder.toString(), aType);
}

/**
@@ -1361,40 +1361,44 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}

/**
* Retrieves eamArtifact instances from the database that are associated
* with the eamArtifactType and eamArtifactValues of the given eamArtifact.
* Retrieves correlation attribute instances from the central repository
* that match a given SQL query and correlation attribute type.
*
* @param aType The type of the artifact
* @param values The list of correlation values to get
* CorrelationAttributeInstances for
* @param sql The SQL query.
* @param attrType The correlation attribute type.
*
* @return List of artifact instances for a given type with the specified
* values
* @return The correlation attribute instances.
*
* @throws CorrelationAttributeNormalizationException
* @throws CentralRepoException
* @throws CorrelationAttributeNormalizationException The exception is
* thrown if the supplied
* correlation attribute
* value cannot be
* normalized.
* @throws CentralRepoException The exception is
* thrown if there is an
* error querying the
* central repository.
*/
private List<CorrelationAttributeInstance> getArtifactInstances(String sql, CorrelationAttributeInstance.Type aType) throws CorrelationAttributeNormalizationException, CentralRepoException {
private List<CorrelationAttributeInstance> getCorrAttrInstances(String sql, CorrelationAttributeInstance.Type attrType) throws CorrelationAttributeNormalizationException, CentralRepoException {
List<CorrelationAttributeInstance> corrAttrs = new ArrayList<>();
Connection conn = connect();
List<CorrelationAttributeInstance> artifactInstances = new ArrayList<>();
CorrelationAttributeInstance artifactInstance;
PreparedStatement preparedStatement = null;
ResultSet resultSet = null;
try {
preparedStatement = conn.prepareStatement(sql);
resultSet = preparedStatement.executeQuery();
while (resultSet.next()) {
artifactInstance = getEamArtifactInstanceFromResultSet(resultSet, aType);
artifactInstances.add(artifactInstance);
CorrelationAttributeInstance corrAttr = getCorrAttrFromResultSet(resultSet, attrType);
corrAttrs.add(corrAttr);
}
} catch (SQLException ex) {
throw new CentralRepoException("Error getting artifact instances by artifactType and artifactValue.", ex); // NON-NLS
throw new CentralRepoException(String.format("Error getting correlation attributes using query %s", sql), ex); // NON-NLS
} finally {
CentralRepoDbUtil.closeResultSet(resultSet);
CentralRepoDbUtil.closeStatement(preparedStatement);
CentralRepoDbUtil.closeConnection(conn);
}
return artifactInstances;
return corrAttrs;
}

/**
@@ -1509,7 +1513,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
PreparedStatement preparedStatement = null;
String tableName = CentralRepoDbUtil.correlationTypeToInstanceTableName(instance.getCorrelationType());
ResultSet resultSet = null;

try {
if (correlationCaseId > 0 && sourceObjID != null && correlationDataSourceId > 0) {
//The CorrelationCase is in the Central repository.
@@ -3643,7 +3647,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
*
* @throws SQLException when an expected column name is not in the resultSet
*/
private CorrelationAttributeInstance getEamArtifactInstanceFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
private CorrelationAttributeInstance getCorrAttrFromResultSet(ResultSet resultSet, CorrelationAttributeInstance.Type aType) throws SQLException, CentralRepoException, CorrelationAttributeNormalizationException {
if (null == resultSet) {
return null;
}
@@ -1,18 +1,8 @@
caseeventlistener.evidencetag=Evidence
CaseEventsListener.module.name=Central Repository
CaseEventsListener.prevCaseComment.text=Users seen in previous cases
CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)
CentralRepositoryNotificationDialog.bulletHeader=This data is used to:
CentralRepositoryNotificationDialog.bulletOne=Ignore common items (files, domains, and accounts)
CentralRepositoryNotificationDialog.bulletThree=Create personas that group accounts
CentralRepositoryNotificationDialog.bulletTwo=Identify where an item was previously seen
CentralRepositoryNotificationDialog.finalRemarks=To limit what is stored, use the Central Repository options panel.
CentralRepositoryNotificationDialog.header=Autopsy stores data about each case in its Central Repository.
IngestEventsListener.ingestmodule.name=Central Repository
IngestEventsListener.prevCaseComment.text=Previous Case:
# {0} - typeName
# {1} - count
IngestEventsListener.prevCount.text=Number of previous {0}: {1}
IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)
IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
Installer.centralRepoUpgradeFailed.title=Central repository disabled
@@ -22,18 +22,13 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.apache.commons.lang.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -43,7 +38,6 @@ import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.DataSourceNameChangedEvent;
import org.sleuthkit.autopsy.casemodule.events.OsAcctInstancesAddedEvent;
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
@@ -61,34 +55,20 @@ import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.OsAccount;
import org.sleuthkit.datamodel.OsAccountInstance;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;

/**
* Listen for case events and update entries in the Central Repository database
* accordingly
* An Autopsy events listener for case events relevant to the central
* repository.
*/
@Messages({"caseeventlistener.evidencetag=Evidence"})
public final class CaseEventListener implements PropertyChangeListener {

private static final Logger LOGGER = Logger.getLogger(CaseEventListener.class.getName());
private final ExecutorService jobProcessingExecutor;
private static final String CASE_EVENT_THREAD_NAME = "Case-Event-Listener-%d";

private static final String CASE_EVENT_THREAD_NAME = "CR-Case-Event-Listener-%d";
private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(
Case.Events.CONTENT_TAG_ADDED, Case.Events.CONTENT_TAG_DELETED,
Case.Events.BLACKBOARD_ARTIFACT_TAG_DELETED, Case.Events.BLACKBOARD_ARTIFACT_TAG_ADDED,
@@ -96,14 +76,29 @@ public final class CaseEventListener implements PropertyChangeListener {
Case.Events.DATA_SOURCE_ADDED,
Case.Events.TAG_DEFINITION_CHANGED,
Case.Events.CURRENT_CASE,
Case.Events.DATA_SOURCE_NAME_CHANGED,
Case.Events.OS_ACCT_INSTANCES_ADDED);
Case.Events.DATA_SOURCE_NAME_CHANGED);
private final ExecutorService jobProcessingExecutor;

/**
* Constructs an Autopsy events listener for case events relevant to the
* central repository.
*/
public CaseEventListener() {
jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(CASE_EVENT_THREAD_NAME).build());
}

/**
* Starts up the listener.
*/
public void startUp() {
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
}

/**
* Shuts down the listener.
*/
public void shutdown() {
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
}

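Editor's sketch, not part of the commit: with this change the listener's lifecycle is driven by startUp() and shutdown() instead of installListeners()/uninstallListeners(), matching the calls made from CentralRepositoryService in the hunks above.

    CaseEventListener caseEventListener = new CaseEventListener();
    caseEventListener.startUp();   // openCaseResources(): subscribe to CASE_EVENTS_OF_INTEREST
    // ... case events are handled on the single-threaded executor while the case is open ...
    caseEventListener.shutdown();  // closeCaseResources(): unsubscribe and stop the executor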
@@ -113,92 +108,73 @@ public final class CaseEventListener implements PropertyChangeListener {
return;
}

CentralRepository dbManager;
try {
dbManager = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to get instance of db manager.", ex);
if (!CentralRepository.isEnabled()) {
return;
}

// If any changes are made to which event types are handled the change
// must also be made to CASE_EVENTS_OF_INTEREST.
CentralRepository centralRepo;
try {
centralRepo = CentralRepository.getInstance();
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Failed to access central repository", ex);
return;
}

/*
* IMPORTANT: If any changes are made to which event types are handled,
* the change must also be made to the contents of the
* CASE_EVENTS_OF_INTEREST set.
*/
switch (Case.Events.valueOf(evt.getPropertyName())) {
case CONTENT_TAG_ADDED:
case CONTENT_TAG_DELETED: {
jobProcessingExecutor.submit(new ContentTagTask(dbManager, evt));
}
break;

case CONTENT_TAG_DELETED:
jobProcessingExecutor.submit(new ContentTagTask(centralRepo, evt));
break;
case BLACKBOARD_ARTIFACT_TAG_DELETED:
case BLACKBOARD_ARTIFACT_TAG_ADDED: {
jobProcessingExecutor.submit(new BlackboardTagTask(dbManager, evt));
}
break;

case DATA_SOURCE_ADDED: {
jobProcessingExecutor.submit(new DataSourceAddedTask(dbManager, evt));
}
break;
case TAG_DEFINITION_CHANGED: {
case BLACKBOARD_ARTIFACT_TAG_ADDED:
jobProcessingExecutor.submit(new ArtifactTagTask(centralRepo, evt));
break;
case DATA_SOURCE_ADDED:
jobProcessingExecutor.submit(new DataSourceAddedTask(centralRepo, evt));
break;
case TAG_DEFINITION_CHANGED:
jobProcessingExecutor.submit(new TagDefinitionChangeTask(evt));
}
break;
case CURRENT_CASE: {
jobProcessingExecutor.submit(new CurrentCaseTask(dbManager, evt));
}
break;
case DATA_SOURCE_NAME_CHANGED: {
jobProcessingExecutor.submit(new DataSourceNameChangedTask(dbManager, evt));
}
break;
case OS_ACCT_INSTANCES_ADDED: {
if (((AutopsyEvent) evt).getSourceType() == AutopsyEvent.SourceType.LOCAL) {
jobProcessingExecutor.submit(new OsAccountInstancesAddedTask(dbManager, evt));
}
}
break;
break;
case CURRENT_CASE:
jobProcessingExecutor.submit(new CurrentCaseTask(centralRepo, evt));
break;
case DATA_SOURCE_NAME_CHANGED:
jobProcessingExecutor.submit(new DataSourceNameChangedTask(centralRepo, evt));
break;
default:
break;
}
}

/*
* Add all of our Case Event Listeners to the case.
/**
* Determines whether or not a tag has notable status.
*
* @param tag The tag.
*
* @return True or false.
*/
public void installListeners() {
Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
}

/*
* Remove all of our Case Event Listeners from the case.
*/
public void uninstallListeners() {
Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, this);
private static boolean isNotableTag(Tag tag) {
return (tag != null && isNotableTagDefinition(tag.getName()));
}

/**
* Returns true if the tag has a notable status.
* Determines whether or not a tag definition calls for notable status.
*
* @param t The tag to use in determination.
* @param tagDef The tag definition.
*
* @return Whether or not it is a notable tag.
* @return True or false.
*/
private static boolean isNotableTag(Tag t) {
return (t != null && isNotableTagName(t.getName()));
private static boolean isNotableTagDefinition(TagName tagDef) {
return (tagDef != null && TagsManager.getNotableTagDisplayNames().contains(tagDef.getDisplayName()));
}

/**
* Returns true if the tag name has a notable status.
*
* @param t The tag name to use in determination.
*
* @return Whether or not it is a notable tag name.
*/
private static boolean isNotableTagName(TagName t) {
return (t != null && TagsManager.getNotableTagDisplayNames().contains(t.getDisplayName()));
}

/**
* Searches a list of tags for a tag with a notable status.
* Searches a list of tags for a tag with notable status.
*
* @param tags The tags to search.
*
@@ -208,7 +184,6 @@ public final class CaseEventListener implements PropertyChangeListener {
if (tags == null) {
return false;
}

return tags.stream()
.filter(CaseEventListener::isNotableTag)
.findFirst()
@@ -216,24 +191,25 @@ public final class CaseEventListener implements PropertyChangeListener {
}

/**
* Sets the known status of a blackboard artifact in the central repository.
* Sets the notable (known) status of a central repository correlation
* attribute corresponding to an artifact.
*
* @param dbManager The central repo database.
* @param bbArtifact The blackboard artifact to set known status.
* @param knownStatus The new known status.
* @param centralRepo The central repository.
* @param artifact The artifact.
* @param notableStatus The new notable status.
*/
private static void setArtifactKnownStatus(CentralRepository dbManager, BlackboardArtifact bbArtifact, TskData.FileKnown knownStatus) {
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
if (bbArtifact instanceof DataArtifact) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) bbArtifact));
} else if (bbArtifact instanceof AnalysisResult) {
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) bbArtifact));
private static void setArtifactKnownStatus(CentralRepository centralRepo, BlackboardArtifact artifact, TskData.FileKnown notableStatus) {
List<CorrelationAttributeInstance> corrAttrInstances = new ArrayList<>();
if (artifact instanceof DataArtifact) {
corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((DataArtifact) artifact));
} else if (artifact instanceof AnalysisResult) {
corrAttrInstances.addAll(CorrelationAttributeUtil.makeCorrAttrsForSearch((AnalysisResult) artifact));
}
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
for (CorrelationAttributeInstance corrAttrInstance : corrAttrInstances) {
try {
dbManager.setAttributeInstanceKnownStatus(eamArtifact, knownStatus);
centralRepo.setAttributeInstanceKnownStatus(corrAttrInstance, notableStatus);
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database while setting artifact known status.", ex); //NON-NLS
LOGGER.log(Level.SEVERE, String.format("Error setting correlation attribute instance known status", corrAttrInstance), ex); //NON-NLS
}
}
}
@@ -359,12 +335,12 @@ public final class CaseEventListener implements PropertyChangeListener {
}
}

private final class BlackboardTagTask implements Runnable {
private final class ArtifactTagTask implements Runnable {

private final CentralRepository dbManager;
private final PropertyChangeEvent event;

private BlackboardTagTask(CentralRepository db, PropertyChangeEvent evt) {
private ArtifactTagTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}
@@ -644,7 +620,6 @@ public final class CaseEventListener implements PropertyChangeListener {
*/
if ((null == event.getOldValue()) && (event.getNewValue() instanceof Case)) {
Case curCase = (Case) event.getNewValue();
IngestEventsListener.resetCeModuleInstanceCount();

if (!CentralRepository.isEnabled()) {
return;
@@ -663,126 +638,6 @@ public final class CaseEventListener implements PropertyChangeListener {
} // CURRENT_CASE
}

@NbBundle.Messages({"CaseEventsListener.module.name=Central Repository",
"CaseEventsListener.prevCaseComment.text=Users seen in previous cases",
"CaseEventsListener.prevExists.text=Previously Seen Users (Central Repository)"})
/**
* Add OsAccount Instance to CR and find interesting items based on the
* OsAccount
*/
private final class OsAccountInstancesAddedTask implements Runnable {

private final CentralRepository dbManager;
private final PropertyChangeEvent event;
private final String MODULE_NAME = Bundle.CaseEventsListener_module_name();

private OsAccountInstancesAddedTask(CentralRepository db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
}

@Override
public void run() {
//Nothing to do here if the central repo is not enabled or if ingest is running but is set to not save data/make artifacts
if (!CentralRepository.isEnabled()
|| (IngestManager.getInstance().isIngestRunning() && !(IngestEventsListener.isFlagSeenDevices() || IngestEventsListener.shouldCreateCrProperties()))) {
return;
}

final OsAcctInstancesAddedEvent osAcctInstancesAddedEvent = (OsAcctInstancesAddedEvent) event;
List<OsAccountInstance> addedOsAccountNew = osAcctInstancesAddedEvent.getOsAccountInstances();
for (OsAccountInstance osAccountInstance : addedOsAccountNew) {
try {
OsAccount osAccount = osAccountInstance.getOsAccount();
List<CorrelationAttributeInstance> correlationAttributeInstances = CorrelationAttributeUtil.makeCorrAttrsToSave(osAccountInstance);
if (correlationAttributeInstances.isEmpty()) {
return;
}

Optional<String> accountAddr = osAccount.getAddr();
try {
// Save to the database if requested
if (IngestEventsListener.shouldCreateCrProperties()) {
for (CorrelationAttributeInstance correlationAttributeInstance : correlationAttributeInstances) {
dbManager.addArtifactInstance(correlationAttributeInstance);
}
}

// Look up and create artifacts for previously seen accounts if requested
if (IngestEventsListener.isFlagSeenDevices()) {

CorrelationAttributeInstance instanceWithTypeValue = null;
for (CorrelationAttributeInstance instance : correlationAttributeInstances) {
if (instance.getCorrelationType().getId() == CorrelationAttributeInstance.OSACCOUNT_TYPE_ID) {
instanceWithTypeValue = instance;
break;
}
}

if (instanceWithTypeValue != null) {
List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());

for (CorrelationAttributeInstance instance : previousOccurences) {
//we can get the first instance here since the case for all attributes will be the same
if (!instance.getCorrelationCase().getCaseUUID().equals(instanceWithTypeValue.getCorrelationCase().getCaseUUID())) {
SleuthkitCase tskCase = osAccount.getSleuthkitCase();
Blackboard blackboard = tskCase.getBlackboard();

List<String> caseDisplayNames = dbManager.getListCasesHavingArtifactInstances(instanceWithTypeValue.getCorrelationType(), instanceWithTypeValue.getCorrelationValue());

// calculate score
Score score;
int numCases = caseDisplayNames.size();
if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numCases > IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
score = Score.SCORE_NONE;
} else {
// don't make an Analysis Result, the artifact is too common.
continue;
}

String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously seen in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.CaseEventsListener_prevExists_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
instance.getCorrelationType().getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
instanceWithTypeValue.getCorrelationValue()),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
BlackboardArtifact newAnalysisResult = osAccount.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
try {
blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
break;
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS
}
}
}
}
}

} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, "Exception with Correlation Attribute Normalization.", ex); //NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Cannot get central repository for OsAccount: %s.", accountAddr.get()), ex); //NON-NLS
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Cannot get central repository for OsAccount: " + "OsAccount", ex);
}
}
}
}

private final class DataSourceNameChangedTask implements Runnable {

private final CentralRepository dbManager;
@@ -816,6 +671,7 @@ public final class CaseEventListener implements PropertyChangeListener {
LOGGER.log(Level.SEVERE, "No open case", ex);
}
}
} // DATA_SOURCE_NAME_CHANGED
}
}

}

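Editor's sketch, not part of the commit: the scoring rule used by the removed OsAccountInstancesAddedTask above, and again by IngestEventsListener below, reduces to the two thresholds defined in that class (10 and 20). For example, a value seen in 7 prior cases scores likely notable, one seen in 15 scores none, and one seen in 25 produces no analysis result at all.

    static Score scoreForPreviousCases(int numCases) {
        if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
            return Score.SCORE_LIKELY_NOTABLE;
        } else if (numCases <= IngestEventsListener.MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
            return Score.SCORE_NONE;
        }
        return null; // too common: the caller skips creating a "previously seen" artifact
    }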
@ -1,673 +0,0 @@
|
||||
/*
|
||||
* Central Repository
|
||||
*
|
||||
* Copyright 2017-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.centralrepository.eventlisteners;
|
||||
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import static java.lang.Boolean.FALSE;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
|
||||
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
|
||||
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisEvent;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
|
||||
import org.sleuthkit.datamodel.DataArtifact;
|
||||
import org.sleuthkit.datamodel.Score;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
* Listen for ingest events and update entries in the Central Repository
|
||||
* database accordingly
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Central Repository"})
|
||||
public class IngestEventsListener {
|
||||
|
||||
private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
|
||||
private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.DATA_SOURCE_ANALYSIS_COMPLETED);
|
||||
private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(DATA_ADDED);
|
||||
private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
|
||||
private static int correlationModuleInstanceCount;
|
||||
private static boolean flagNotableItems;
|
||||
private static boolean flagSeenDevices;
|
||||
private static boolean createCrProperties;
|
||||
private static boolean flagUniqueArtifacts;
|
||||
private static final String INGEST_EVENT_THREAD_NAME = "Ingest-Event-Listener-%d";
|
||||
private final ExecutorService jobProcessingExecutor;
|
||||
private final PropertyChangeListener pcl1 = new IngestModuleEventListener();
|
||||
private final PropertyChangeListener pcl2 = new IngestJobEventListener();
|
||||
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
|
||||
|
||||
static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10;
|
||||
static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20;
|
||||
|
||||
public IngestEventsListener() {
|
||||
jobProcessingExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat(INGEST_EVENT_THREAD_NAME).build());
|
||||
}
|
||||
|
||||
public void shutdown() {
|
||||
ThreadUtils.shutDownTaskExecutor(jobProcessingExecutor);
|
||||
}
|
||||
|
||||
/*
|
||||
* Add all of our Ingest Event Listeners to the IngestManager Instance.
|
||||
*/
|
||||
public void installListeners() {
|
||||
IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl1);
|
||||
IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl2);
|
||||
}
|
||||
|
||||
/*
|
||||
* Remove all of our Ingest Event Listeners from the IngestManager Instance.
|
||||
*/
|
||||
public void uninstallListeners() {
|
||||
IngestManager.getInstance().removeIngestModuleEventListener(pcl1);
|
||||
IngestManager.getInstance().removeIngestJobEventListener(pcl2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Increase the number of IngestEventsListeners adding contents to the
|
||||
* Central Repository.
|
||||
*/
|
||||
public synchronized static void incrementCorrelationEngineModuleCount() {
|
||||
correlationModuleInstanceCount++; //Should be called once in the Central Repository module's startup method.
|
||||
}
|
||||
|
||||
/**
|
||||
* Decrease the number of IngestEventsListeners adding contents to the
|
||||
* Central Repository.
|
||||
*/
|
||||
public synchronized static void decrementCorrelationEngineModuleCount() {
|
||||
if (getCeModuleInstanceCount() > 0) { //prevent it ingestJobCounter from going negative
|
||||
correlationModuleInstanceCount--; //Should be called once in the Central Repository module's shutdown method.
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the counter which keeps track of if the Central Repository Module
|
||||
* is being run during injest to 0.
|
||||
*/
|
||||
synchronized static void resetCeModuleInstanceCount() {
|
||||
correlationModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether or not the Central Repository Module is enabled for any of the
|
||||
* currently running ingest jobs.
|
||||
*
|
||||
* @return boolean True for Central Repository enabled, False for disabled
|
||||
*/
|
||||
public synchronized static int getCeModuleInstanceCount() {
|
||||
return correlationModuleInstanceCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Are notable items being flagged?
|
||||
*
|
||||
* @return True if flagging notable items; otherwise false.
|
||||
*/
|
||||
public synchronized static boolean isFlagNotableItems() {
|
||||
return flagNotableItems;
|
||||
}
|
||||
|
||||
/**
|
||||
* Are previously seen devices being flagged?
|
||||
*
|
||||
* @return True if flagging seen devices; otherwise false.
|
||||
*/
|
||||
public synchronized static boolean isFlagSeenDevices() {
|
||||
return flagSeenDevices;
|
||||
}
|
||||
|
||||
/**
|
||||
* Are correlation properties being created
|
||||
*
|
||||
* @return True if creating correlation properties; otherwise false.
|
||||
*/
|
||||
public synchronized static boolean shouldCreateCrProperties() {
|
||||
return createCrProperties;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the listener to flag notable items or not.
|
||||
*
|
||||
* @param value True to flag notable items; otherwise false.
|
||||
*/
|
||||
public synchronized static void setFlagNotableItems(boolean value) {
|
||||
flagNotableItems = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the listener to flag previously seen devices or not.
|
||||
*
|
||||
* @param value True to flag seen devices; otherwise false.
|
||||
*/
|
||||
public synchronized static void setFlagSeenDevices(boolean value) {
|
||||
flagSeenDevices = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the listener to flag unique apps or not.
|
||||
*
|
||||
* @param value True to flag unique apps; otherwise false.
|
||||
*/
|
||||
public synchronized static void setFlagUniqueArtifacts(boolean value) {
|
||||
flagUniqueArtifacts = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Are unique apps being flagged?
|
||||
*
|
||||
* @return True if flagging unique apps; otherwise false.
|
||||
*/
|
||||
public synchronized static boolean isFlagUniqueArtifacts() {
|
||||
return flagUniqueArtifacts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the listener to create correlation properties
|
||||
*
|
||||
* @param value True to create properties; otherwise false.
|
||||
*/
|
||||
public synchronized static void setCreateCrProperties(boolean value) {
|
||||
createCrProperties = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make a "previously seen" artifact based on a new artifact being
|
||||
* previously seen.
|
||||
*
|
||||
* @param originalArtifact Original artifact that we want to flag
|
||||
* @param caseDisplayNames List of case names artifact was previously seen
|
||||
* in
|
||||
* @param aType The correlation type.
|
||||
* @param value The correlation value.
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
|
||||
"IngestEventsListener.prevCaseComment.text=Previous Case: "})
|
||||
static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
|
||||
CorrelationAttributeInstance.Type aType, String value) {
|
||||
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
|
||||
String justification = "Previously marked as notable in cases " + prevCases;
|
||||
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevTaggedSet_text()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_TYPE, MODULE_NAME,
|
||||
aType.getDisplayName()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_VALUE, MODULE_NAME,
|
||||
value),
|
||||
new BlackboardAttribute(
|
||||
TSK_OTHER_CASES, MODULE_NAME,
|
||||
prevCases));
|
||||
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevTaggedSet_text(),
|
||||
Score.SCORE_NOTABLE, justification);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a "previously seen" hit for a device which was previously seen in
|
||||
* the central repository. NOTE: Artifacts that are too common will be
|
||||
* skipped.
|
||||
*
|
||||
* @param originalArtifact the artifact to create the "previously seen" item
|
||||
* for
|
||||
* @param caseDisplayNames the case names the artifact was previously seen
|
||||
* in
|
||||
* @param aType The correlation type.
|
||||
* @param value The correlation value.
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
|
||||
"# {0} - typeName",
|
||||
"# {1} - count",
|
||||
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
|
||||
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
|
||||
CorrelationAttributeInstance.Type aType, String value) {
|
||||
|
||||
// calculate score
|
||||
Score score;
|
||||
int numCases = caseDisplayNames.size();
|
||||
if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
|
||||
score = Score.SCORE_LIKELY_NOTABLE;
|
||||
} else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
|
||||
score = Score.SCORE_NONE;
|
||||
} else {
|
||||
// don't make an Analysis Result, the artifact is too common.
|
||||
return;
|
||||
}
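// Editorial note (not part of this commit): the two MAX_NUM_PREVIOUS_CASES_* constants
// referenced above are defined (or imported) elsewhere in this class. Values seen in at
// most the smaller number of prior cases get SCORE_LIKELY_NOTABLE, values seen in more
// cases than the larger number are treated as too common to flag, and the range in
// between produces a result with SCORE_NONE.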
|
||||
|
||||
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
|
||||
String justification = "Previously seen in cases " + prevCases;
|
||||
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevExists_text()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_TYPE, MODULE_NAME,
|
||||
aType.getDisplayName()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_VALUE, MODULE_NAME,
|
||||
value),
|
||||
new BlackboardAttribute(
|
||||
TSK_OTHER_CASES, MODULE_NAME,
|
||||
prevCases));
|
||||
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(),
|
||||
score, justification);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a "previously unseen" hit for an application which was never seen
|
||||
* in the central repository.
|
||||
*
|
||||
* @param originalArtifact the artifact to create the "previously unseen"
|
||||
* item for
|
||||
* @param aType The correlation type.
|
||||
* @param value The correlation value.
|
||||
*/
|
||||
static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) {
|
||||
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_TYPE, MODULE_NAME,
|
||||
aType.getDisplayName()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CORRELATION_VALUE, MODULE_NAME,
|
||||
value));
|
||||
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "",
|
||||
Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen");
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an artifact to flag the passed in artifact.
|
||||
*
|
||||
* @param newArtifactType Type of artifact to create.
|
||||
* @param originalArtifact Artifact in current case we want to flag
|
||||
* @param attributesForNewArtifact Attributes to assign to the new artifact
|
||||
* @param configuration The configuration to be specified for the
|
||||
* new artifact hit
|
||||
* @param score sleuthkit.datamodel.Score to be assigned
|
||||
* to this artifact
|
||||
* @param justification Justification string
|
||||
*/
|
||||
private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
|
||||
Score score, String justification) {
|
||||
try {
|
||||
SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
|
||||
Blackboard blackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID());
|
||||
if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) {
|
||||
BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult(
|
||||
newArtifactType, score,
|
||||
null, configuration, justification, attributesForNewArtifact)
|
||||
.getAnalysisResult();
|
||||
|
||||
try {
|
||||
blackboard.postArtifact(newArtifact, MODULE_NAME, null);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
|
||||
} catch (IllegalStateException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
private class IngestModuleEventListener implements PropertyChangeListener {
|
||||
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
//if ingest is running, only proceed when a Central Repository ingest module is also running
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
//in these cases we still want to create correlation attributes for those artifacts when appropriate
|
||||
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
|
||||
CentralRepository dbManager;
|
||||
try {
|
||||
dbManager = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
|
||||
return;
|
||||
}
|
||||
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
|
||||
case DATA_ADDED: {
|
||||
//if ingest isn't running create the "previously seen" items,
|
||||
// otherwise use the ingest module setting to determine if we create "previously seen" items
|
||||
boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems();
|
||||
boolean flagPrevious = !IngestManager.getInstance().isIngestRunning() || isFlagSeenDevices();
|
||||
boolean createAttributes = !IngestManager.getInstance().isIngestRunning() || shouldCreateCrProperties();
|
||||
boolean flagUnique = !IngestManager.getInstance().isIngestRunning() || isFlagUniqueArtifacts();
|
||||
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable, flagPrevious, createAttributes, flagUnique));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class IngestJobEventListener implements PropertyChangeListener {
|
||||
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
CentralRepository dbManager;
|
||||
try {
|
||||
dbManager = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
|
||||
return;
|
||||
}
|
||||
|
||||
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
|
||||
case DATA_SOURCE_ANALYSIS_COMPLETED: {
|
||||
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
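/*
 * Editorial note (assumption, not shown in this diff): PropertyChangeListener
 * implementations like the two inner classes above are normally registered with the
 * ingest manager along the lines of
 *
 *     IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());
 *     IngestManager.getInstance().addIngestJobEventListener(new IngestJobEventListener());
 *
 * when the events listener is installed.
 */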
|
||||
|
||||
private final class AnalysisCompleteTask implements Runnable {
|
||||
|
||||
private final CentralRepository dbManager;
|
||||
private final PropertyChangeEvent event;
|
||||
|
||||
private AnalysisCompleteTask(CentralRepository db, PropertyChangeEvent evt) {
|
||||
dbManager = db;
|
||||
event = evt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
// clear the tracker to reduce memory usage
|
||||
if (getCeModuleInstanceCount() == 0) {
|
||||
recentlyAddedCeArtifacts.clear();
|
||||
}
|
||||
//else another instance of the Central Repository module is still running.
|
||||
|
||||
/*
|
||||
* Ensure the data source in the Central Repository has hash values
|
||||
* that match those in the case database.
|
||||
*/
|
||||
if (!CentralRepository.isEnabled()) {
|
||||
return;
|
||||
}
|
||||
Content dataSource;
|
||||
String dataSourceName = "";
|
||||
long dataSourceObjectId = -1;
|
||||
try {
|
||||
dataSource = ((DataSourceAnalysisEvent) event).getDataSource();
|
||||
/*
|
||||
* We only care about Images for the purpose of updating hash
|
||||
* values.
|
||||
*/
|
||||
if (!(dataSource instanceof Image)) {
|
||||
return;
|
||||
}
|
||||
|
||||
dataSourceName = dataSource.getName();
|
||||
dataSourceObjectId = dataSource.getId();
|
||||
|
||||
Case openCase = Case.getCurrentCaseThrows();
|
||||
|
||||
CorrelationCase correlationCase = dbManager.getCase(openCase);
|
||||
if (null == correlationCase) {
|
||||
correlationCase = dbManager.newCase(openCase);
|
||||
}
|
||||
|
||||
CorrelationDataSource correlationDataSource = dbManager.getDataSource(correlationCase, dataSource.getId());
|
||||
if (correlationDataSource == null) {
|
||||
// Add the data source.
|
||||
CorrelationDataSource.fromTSKDataSource(correlationCase, dataSource);
|
||||
} else {
|
||||
// Sync the data source hash values if necessary.
|
||||
if (dataSource instanceof Image) {
|
||||
Image image = (Image) dataSource;
|
||||
|
||||
String imageMd5Hash = image.getMd5();
|
||||
if (imageMd5Hash == null) {
|
||||
imageMd5Hash = "";
|
||||
}
|
||||
String crMd5Hash = correlationDataSource.getMd5();
|
||||
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
|
||||
correlationDataSource.setMd5(imageMd5Hash);
|
||||
}
|
||||
|
||||
String imageSha1Hash = image.getSha1();
|
||||
if (imageSha1Hash == null) {
|
||||
imageSha1Hash = "";
|
||||
}
|
||||
String crSha1Hash = correlationDataSource.getSha1();
|
||||
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
|
||||
correlationDataSource.setSha1(imageSha1Hash);
|
||||
}
|
||||
|
||||
String imageSha256Hash = image.getSha256();
|
||||
if (imageSha256Hash == null) {
|
||||
imageSha256Hash = "";
|
||||
}
|
||||
String crSha256Hash = correlationDataSource.getSha256();
|
||||
if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) {
|
||||
correlationDataSource.setSha256(imageSha256Hash);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format(
|
||||
"Unable to fetch data from the Central Repository for data source '%s' (obj_id=%d)",
|
||||
dataSourceName, dataSourceObjectId), ex);
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
LOGGER.log(Level.SEVERE, "No current case opened.", ex);
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format(
|
||||
"Unable to fetch data from the case database for data source '%s' (obj_id=%d)",
|
||||
dataSourceName, dataSourceObjectId), ex);
|
||||
}
|
||||
} // DATA_SOURCE_ANALYSIS_COMPLETED
|
||||
}
|
||||
|
||||
private final class DataAddedTask implements Runnable {
|
||||
|
||||
private final CentralRepository dbManager;
|
||||
private final PropertyChangeEvent event;
|
||||
private final boolean flagNotableItemsEnabled;
|
||||
private final boolean flagPreviousItemsEnabled;
|
||||
private final boolean createCorrelationAttributes;
|
||||
private final boolean flagUniqueItemsEnabled;
|
||||
|
||||
private DataAddedTask(CentralRepository db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes, boolean flagUnique) {
|
||||
this.dbManager = db;
|
||||
this.event = evt;
|
||||
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
|
||||
this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
|
||||
this.createCorrelationAttributes = createCorrelationAttributes;
|
||||
this.flagUniqueItemsEnabled = flagUnique;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
if (!CentralRepository.isEnabled()) {
|
||||
return;
|
||||
}
|
||||
final ModuleDataEvent mde = (ModuleDataEvent) event.getOldValue();
|
||||
Collection<BlackboardArtifact> bbArtifacts = mde.getArtifacts();
|
||||
if (null == bbArtifacts) { //the ModuleDataEvents don't always have a collection of artifacts set
|
||||
return;
|
||||
}
|
||||
List<CorrelationAttributeInstance> eamArtifacts = new ArrayList<>();
|
||||
|
||||
for (BlackboardArtifact bbArtifact : bbArtifacts) {
|
||||
// makeCorrAttrsToSave will filter out artifacts which should not be sources of CR data.
|
||||
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
|
||||
if (bbArtifact instanceof DataArtifact) {
|
||||
convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact));
|
||||
}
|
||||
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
|
||||
try {
|
||||
// Only do something with this artifact if it's unique within the job
|
||||
if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) {
|
||||
|
||||
// Get a list of instances for a given value (hash, email, etc.)
|
||||
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
|
||||
// check if we are flagging things
|
||||
if (flagNotableItemsEnabled || flagPreviousItemsEnabled || flagUniqueItemsEnabled) {
|
||||
try {
|
||||
previousOccurrences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
|
||||
// make sure the previous instances do not contain current case
|
||||
for (Iterator<CorrelationAttributeInstance> iterator = previousOccurrences.iterator(); iterator.hasNext();) {
|
||||
CorrelationAttributeInstance instance = iterator.next();
|
||||
if (instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) {
|
||||
// this is the current case - remove the instance from the previousOccurrences list
|
||||
iterator.remove();
|
||||
}
|
||||
}
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
LOGGER.log(Level.INFO, String.format("Unable to flag previously seen device: %s.", eamArtifact.toString()), ex);
|
||||
}
|
||||
}
|
||||
|
||||
// Was it previously marked as bad?
|
||||
// query db for artifact instances having this TYPE/VALUE and knownStatus = "Bad".
|
||||
// if getKnownStatus() is "Unknown" and this artifact instance was marked bad in a previous case,
|
||||
// create a TSK_PREVIOUSLY_NOTABLE artifact on the blackboard.
|
||||
if (flagNotableItemsEnabled) {
|
||||
List<String> caseDisplayNames = getCaseDisplayNamesForNotable(previousOccurrences);
|
||||
if (!caseDisplayNames.isEmpty()) {
|
||||
makeAndPostPreviousNotableArtifact(bbArtifact,
|
||||
caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
|
||||
// if we have marked this artifact as notable, then skip the analysis of whether it was previously seen
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// flag previously seen devices and communication accounts (emails, phones, etc)
|
||||
if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty()
|
||||
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) {
|
||||
|
||||
List<String> caseDisplayNames = getCaseDisplayNames(previousOccurrences);
|
||||
makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
}
|
||||
|
||||
// flag previously unseen apps and domains
|
||||
if (flagUniqueItemsEnabled
|
||||
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
|
||||
|
||||
if (previousOccurrences.isEmpty()) {
|
||||
makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
}
|
||||
}
|
||||
if (createCorrelationAttributes) {
|
||||
eamArtifacts.add(eamArtifact);
|
||||
}
|
||||
}
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error counting notable artifacts.", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (FALSE == eamArtifacts.isEmpty()) {
|
||||
for (CorrelationAttributeInstance eamArtifact : eamArtifacts) {
|
||||
try {
|
||||
dbManager.addArtifactInstance(eamArtifact);
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error adding artifact to database.", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
} // DATA_ADDED
|
||||
}
|
||||
}
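/*
 * Editorial note: recentlyAddedCeArtifacts, used above as the per-job de-duplication
 * set, is cleared by AnalysisCompleteTask once no Central Repository module instances
 * remain, so duplicates are only suppressed within a single ingest run.
 */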
|
||||
|
||||
/**
|
||||
* Gets case display names for a list of CorrelationAttributeInstance.
|
||||
*
|
||||
* @param occurrences List of CorrelationAttributeInstance
|
||||
*
|
||||
* @return List of case display names
|
||||
*/
|
||||
private List<String> getCaseDisplayNames(List<CorrelationAttributeInstance> occurrences) {
|
||||
List<String> caseNames = new ArrayList<>();
|
||||
for (CorrelationAttributeInstance occurrence : occurrences) {
|
||||
caseNames.add(occurrence.getCorrelationCase().getDisplayName());
|
||||
}
|
||||
return caseNames;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets case display names for only occurrences marked as NOTABLE/BAD.
|
||||
*
|
||||
* @param occurrences List of CorrelationAttributeInstance
|
||||
*
|
||||
* @return List of case display names of NOTABLE/BAD occurrences
|
||||
*/
|
||||
private List<String> getCaseDisplayNamesForNotable(List<CorrelationAttributeInstance> occurrences) {
|
||||
List<String> caseNames = new ArrayList<>();
|
||||
for (CorrelationAttributeInstance occurrence : occurrences) {
|
||||
if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
|
||||
caseNames.add(occurrence.getCorrelationCase().getDisplayName());
|
||||
}
|
||||
}
|
||||
return caseNames;
|
||||
}
|
||||
}
@ -1,12 +1,26 @@
CentralRepoIngestModel_name_header=Name:<br>
CentralRepoIngestModel_previous_case_header=<br>Previous Cases:<br>
CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module.
CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized
CentralRepoIngestModule.prevCaseComment.text=Previous Case: 
CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>
# {0} - Name of file that is Notable
CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}
CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type
CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository
CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository
CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository
CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository
CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled
CentralRepoIngestModule_filename_inbox_msg_header=File Name
CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash
CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository
CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case
CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute
# {0} - Name of item that is Notable
CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}
# {0} - list of cases
CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}
CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)
CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases
# {0} - list of cases
CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}
CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)
CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases
CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)
CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases
CentralRepoIngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation
CentralRepoIngestModuleFactory.ingestmodule.name=Central Repository
IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings
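# Editorial note: the {0}/{1} tokens in the messages above are MessageFormat
# placeholders as used by NbBundle; the preceding "# {0} - ..." comments document
# the arguments the generated Bundle methods expect.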
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2021 Basis Technology Corp.
|
||||
* Copyright 2021-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,37 +18,328 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
|
||||
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.getOccurrencesInOtherCases;
|
||||
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult;
|
||||
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevSeenAnalysisResult;
|
||||
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevUnseenAnalysisResult;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.DataArtifactIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.DataArtifact;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.OsAccount;
|
||||
import org.sleuthkit.datamodel.OsAccountManager;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
* RJCTODO
|
||||
*
|
||||
* NOTE TO REVIEWER:
|
||||
*
|
||||
* This is a placeholder data artifact ingest module that counts the number of
|
||||
* data artifacts it processes and posts the final count to the ingest inbox.
|
||||
* The guts of the module will be supplied by a later PR.
|
||||
* A data artifact ingest module that adds correlation attributes for data
|
||||
* artifacts and OS accounts to the central repository and makes analysis
|
||||
* results based on previous occurrences. When the ingest job is completed,
|
||||
* ensures the data source in the central repository has hash values that match
|
||||
* those in the case database.
|
||||
*/
|
||||
public class CentralRepoDataArtifactIngestModule implements DataArtifactIngestModule {
|
||||
|
||||
private final AtomicLong artifactCounter = new AtomicLong();
|
||||
private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName());
|
||||
private final boolean flagNotableItems;
|
||||
private final boolean flagPrevSeenDevices;
|
||||
private final boolean flagUniqueArtifacts;
|
||||
private final boolean saveCorrAttrInstances;
|
||||
private final Set<String> corrAttrValuesAlreadyProcessed;
|
||||
private CentralRepository centralRepo;
|
||||
private IngestJobContext context;
|
||||
|
||||
/**
|
||||
* Constructs a data artifact ingest module that adds correlation attributes
|
||||
* for data artifacts and OS accounts to the central repository and makes
|
||||
* analysis results based on previous occurrences. When the ingest job is
|
||||
* completed, ensures the data source in the central repository has hash
|
||||
* values that match those in the case database.
|
||||
*
|
||||
* @param settings The ingest job settings for this module.
|
||||
*/
|
||||
CentralRepoDataArtifactIngestModule(IngestSettings settings) {
|
||||
flagNotableItems = settings.isFlagTaggedNotableItems();
|
||||
flagPrevSeenDevices = settings.isFlagPreviousDevices();
|
||||
flagUniqueArtifacts = settings.isFlagUniqueArtifacts();
|
||||
saveCorrAttrInstances = settings.shouldCreateCorrelationProperties();
|
||||
corrAttrValuesAlreadyProcessed = new LinkedHashSet<>();
|
||||
}
|
||||
|
||||
@NbBundle.Messages({
"CentralRepoIngestModule_crNotEnabledErrMsg=Central repository required, but not enabled",
"CentralRepoIngestModule_crInaccessibleErrMsg=Error accessing central repository",
"CentralRepoIngestModule_noCurrentCaseErrMsg=Error getting current case",
"CentralRepoIngestModule_crDatabaseTypeMismatch=Multi-user cases require a PostgreSQL central repository"
|
||||
})
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModuleException {
|
||||
this.context = context;
|
||||
|
||||
if (!CentralRepository.isEnabled()) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg()); // May be displayed to user.
|
||||
}
|
||||
|
||||
try {
|
||||
centralRepo = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex);
|
||||
}
|
||||
|
||||
/*
|
||||
* Don't allow a SQLite central repository to be used for a multi-user
|
||||
* case.
|
||||
*/
|
||||
try {
|
||||
Case currentCase = Case.getCurrentCaseThrows();
|
||||
if ((currentCase.getCaseType() == Case.CaseType.MULTI_USER_CASE) && (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crDatabaseTypeMismatch());
|
||||
}
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Translates the attributes of a data artifact into central repository
|
||||
* correlation attributes and uses them to create analysis results and new
|
||||
* central repository correlation attribute instances, depending on ingest
|
||||
* job settings.
|
||||
*
|
||||
* @param artifact The data artifact.
|
||||
*
|
||||
* @return An ingest module process result.
|
||||
*/
|
||||
@Override
|
||||
public ProcessResult process(DataArtifact artifact) {
|
||||
artifactCounter.incrementAndGet();
|
||||
if (flagNotableItems || flagPrevSeenDevices || flagUniqueArtifacts || saveCorrAttrInstances) {
|
||||
for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(artifact)) {
|
||||
if (corrAttrValuesAlreadyProcessed.add(corrAttr.toString())) {
|
||||
makeAnalysisResults(artifact, corrAttr);
|
||||
if (saveCorrAttrInstances) {
|
||||
try {
|
||||
centralRepo.addAttributeInstanceBulk(corrAttr);
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s' (job ID=%d)", corrAttr, artifact, context.getJobId()), ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return ProcessResult.OK;
|
||||
}
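/*
 * Editorial note: per the DataArtifactIngestModule interface used here, process() is
 * called once for each data artifact posted during the ingest job. The string form of
 * each correlation attribute (type plus value) serves as the de-duplication key, so a
 * value is flagged and queued for saving at most once per job:
 *
 *     if (corrAttrValuesAlreadyProcessed.add(corrAttr.toString())) { ... }
 *
 * Instances queued with addAttributeInstanceBulk() are committed in shutDown().
 */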
|
||||
|
||||
/**
|
||||
* Makes analysis results for a data artifact based on previous occurrences,
|
||||
* if any, of a correlation attribute.
|
||||
*
|
||||
* @param artifact The data artifact.
|
||||
* @param corrAttr A correlation attribute for the data artifact.
|
||||
*/
|
||||
private void makeAnalysisResults(DataArtifact artifact, CorrelationAttributeInstance corrAttr) {
|
||||
List<CorrelationAttributeInstance> previousOccurrences = null;
|
||||
if (flagNotableItems) {
|
||||
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
|
||||
if (!previousOccurrences.isEmpty()) {
|
||||
Set<String> previousCases = new HashSet<>();
|
||||
for (CorrelationAttributeInstance occurrence : previousOccurrences) {
|
||||
if (occurrence.getKnownStatus() == TskData.FileKnown.BAD) {
|
||||
previousCases.add(occurrence.getCorrelationCase().getDisplayName());
|
||||
}
|
||||
}
|
||||
if (!previousCases.isEmpty()) {
|
||||
makePrevNotableAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (flagPrevSeenDevices
|
||||
&& (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.MAC_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.EMAIL_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.PHONE_TYPE_ID)) {
|
||||
if (previousOccurrences == null) {
|
||||
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
|
||||
}
|
||||
if (!previousOccurrences.isEmpty()) {
|
||||
Set<String> previousCases = getPreviousCases(previousOccurrences);
|
||||
if (!previousCases.isEmpty()) {
|
||||
makePrevSeenAnalysisResult(artifact, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (flagUniqueArtifacts
|
||||
&& (corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|
||||
|| corrAttr.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {
|
||||
if (previousOccurrences == null) {
|
||||
previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
|
||||
}
|
||||
if (previousOccurrences.isEmpty()) {
|
||||
makePrevUnseenAnalysisResult(artifact, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a unique set of previous cases, represented by their names, from a
|
||||
* list of previous occurrences of correlation attributes.
|
||||
*
|
||||
* @param previousOccurrences The correlation attributes.
|
||||
*
|
||||
* @return The names of the previous cases.
|
||||
*/
|
||||
private Set<String> getPreviousCases(List<CorrelationAttributeInstance> previousOccurrences) {
|
||||
Set<String> previousCases = new HashSet<>();
|
||||
for (CorrelationAttributeInstance occurrence : previousOccurrences) {
|
||||
previousCases.add(occurrence.getCorrelationCase().getDisplayName());
|
||||
}
|
||||
return previousCases;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutDown() {
|
||||
IngestServices.getInstance().postMessage(IngestMessage.createMessage(
|
||||
IngestMessage.MessageType.INFO,
|
||||
CentralRepoIngestModuleFactory.getModuleName(),
|
||||
String.format("%d data artifacts processed", artifactCounter.get()))); //NON-NLS
|
||||
analyzeOsAccounts();
|
||||
if (saveCorrAttrInstances) {
|
||||
try {
|
||||
centralRepo.commitAttributeInstancesBulk();
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error doing final bulk commit of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
syncDataSourceHashes();
|
||||
}
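/*
 * Editorial note: as used here, addAttributeInstanceBulk() queues correlation
 * attribute instances and commitAttributeInstancesBulk() flushes the queued batch to
 * the central repository, which is why the final commit happens in shutDown().
 */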
|
||||
|
||||
/**
|
||||
* Queries the case database for any OS accounts associated with the data
|
||||
* source for the ingest job. The attributes of any OS account returned by
|
||||
* the query are translated into central repository correlation attributes
|
||||
* and used to create analysis results and new central repository
|
||||
* correlation attribute instances, depending on ingest job settings.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"CentralRepoIngestModule_prevSeenOsAcctSetName=Users seen in previous cases",
|
||||
"CentralRepoIngestModule_prevSeenOsAcctConfig=Previously Seen Users (Central Repository)"
|
||||
})
|
||||
private void analyzeOsAccounts() {
|
||||
if (saveCorrAttrInstances || flagPrevSeenDevices) {
|
||||
try {
|
||||
OsAccountManager osAccountMgr = Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager();
|
||||
List<OsAccount> osAccounts = osAccountMgr.getOsAccountsByDataSourceObjId(context.getDataSource().getId());
|
||||
for (OsAccount osAccount : osAccounts) {
|
||||
for (CorrelationAttributeInstance corrAttr : CorrelationAttributeUtil.makeCorrAttrsToSave(osAccount, context.getDataSource())) {
|
||||
if (flagPrevSeenDevices) {
|
||||
makeAnalysisResults(osAccount, corrAttr);
|
||||
}
|
||||
if (saveCorrAttrInstances) {
|
||||
try {
|
||||
centralRepo.addAttributeInstanceBulk(corrAttr);
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error adding correlation attribute '%s' to central repository for '%s'(job ID=%d)", corrAttr, osAccount, context.getJobId()), ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (NoCurrentCaseException | TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error getting OS accounts for data source '%s' (job ID=%d)", context.getDataSource(), context.getJobId()), ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Makes analysis results for an OS Account based on previous occurrences,
|
||||
* if any, of a correlation attribute.
|
||||
*
|
||||
* @param osAccount The OS account.
* @param corrAttr  A correlation attribute for the OS account.
|
||||
*/
|
||||
private void makeAnalysisResults(OsAccount osAccount, CorrelationAttributeInstance corrAttr) {
|
||||
if (flagPrevSeenDevices) {
|
||||
List<CorrelationAttributeInstance> previousOccurrences = getOccurrencesInOtherCases(corrAttr, context.getJobId());
|
||||
if (!previousOccurrences.isEmpty()) {
|
||||
Set<String> previousCases = getPreviousCases(previousOccurrences);
|
||||
if (!previousCases.isEmpty()) {
|
||||
makePrevSeenAnalysisResult(osAccount, previousCases, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), context.getDataSource().getId(), context.getJobId());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures the data source in the central repository has hash values that
|
||||
* match those in the case database.
|
||||
*/
|
||||
private void syncDataSourceHashes() {
|
||||
if (!(context.getDataSource() instanceof Image)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
Case currentCase = Case.getCurrentCaseThrows();
|
||||
CorrelationCase correlationCase = centralRepo.getCase(currentCase);
|
||||
if (correlationCase == null) {
|
||||
correlationCase = centralRepo.newCase(currentCase);
|
||||
}
|
||||
|
||||
CorrelationDataSource correlationDataSource = centralRepo.getDataSource(correlationCase, context.getDataSource().getId());
|
||||
if (correlationDataSource == null) {
|
||||
correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, context.getDataSource());
|
||||
}
|
||||
|
||||
Image image = (Image) context.getDataSource();
|
||||
String imageMd5Hash = image.getMd5();
|
||||
if (imageMd5Hash == null) {
|
||||
imageMd5Hash = "";
|
||||
}
|
||||
String crMd5Hash = correlationDataSource.getMd5();
|
||||
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
|
||||
correlationDataSource.setMd5(imageMd5Hash);
|
||||
}
|
||||
|
||||
String imageSha1Hash = image.getSha1();
|
||||
if (imageSha1Hash == null) {
|
||||
imageSha1Hash = "";
|
||||
}
|
||||
String crSha1Hash = correlationDataSource.getSha1();
|
||||
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
|
||||
correlationDataSource.setSha1(imageSha1Hash);
|
||||
}
|
||||
|
||||
String imageSha256Hash = image.getSha256();
|
||||
if (imageSha256Hash == null) {
|
||||
imageSha256Hash = "";
|
||||
}
|
||||
String crSha256Hash = correlationDataSource.getSha256();
|
||||
if (StringUtils.equals(imageSha256Hash, crSha256Hash) == false) {
|
||||
correlationDataSource.setSha256(imageSha256Hash);
|
||||
}
|
||||
|
||||
} catch (CentralRepoException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error fetching data from the central repository for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex);
|
||||
} catch (NoCurrentCaseException | TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, String.format("Error fetching data from the case database for data source '%s' (job ID=%d)", context.getDataSource().getName(), context.getJobId()), ex);
|
||||
}
|
||||
}
|
||||
|
||||
}
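/*
 * Editorial sketch (hypothetical wiring, not part of this diff): instances of the
 * module above are expected to be produced by CentralRepoIngestModuleFactory through
 * its data artifact ingest module hook, roughly:
 *
 *     // method name shown for illustration only; see the factory for the actual API
 *     public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) {
 *         return new CentralRepoDataArtifactIngestModule((IngestSettings) settings);
 *     }
 */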
|
||||
|
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Central Repository
|
||||
*
|
||||
* Copyright 2011-2021 Basis Technology Corp.
|
||||
* Copyright 2018-2021 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,11 +18,10 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
@ -32,95 +31,52 @@ import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatforms;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoDbManager;
|
||||
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
|
||||
import org.sleuthkit.autopsy.core.RuntimeProperties;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_PREVIOUSLY_NOTABLE;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
|
||||
import org.sleuthkit.datamodel.HashUtility;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
|
||||
import org.sleuthkit.datamodel.Score;
|
||||
import static org.sleuthkit.autopsy.centralrepository.ingestmodule.CentralRepoIngestModuleUtils.makePrevNotableAnalysisResult;
|
||||
|
||||
/**
|
||||
* Ingest module for inserting entries into the Central Repository database on
|
||||
* ingest of a data source
|
||||
* A file ingest module that adds correlation attributes for files to the
|
||||
* central repository, and makes previously notable analysis results for files
|
||||
* marked as notable in other cases.
|
||||
*/
|
||||
@Messages({"CentralRepoIngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
|
||||
"CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
|
||||
final class CentralRepoIngestModule implements FileIngestModule {
|
||||
|
||||
private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
|
||||
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false;
|
||||
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false;
|
||||
static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false;
|
||||
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
|
||||
|
||||
private final static Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName());
|
||||
private final IngestServices services = IngestServices.getInstance();
|
||||
private static final Logger logger = Logger.getLogger(CentralRepoIngestModule.class.getName());
|
||||
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
|
||||
private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter();
|
||||
private long jobId;
|
||||
private CorrelationCase eamCase;
|
||||
private CorrelationDataSource eamDataSource;
|
||||
private final boolean flagNotableItems;
|
||||
private final boolean saveCorrAttrInstances;
|
||||
private CorrelationAttributeInstance.Type filesType;
|
||||
private final boolean flagTaggedNotableItems;
|
||||
private final boolean flagPreviouslySeenDevices;
|
||||
private Blackboard blackboard;
|
||||
private final boolean createCorrelationProperties;
|
||||
private final boolean flagUniqueArtifacts;
|
||||
private IngestJobContext context;
|
||||
|
||||
private CentralRepository centralRepo;
|
||||
|
||||
/**
|
||||
* Instantiate the Central Repository ingest module.
|
||||
* Constructs a file ingest module that adds correlation attributes for
|
||||
* files to the central repository, and makes previously notable analysis
|
||||
* results for files marked as notable in other cases.
|
||||
*
|
||||
* @param settings The ingest settings for the module instance.
|
||||
* @param settings The ingest job settings.
|
||||
*/
|
||||
CentralRepoIngestModule(IngestSettings settings) {
|
||||
flagTaggedNotableItems = settings.isFlagTaggedNotableItems();
|
||||
flagPreviouslySeenDevices = settings.isFlagPreviousDevices();
|
||||
createCorrelationProperties = settings.shouldCreateCorrelationProperties();
|
||||
flagUniqueArtifacts = settings.isFlagUniqueArtifacts();
|
||||
}
|
||||
|
||||
flagNotableItems = settings.isFlagTaggedNotableItems();
|
||||
saveCorrAttrInstances = settings.shouldCreateCorrelationProperties();
|
||||
}
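/*
 * Editorial note: in this revision the file ingest module keeps only the "flag
 * notable items" and "save correlation properties" settings; flagging of previously
 * seen devices/accounts and previously unseen applications and domains is handled by
 * the new CentralRepoDataArtifactIngestModule introduced in this change set.
 */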
|
||||
|
||||
@Override
|
||||
public ProcessResult process(AbstractFile abstractFile) {
|
||||
if (CentralRepository.isEnabled() == false) {
|
||||
/*
|
||||
* Not signaling an error for now. This is a workaround for the way
|
||||
* all newly didscovered ingest modules are automatically anabled.
|
||||
*
|
||||
* TODO (JIRA-2731): Add isEnabled API for ingest modules.
|
||||
*/
|
||||
if (!flagNotableItems && !saveCorrAttrInstances) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
try {
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) {
|
||||
if (!filesType.isEnabled()) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
@ -128,290 +84,119 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
CentralRepository dbManager;
|
||||
try {
|
||||
dbManager = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
// only continue if we are correlating filesType
|
||||
if (!filesType.isEnabled()) {
|
||||
if (!CorrelationAttributeUtil.isSupportedAbstractFileType(abstractFile)) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
// get the hash because we're going to correlate it
|
||||
|
||||
/*
|
||||
* The correlation attribute value for a file is its MD5 hash. This
|
||||
* module cannot do anything with a file if the hash calculation has not
|
||||
* been done, but the decision has been made to not do a hash
|
||||
* calculation here if the file hashing and lookup module is not in this
|
||||
* pipeline ahead of this module (affirmed per BC, 11/8/21).
|
||||
*/
|
||||
String md5 = abstractFile.getMd5Hash();
|
||||
if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
/*
|
||||
* Search the central repo to see if this file was previously marked as
|
||||
* being bad. Create artifact if it was.
|
||||
*/
|
||||
if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) {
|
||||
if (flagNotableItems) {
|
||||
try {
|
||||
TimingMetric timingMetric = HealthMonitor.getTimingMetric("Central Repository: Notable artifact query");
|
||||
List<String> caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
|
||||
Set<String> otherCases = new HashSet<>();
|
||||
otherCases.addAll(centralRepo.getListCasesHavingArtifactInstancesKnownBad(filesType, md5));
|
||||
HealthMonitor.submitTimingMetric(timingMetric);
|
||||
if (!caseDisplayNamesList.isEmpty()) {
|
||||
postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList, filesType, md5);
|
||||
if (!otherCases.isEmpty()) {
|
||||
makePrevNotableAnalysisResult(abstractFile, otherCases, filesType, md5, context.getDataSource().getId(), context.getJobId());
|
||||
}
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
logger.log(Level.INFO, "Error searching database for artifact.", ex); // NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
}
|
||||
|
||||
// insert this file into the central repository
|
||||
if (createCorrelationProperties) {
|
||||
try {
|
||||
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
|
||||
filesType,
|
||||
md5,
|
||||
eamCase,
|
||||
eamDataSource,
|
||||
abstractFile.getParentPath() + abstractFile.getName(),
|
||||
null,
|
||||
TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database.
|
||||
,
|
||||
abstractFile.getId());
|
||||
dbManager.addAttributeInstanceBulk(cefi);
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
logger.log(Level.INFO, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
if (saveCorrAttrInstances) {
|
||||
List<CorrelationAttributeInstance> corrAttrs = CorrelationAttributeUtil.makeCorrAttrsToSave(abstractFile);
|
||||
for (CorrelationAttributeInstance corrAttr : corrAttrs) {
|
||||
try {
|
||||
centralRepo.addAttributeInstanceBulk(corrAttr);
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutDown() {
|
||||
IngestEventsListener.decrementCorrelationEngineModuleCount();
|
||||
|
||||
if ((CentralRepository.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) {
|
||||
return;
|
||||
if (refCounter.decrementAndGet(context.getJobId()) == 0) {
|
||||
try {
|
||||
centralRepo.commitAttributeInstancesBulk();
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Error committing bulk insert of correlation attributes (job ID=%d)", context.getJobId()), ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
CentralRepository dbManager;
|
||||
try {
|
||||
dbManager = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
dbManager.commitAttributeInstancesBulk();
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS
|
||||
}
|
||||
try {
|
||||
Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamDataSource);
|
||||
logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS
|
||||
}
|
||||
|
||||
// TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk.
|
||||
refCounter.decrementAndGet(jobId);
|
||||
}
|
||||
|
||||
// see ArtifactManagerTimeTester for details
|
||||
}
|
||||
|
||||
@Messages({
|
||||
"CentralRepoIngestModule.notfyBubble.title=Central Repository Not Initialized",
|
||||
"CentralRepoIngestModule.errorMessage.isNotEnabled=Central repository settings are not initialized, cannot run Central Repository ingest module."
|
||||
"CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg=Correlation attribute type for files not found in the central repository",
|
||||
"CentralRepoIngestModule_cannotGetCrCaseErrMsg=Case not present in the central repository",
|
||||
"CentralRepoIngestModule_cannotGetCrDataSourceErrMsg=Data source not present in the central repository"
|
||||
})
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModuleException {
|
||||
this.context = context;
|
||||
|
||||
IngestEventsListener.incrementCorrelationEngineModuleCount();
|
||||
|
||||
if (!CentralRepository.isEnabled()) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crNotEnabledErrMsg());
|
||||
}
|
||||
|
||||
try {
|
||||
centralRepo = CentralRepository.getInstance();
|
||||
} catch (CentralRepoException ex) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_crInaccessibleErrMsg(), ex);
|
||||
}
|
||||
|
||||
/*
|
||||
* Tell the IngestEventsListener to flag notable items based on the
|
||||
* current module's configuration. This is a work around for the lack of
|
||||
* an artifacts pipeline. Note that this can be changed by another
|
||||
* module instance. All modules are affected by the value. While not
|
||||
* ideal, this will be good enough until a better solution can be
|
||||
* posited.
|
||||
*
|
||||
* Note: Flagging cannot be disabled if any other instances of the
|
||||
* Central Repository module are running. This restriction is to prevent
|
||||
* missing results in the case where the first module is flagging
|
||||
* notable items, and the proceeding module (with flagging disabled)
|
||||
* causes the first to stop flagging.
|
||||
* Make sure the correlation attribute type definition is in the central
|
||||
* repository. Currently (11/8/21) it is cached, but there is no harm in
|
||||
* saving it here for use in process().
|
||||
*/
|
||||
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) {
|
||||
IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems);
|
||||
}
|
||||
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagSeenDevices()) {
|
||||
IngestEventsListener.setFlagSeenDevices(flagPreviouslySeenDevices);
|
||||
}
|
||||
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.shouldCreateCrProperties()) {
|
||||
IngestEventsListener.setCreateCrProperties(createCorrelationProperties);
|
||||
}
|
||||
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) {
|
||||
IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts);
|
||||
}
|
||||
|
||||
if (CentralRepository.isEnabled() == false) {
|
||||
/*
|
||||
* Not throwing the customary exception for now. This is a
|
||||
* workaround for the way all newly discovered ingest modules are
* automatically enabled.
|
||||
*
|
||||
* TODO (JIRA-2731): Add isEnabled API for ingest modules.
|
||||
*/
|
||||
if (RuntimeProperties.runningWithGUI()) {
|
||||
if (1L == warningMsgRefCounter.incrementAndGet(jobId)) {
|
||||
MessageNotifyUtil.Notify.warn(Bundle.CentralRepoIngestModule_notfyBubble_title(), Bundle.CentralRepoIngestModule_errorMessage_isNotEnabled());
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
Case autopsyCase;
|
||||
try {
|
||||
autopsyCase = Case.getCurrentCaseThrows();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
|
||||
throw new IngestModuleException("Exception while getting open case.", ex);
|
||||
}
|
||||
|
||||
// Don't allow sqlite central repo databases to be used for multi user cases
|
||||
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
|
||||
&& (CentralRepoDbManager.getSavedDbChoice().getDbPlatform() == CentralRepoPlatforms.SQLITE)) {
|
||||
logger.log(Level.SEVERE, "Cannot run Central Repository ingest module on a multi-user case with a SQLite central repository.");
|
||||
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
|
||||
}
|
||||
jobId = context.getJobId();
|
||||
|
||||
CentralRepository centralRepoDb;
|
||||
try {
|
||||
centralRepoDb = CentralRepository.getInstance();
|
||||
filesType = centralRepo.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS
|
||||
throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_missingFileCorrAttrTypeErrMsg(), ex);
|
||||
}
|
||||
|
||||
try {
|
||||
filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttributeInstance.FILES_TYPE_ID);
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
|
||||
throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
|
||||
}
|
||||
|
||||
try {
|
||||
eamCase = centralRepoDb.getCase(autopsyCase);
|
||||
} catch (CentralRepoException ex) {
|
||||
throw new IngestModuleException("Unable to get case from central repository database ", ex);
|
||||
}
|
||||
|
||||
try {
|
||||
eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource());
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS
|
||||
throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS
|
||||
}
|
||||
// TODO: once we implement a shared cache, load/init it here w/ syncronized and define reference counter
|
||||
// if we are the first thread / module for this job, then make sure the case
|
||||
// and image exist in the DB before we associate artifacts with it.
|
||||
if (refCounter.incrementAndGet(jobId)
|
||||
== 1) {
|
||||
// ensure we have this data source in the EAM DB
|
||||
/*
|
||||
* The first module instance started for this job makes sure the current
|
||||
* case and data source are in the central repository. Currently
|
||||
* (11/8/21), these are cached upon creation / first retreival.
|
||||
*/
|
||||
if (refCounter.incrementAndGet(context.getJobId()) == 1) {
|
||||
Case currentCase;
|
||||
try {
|
||||
if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
|
||||
centralRepoDb.newDataSource(eamDataSource);
|
||||
}
|
||||
currentCase = Case.getCurrentCaseThrows();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_noCurrentCaseErrMsg(), ex);
|
||||
}
|
||||
|
||||
CorrelationCase centralRepoCase;
|
||||
try {
|
||||
centralRepoCase = centralRepo.getCase(currentCase);
|
||||
} catch (CentralRepoException ex) {
|
||||
logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS
|
||||
throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS
|
||||
throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrCaseErrMsg(), ex);
|
||||
}
|
||||
|
||||
}
|
||||
}
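The incrementAndGet(...) == 1 checks above (from the old and new versions of the method) use the reference-counting idiom that IngestModuleReferenceCounter supports, so that per-job setup runs exactly once even when several module instances start in parallel. A minimal sketch of the idiom with a plain ConcurrentHashMap, not the actual Autopsy class:

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

// Illustration only: a per-job "do this once" gate, keyed by ingest job ID.
class PerJobOnceSketch {
    private static final ConcurrentHashMap<Long, AtomicLong> counters = new ConcurrentHashMap<>();

    static boolean isFirstInstanceForJob(long jobId) {
        AtomicLong counter = counters.computeIfAbsent(jobId, id -> new AtomicLong(0));
        return counter.incrementAndGet() == 1; // only the first caller for this job sees 1
    }
}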

/**
* Post a new "previously seen" artifact for the file marked bad.
*
* @param abstractFile The file from which to create an artifact.
* @param caseDisplayNames Case names to be added to a TSK_COMMON attribute.
*/
private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames, CorrelationAttributeInstance.Type aType, String value) {
String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously marked as notable in cases " + prevCases;
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
try {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search
blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
// send inbox message
sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash(), caseDisplayNames);
try {
CorrelationDataSource.fromTSKDataSource(centralRepoCase, context.getDataSource());
} catch (CentralRepoException ex) {
throw new IngestModuleException(Bundle.CentralRepoIngestModule_cannotGetCrDataSourceErrMsg(), ex);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
}
}

@Messages({
"CentralRepoIngestModule_notable_message_header=<html>A file in this data source was previously seen and tagged as Notable.<br>",
"CentralRepoIngestModel_name_header=Name:<br>",
"CentralRepoIngestModel_previous_case_header=<br>Previous Cases:<br>",
"# {0} - Name of file that is Notable",
"CentralRepoIngestModule_postToBB_knownBadMsg=Notable: {0}"
})

/**
* Post a message to the ingest inbox alerting the user that a bad file was
* found.
*
* @param artifact badFile Blackboard Artifact
* @param name badFile's name
* @param md5Hash badFile's md5 hash
* @param caseDisplayNames List of cases that the artifact appears in.
*/
private void sendBadFileInboxMessage(BlackboardArtifact artifact, String name, String md5Hash, List<String> caseDisplayNames) {
StringBuilder detailsSb = new StringBuilder(1024);

detailsSb.append(Bundle.CentralRepoIngestModule_notable_message_header()).append(Bundle.CentralRepoIngestModel_name_header());
detailsSb.append(name).append(Bundle.CentralRepoIngestModel_previous_case_header());
for (String str : caseDisplayNames) {
detailsSb.append(str).append("<br>");
}
detailsSb.append("</html>");
services.postMessage(IngestMessage.createDataMessage(CentralRepoIngestModuleFactory.getModuleName(),
Bundle.CentralRepoIngestModule_postToBB_knownBadMsg(name),
detailsSb.toString(),
name + md5Hash,
artifact));
}
}

@ -128,7 +128,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {

@Override
public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) {
return new CentralRepoDataArtifactIngestModule();
return new CentralRepoDataArtifactIngestModule((IngestSettings) settings);
}

}
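The factory now hands its per-job settings to the new data artifact module via a cast to IngestSettings. A hedged sketch of a common defensive variant of that cast (hypothetical, not what the commit does): fall back to default settings when an unexpected settings type is supplied.

// Hypothetical defensive variant of the cast shown above; the commit casts directly.
@Override
public DataArtifactIngestModule createDataArtifactIngestModule(IngestModuleIngestJobSettings settings) {
    IngestSettings crSettings = (settings instanceof IngestSettings)
            ? (IngestSettings) settings
            : new IngestSettings(); // use defaults when a different settings type is passed in
    return new CentralRepoDataArtifactIngestModule(crSettings);
}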

@ -0,0 +1,336 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2021-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.AnalysisResult;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CORRELATION_VALUE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_OTHER_CASES;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.TskCoreException;

/**
* Utility methods shared by the central repository ingest modules.
*/
class CentralRepoIngestModuleUtils {

private static final Logger LOGGER = Logger.getLogger(CentralRepoDataArtifactIngestModule.class.getName());
private static final int MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10;
private static final int MAX_PREV_CASES_FOR_PREV_SEEN = 20;
private final static String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();

/**
* Gets any previous occurrences of a given correlation attribute in cases
* other than the current case.
*
* @param corrAttr The correlation attribute.
* @param ingestJobId The ingest job ID, used for logging.
*
* @return The other occurrences of the correlation attribute.
*/
static List<CorrelationAttributeInstance> getOccurrencesInOtherCases(CorrelationAttributeInstance corrAttr, long ingestJobId) {
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
try {
CentralRepository centralRepo = CentralRepository.getInstance();
previousOccurrences = centralRepo.getArtifactInstancesByTypeValue(corrAttr.getCorrelationType(), corrAttr.getCorrelationValue());
for (Iterator<CorrelationAttributeInstance> iterator = previousOccurrences.iterator(); iterator.hasNext();) {
CorrelationAttributeInstance prevOccurrence = iterator.next();
if (prevOccurrence.getCorrelationCase().getCaseUUID().equals(corrAttr.getCorrelationCase().getCaseUUID())) {
iterator.remove();
}
}
} catch (CorrelationAttributeNormalizationException ex) {
LOGGER.log(Level.SEVERE, String.format("Error normalizing correlation attribute value for '%s' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, String.format("Error getting previous occurrences of correlation attribute '%s' (job ID=%d)", corrAttr, ingestJobId), ex); // NON-NLS
}
return previousOccurrences;
}
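A caller typically reduces the returned instances to the set of other case names that the analysis-result helpers below expect. A sketch, assuming CorrelationCase exposes the case name via getDisplayName() (an assumption; that accessor is not shown in this diff):

// Sketch only; CorrelationCase.getDisplayName() is assumed here, not shown in this hunk.
List<CorrelationAttributeInstance> otherOccurrences =
        getOccurrencesInOtherCases(corrAttr, ingestJobId);
Set<String> otherCaseNames = otherOccurrences.stream()
        .map(occurrence -> occurrence.getCorrelationCase().getDisplayName())
        .collect(Collectors.toSet());
// otherCaseNames feeds makePrevNotableAnalysisResult() / makePrevSeenAnalysisResult() below.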

/**
* Makes a previously notable analysis result for a content.
*
* @param content The content.
* @param previousCases The names of the cases in which the artifact was
* deemed notable.
* @param corrAttrType The type of the matched correlation attribute.
* @param corrAttrValue The value of the matched correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_notableSetName=Previously Tagged As Notable (Central Repository)",
"# {0} - list of cases",
"CentralRepoIngestModule_notableJustification=Previously marked as notable in cases {0}"
})
static void makePrevNotableAnalysisResult(Content content, Set<String> previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
String prevCases = previousCases.stream().collect(Collectors.joining(","));
String justification = Bundle.CentralRepoIngestModule_notableJustification(prevCases);
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_notableSetName()),
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue),
new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases));
Optional<AnalysisResult> result = makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, attributes, "", Score.SCORE_NOTABLE, justification, dataSourceObjId, ingestJobId);
if (result.isPresent()) {
postNotableMessage(content, previousCases, corrAttrValue, result.get());
}
}

/**
* Makes a previously seen analysis result for a content, unless the content
* is too common.
*
* @param content The content.
* @param previousCases The names of the cases in which the artifact was
* previously seen.
* @param corrAttrType The type of the matched correlation attribute.
* @param corrAttrValue The value of the matched correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_prevSeenSetName=Previously Seen (Central Repository)",
"# {0} - list of cases",
"CentralRepoIngestModule_prevSeenJustification=Previously seen in cases {0}"
})
static void makePrevSeenAnalysisResult(Content content, Set<String> previousCases, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
Optional<Score> score = calculateScore(previousCases.size());
if (score.isPresent()) {
String prevCases = previousCases.stream().collect(Collectors.joining(","));
String justification = Bundle.CentralRepoIngestModule_prevSeenJustification(prevCases);
Collection<BlackboardAttribute> analysisResultAttributes = Arrays.asList(
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, Bundle.CentralRepoIngestModule_prevSeenSetName()),
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue),
new BlackboardAttribute(TSK_OTHER_CASES, MODULE_NAME, prevCases));
makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, analysisResultAttributes, "", score.get(), justification, dataSourceObjId, ingestJobId);
}
}

/**
* Makes a previously unseen analysis result for a content.
*
* @param content The content.
* @param corrAttrType The type of the new correlation attribute.
* @param corrAttrValue The value of the new correlation attribute.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_prevUnseenJustification=Previously seen in zero cases"
})
static void makePrevUnseenAnalysisResult(Content content, CorrelationAttributeInstance.Type corrAttrType, String corrAttrValue, long dataSourceObjId, long ingestJobId) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(TSK_CORRELATION_TYPE, MODULE_NAME, corrAttrType.getDisplayName()),
new BlackboardAttribute(TSK_CORRELATION_VALUE, MODULE_NAME, corrAttrValue));
makeAndPostAnalysisResult(content, BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, Bundle.CentralRepoIngestModule_prevUnseenJustification(), dataSourceObjId, ingestJobId);
}

/**
* Calculates a score based on the number of previous cases.
*
* @param numPreviousCases The number of previous cases.
*
* @return An Optional of a score, which will be empty if there is no score
* because the number of previous cases is too high, indicating a
* common and therefore uninteresting item.
*/
static Optional<Score> calculateScore(int numPreviousCases) {
Score score = null;
if (numPreviousCases <= MAX_PREV_CASES_FOR_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numPreviousCases > MAX_PREV_CASES_FOR_NOTABLE_SCORE && numPreviousCases <= MAX_PREV_CASES_FOR_PREV_SEEN) {
score = Score.SCORE_NONE;
}
return Optional.ofNullable(score);
}
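With the constants above (MAX_PREV_CASES_FOR_NOTABLE_SCORE = 10, MAX_PREV_CASES_FOR_PREV_SEEN = 20) the mapping works out as sketched below. This is an illustrative check one could place next to calculateScore(), not part of the commit:

// Expected mapping given the thresholds above:
//   0..10 previous cases  -> Score.SCORE_LIKELY_NOTABLE
//   11..20 previous cases -> Score.SCORE_NONE
//   21+ previous cases    -> Optional.empty() (too common; no analysis result is created)
static void checkScoreThresholds() {
    assert calculateScore(5).get() == Score.SCORE_LIKELY_NOTABLE;
    assert calculateScore(15).get() == Score.SCORE_NONE;
    assert !calculateScore(25).isPresent();
}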

/**
* Makes a new analysis result of a given type for a content and posts it to
* the blackboard.
*
* @param content The content.
* @param analysisResultType The type of analysis result to make.
* @param analysisResultAttrs The attributes of the new analysis result.
* @param configuration The configuration for the new analysis result.
* @param score The score for the new analysis result.
* @param justification The justification for the new analysis result.
* @param dataSourceObjId The data source object ID.
* @param ingestJobId The ingest job ID.
*
* @return An Optional of the analysis result; empty if the result already
* existed or an error prevented creation of the analysis result.
*/
private static Optional<AnalysisResult> makeAndPostAnalysisResult(Content content, BlackboardArtifact.Type analysisResultType, Collection<BlackboardAttribute> analysisResultAttrs, String configuration, Score score, String justification, long dataSourceObjId, long ingestJobId) {
AnalysisResult analysisResult = null;
try {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
if (!blackboard.artifactExists(content, analysisResultType, analysisResultAttrs)) {
analysisResult = content.newAnalysisResult(analysisResultType, score, null, configuration, justification, analysisResultAttrs, dataSourceObjId).getAnalysisResult();
try {
blackboard.postArtifact(analysisResult, MODULE_NAME, ingestJobId);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, String.format("Error posting analysis result '%s' to blackboard for content '%s' (job ID=%d)", analysisResult, content, ingestJobId), ex); //NON-NLS
}
}
} catch (NoCurrentCaseException | TskCoreException ex) {
LOGGER.log(Level.SEVERE, String.format("Error creating %s analysis result for content '%s' (job ID=%d)", analysisResultType, content, ingestJobId), ex); // NON-NLS
}
return Optional.ofNullable(analysisResult);
}

/**
* Posts a message to the ingest messages inbox to notify the user that a
* notable content has been found, i.e., a previously notable analysis
* result has been created.
*
* @param content The notable content.
* @param otherCases The other cases in which the content was marked as
* notable.
* @param corrAttrValue The correlation attribute value used to identify
* the content, used by the ingest inbox as a unique
* key for message grouping.
* @param analysisResult The previously notable analysis result.
*/
@NbBundle.Messages({
"# {0} - Name of item that is Notable",
"CentralRepoIngestModule_notable_inbox_msg_subject=Notable: {0}"
})
private static void postNotableMessage(Content content, Set<String> otherCases, String corrAttrValue, AnalysisResult analysisResult) {
String msgSubject = null;
String msgDetails = null;
String msgKey = corrAttrValue;
if (content instanceof AbstractFile) {
AbstractFile file = (AbstractFile) content;
msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(file.getName());
msgDetails = makeNotableFileMessage(file, otherCases);
} else if (content instanceof DataArtifact) {
DataArtifact artifact = (DataArtifact) content;
msgSubject = Bundle.CentralRepoIngestModule_notable_inbox_msg_subject(artifact.getDisplayName());
msgDetails = makeNotableDataArtifactMessage(artifact, corrAttrValue, otherCases);
} else {
LOGGER.log(Level.SEVERE, "Unsupported Content, cannot post ingest inbox message");
}
if (msgSubject != null && msgDetails != null) {
IngestServices.getInstance().postMessage(
IngestMessage.createDataMessage(
MODULE_NAME,
msgSubject,
msgDetails,
msgKey,
analysisResult));
}
}

/**
* Makes an ingest inbox message for a notable file. Uses similar HTML
* markup as is used for this purpose by the hash lookup ingest module.
*
* @param file The notable file.
* @param otherCases The cases other than the current case in which the file
* was marked as notable.
*
* @return The message.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_filename_inbox_msg_header=File Name",
"CentralRepoIngestModule_md5Hash_inbox_msg_header=MD5 Hash",
"CentralRepoIngestModule_prev_cases_inbox_msg_header=Previous Cases"
})
private static String makeNotableFileMessage(AbstractFile file, Set<String> otherCases) {
StringBuilder message = new StringBuilder(1024);
message.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_filename_inbox_msg_header(), file.getName());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_md5Hash_inbox_msg_header(), file.getMd5Hash());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(",")));
message.append("</table>"); //NON-NLS
return message.toString();
}

/**
* Makes an ingest inbox message for a notable data artifact. Uses similar
* HTML markup as is used for this purpose by the hash lookup ingest module.
*
* @param artifact The data artifact.
* @param corrAttrValue The notable attribute (correlation attribute value).
* @param otherCases The cases other than the current case in which the
* artifact was marked as notable.
*
* @return The message.
*/
@NbBundle.Messages({
"CentralRepoIngestModule_artifact_type_inbox_msg_header=Artifact Type",
"CentralRepoIngestModule_notable_attr_inbox_msg_header=Notable Attribute"
})
private static String makeNotableDataArtifactMessage(DataArtifact artifact, String corrAttrValue, Set<String> otherCases) {
StringBuilder message = new StringBuilder(1024);
message.append("<table border='0' cellpadding='4' width='280'>"); //NON-NLS
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_artifact_type_inbox_msg_header(), artifact.getDisplayName());
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_notable_attr_inbox_msg_header(), corrAttrValue);
addTableRowMarkup(message, Bundle.CentralRepoIngestModule_prev_cases_inbox_msg_header(), otherCases.stream().collect(Collectors.joining(",")));
message.append("</table>"); //NON-NLS
return message.toString();
}

/**
* Adds a table row to a notable item message (HTML).
*
* @param message The string builder for the message.
* @param headerText The table row header text.
* @param cellText The table row cell text.
*/
private static void addTableRowMarkup(StringBuilder message, String headerText, String cellText) {
message.append("<tr>"); //NON-NLS
message.append("<th>").append(headerText).append("</th>"); //NON-NLS
message.append("<td>").append(cellText).append("</td>"); //NON-NLS
message.append("</tr>"); //NON-NLS
}
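For reference, one addTableRowMarkup call appends a single header/value row; a small illustrative usage (not part of the commit), with the HTML it produces shown in a comment:

// Illustrative use of addTableRowMarkup: one call appends one table row.
StringBuilder message = new StringBuilder();
addTableRowMarkup(message, "MD5 Hash", "d41d8cd98f00b204e9800998ecf8427e");
// message now holds: <tr><th>MD5 Hash</th><td>d41d8cd98f00b204e9800998ecf8427e</td></tr>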

/*
* Prevents instantiation of this utility class.
*/
private CentralRepoIngestModuleUtils() {
}

}
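Putting the pieces together, an ingest module using this utility class presumably follows a flow like the sketch below for each correlation attribute it derives. This is illustrative only, not the commit's code: corrAttr, content, dataSourceObjId, and ingestJobId are assumed to be in scope, otherCaseNames comes from the earlier sketch after getOccurrencesInOtherCases(), and makePrevNotableAnalysisResult() is the analogous call when one of the other occurrences was tagged notable.

// Illustrative flow only, not the commit's code.
if (otherCaseNames.isEmpty()) {
    // Never seen in another case: flag as previously unseen.
    makePrevUnseenAnalysisResult(content, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), dataSourceObjId, ingestJobId);
} else {
    // Seen before: the score (or no result at all) depends on how common the value is; see calculateScore().
    makePrevSeenAnalysisResult(content, otherCaseNames, corrAttr.getCorrelationType(), corrAttr.getCorrelationValue(), dataSourceObjId, ingestJobId);
}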

@ -26,7 +26,11 @@ import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
final class IngestSettings implements IngestModuleIngestJobSettings {

private static final long serialVersionUID = 1L;

static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = false;
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = false;
static final boolean DEFAULT_FLAG_UNIQUE_DEVICES = false;
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;

private final boolean flagTaggedNotableItems;
private final boolean flagPreviousDevices;
private final boolean createCorrelationProperties;
@ -36,10 +40,10 @@ final class IngestSettings implements IngestModuleIngestJobSettings {
* Instantiate the ingest job settings with default values.
*/
IngestSettings() {
this.flagTaggedNotableItems = CentralRepoIngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS;
this.flagPreviousDevices = CentralRepoIngestModule.DEFAULT_FLAG_PREVIOUS_DEVICES;
this.createCorrelationProperties = CentralRepoIngestModule.DEFAULT_CREATE_CR_PROPERTIES;
this.flagUniqueArtifacts = CentralRepoIngestModule.DEFAULT_FLAG_UNIQUE_DEVICES;
this.flagTaggedNotableItems = DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS;
this.flagPreviousDevices = DEFAULT_FLAG_PREVIOUS_DEVICES;
this.createCorrelationProperties = DEFAULT_CREATE_CR_PROPERTIES;
this.flagUniqueArtifacts = DEFAULT_FLAG_UNIQUE_DEVICES;
}
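The hunk moves the defaults into IngestSettings itself but does not show the full-argument constructor; presumably the settings panel builds an instance along these lines (hypothetical sketch, argument order and names assumed, not shown in this diff):

// Hypothetical sketch only; the actual constructor signature is not shown in this hunk.
IngestSettings(boolean flagTaggedNotableItems, boolean flagPreviousDevices, boolean createCorrelationProperties, boolean flagUniqueArtifacts) {
    this.flagTaggedNotableItems = flagTaggedNotableItems;
    this.flagPreviousDevices = flagPreviousDevices;
    this.createCorrelationProperties = createCorrelationProperties;
    this.flagUniqueArtifacts = flagUniqueArtifacts;
}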

/**

@ -33,10 +33,11 @@ public interface FileIngestModule extends IngestModule {
* IMPORTANT: In addition to returning ProcessResult.OK or
* ProcessResult.ERROR, modules should log all errors using methods provided
* by the org.sleuthkit.autopsy.coreutils.Logger class. Log messages should
* include the name and object ID of the data being processed. If an
* exception has been caught by the module, the exception should be sent to
* the Logger along with the log message so that a stack trace will appear
* in the application log.
* include the name and object ID of the data being processed and any other
* information that would be useful for debugging. If an exception has been
* caught by the module, the exception should be sent to the logger along
* with the log message so that a stack trace will appear in the application
* log.
*
* @param file The file to analyze.
*

@ -60,6 +60,11 @@ public interface IngestModule {
* must also be taken into consideration when sharing resources between
* module instances. See IngestModuleReferenceCounter.
*
* IMPORTANT: Start up IngestModuleException messages are displayed to the
* user, if a user is present. Therefore, an exception to the policy that
* exception messages are not localized is appropriate in this method. Also,
* the exception messages should be user-friendly.
*
* @param context Provides data and services specific to the ingest job and
* the ingest pipeline of which the module is a part.
*

@ -69,7 +69,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
@ -327,8 +326,7 @@ class SevenZipExtractor {
TSK_COMMENT, MODULE_NAME,
details));

if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_ITEM, attributes)) {

if (!blackboard.artifactExists(archiveFile, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) {
BlackboardArtifact artifact = rootArchive.getArchiveFile().newAnalysisResult(
BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE,
null, setName, null,

@ -36,7 +36,6 @@ import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFil
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
@ -194,7 +193,7 @@ public class FileTypeIdIngestModule implements FileIngestModule {

Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) {
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) {
BlackboardArtifact artifact = file.newAnalysisResult(
BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE,
null, fileType.getInterestingFilesSetName(), null,

@ -439,7 +439,7 @@ public class HashDbIngestModule implements FileIngestModule {
attributesList.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_SET_NAME, HashLookupModuleFactory.getModuleName(), db.getDisplayName()));
try {
Blackboard tskBlackboard = skCase.getBlackboard();
if (tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT, attributesList) == false) {
if (tskBlackboard.artifactExists(file, BlackboardArtifact.Type.TSK_HASHSET_HIT, attributesList) == false) {
postHashSetHitToBlackboard(file, file.getMd5Hash(), db, comment);
}
} catch (TskCoreException ex) {

@ -39,7 +39,6 @@ import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ITEM;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
@ -142,7 +141,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
);

// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(file, TSK_INTERESTING_ITEM, attributes)) {
if (!blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_INTERESTING_ITEM, attributes)) {
BlackboardArtifact artifact = file.newAnalysisResult(
BlackboardArtifact.Type.TSK_INTERESTING_ITEM, Score.SCORE_LIKELY_NOTABLE,
null, filesSet.getName(), null,

@ -48,7 +48,6 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.modules.pictureanalyzer.PictureAnalyzerIngestModuleFactory;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
@ -68,7 +67,6 @@ import org.sleuthkit.datamodel.Score;
public class EXIFProcessor implements PictureProcessor {

private static final Logger logger = Logger.getLogger(EXIFProcessor.class.getName());
private static final BlackboardArtifact.Type EXIF_METADATA = new BlackboardArtifact.Type(TSK_METADATA_EXIF);

@Override
@NbBundle.Messages({
@ -151,7 +149,7 @@ public class EXIFProcessor implements PictureProcessor {

final Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();

if (!attributes.isEmpty() && !blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) {
if (!attributes.isEmpty() && !blackboard.artifactExists(file, BlackboardArtifact.Type.TSK_METADATA_EXIF, attributes)) {
List<BlackboardArtifact> artifacts = new ArrayList<>();
final BlackboardArtifact exifArtifact = (file.newAnalysisResult(
BlackboardArtifact.Type.TSK_METADATA_EXIF,

@ -65,15 +65,19 @@ DayOfTheWeekRenderer_Tuesday_Label=Tuesday
DayOfTheWeekRenderer_Wednesday_Label=Wednesday
GeneralOptionsPanelController.moduleErr.msg=A module caused an error listening to GeneralOptionsPanelController updates. See log to determine which module. Some data could be incomplete.
GeneralOptionsPanelController.moduleErr=Module Error
# {0} - errorMessage
MultiUserTestTool.criticalError=Critical error running data source processor on test data source: {0}
MultiUserTestTool.errorStartingIngestJob=Ingest manager error while starting ingest job
# {0} - cancellationReason
MultiUserTestTool.ingestCancelled=Ingest cancelled due to {0}
MultiUserTestTool.ingestSettingsError=Failed to analyze data source due to ingest settings errors
MultiUserTestTool.noContent=Test data source failed to produce content
# {0} - serviceName
MultiUserTestTool.serviceDown=Multi User service is down: {0}
MultiUserTestTool.startupError=Failed to analyze data source due to ingest job startup error
MultiUserTestTool.unableAddFileAsDataSource=Unable to add test file as data source to case
MultiUserTestTool.unableCreatFile=Unable to create a file in case output directory
# {0} - serviceName
MultiUserTestTool.unableToCheckService=Unable to check Multi User service state: {0}
MultiUserTestTool.unableToCreateCase=Unable to create case
MultiUserTestTool.unableToInitializeDatabase=Case database was not successfully initialized