Merge branch 'develop' into 1408-add-waypoint-list-to-route-artifacts

This commit is contained in:
Kelly Kelly 2020-02-26 15:55:14 -05:00
commit 3dd98271da
67 changed files with 3075 additions and 1786 deletions

View File

@ -345,6 +345,7 @@
<package>org.sleuthkit.autopsy.report</package>
<package>org.sleuthkit.autopsy.textextractors</package>
<package>org.sleuthkit.autopsy.textextractors.configs</package>
<package>org.sleuthkit.autopsy.textsummarizer</package>
<package>org.sleuthkit.autopsy.texttranslation</package>
<package>org.sleuthkit.datamodel</package>
<package>org.sleuthkit.datamodel.blackboardutils</package>

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -61,10 +61,10 @@ public final class AddEditCentralRepoCommentAction extends AbstractAction {
*/
public AddEditCentralRepoCommentAction(AbstractFile file) {
fileId = file.getId();
correlationAttributeInstance = CorrelationAttributeUtil.getInstanceFromContent(file);
correlationAttributeInstance = CorrelationAttributeUtil.getCorrAttrForFile(file);
if (correlationAttributeInstance == null) {
addToDatabase = true;
correlationAttributeInstance = CorrelationAttributeUtil.makeInstanceFromContent(file);
correlationAttributeInstance = CorrelationAttributeUtil.makeCorrAttrFromFile(file);
}
if (file.getSize() == 0) {
putValue(Action.NAME, Bundle.AddEditCentralRepoCommentAction_menuItemText_addEditCentralRepoCommentEmptyFile());

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2017-2019 Basis Technology Corp.
* Copyright 2017-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -464,7 +464,7 @@ public class DataContentViewerOtherCases extends JPanel implements DataContentVi
// correlate on blackboard artifact attributes if they exist and supported
BlackboardArtifact bbArtifact = getBlackboardArtifactFromNode(node);
if (bbArtifact != null && CentralRepository.isEnabled()) {
ret.addAll(CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(bbArtifact, false));
ret.addAll(CorrelationAttributeUtil.makeCorrAttrsFromArtifact(bbArtifact));
}
// we can correlate based on the MD5 if it is enabled

View File

@ -803,4 +803,13 @@ public interface CentralRepository {
* @throws CentralRepoException
*/
public void processSelectClause(String selectClause, InstanceTableCallback instanceTableCallback) throws CentralRepoException;
/**
* Returns list of all correlation types.
*
* @return list of Correlation types
* @throws CentralRepoException
*/
List<CorrelationAttributeInstance.Type> getCorrelationTypes() throws CentralRepoException;
}

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2018 Basis Technology Corp.
* Copyright 2015-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -24,6 +24,7 @@ import java.util.List;
import java.util.Objects;
import java.util.regex.Pattern;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.TskData;
/**
@ -221,6 +222,9 @@ public class CorrelationAttributeInstance implements Serializable {
public static final int IMSI_TYPE_ID = 8;
public static final int ICCID_TYPE_ID = 9;
// An offset to assign Ids for additional correlation types.
public static final int ADDITIONAL_TYPES_BASE_ID = 1000;
/**
* Load the default correlation types
*
@ -238,18 +242,30 @@ public class CorrelationAttributeInstance implements Serializable {
"CorrelationType.IMSI.displayName=IMSI Number",
"CorrelationType.ICCID.displayName=ICCID Number"})
public static List<CorrelationAttributeInstance.Type> getDefaultCorrelationTypes() throws CentralRepoException {
List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = new ArrayList<>();
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(FILES_TYPE_ID, Bundle.CorrelationType_FILES_displayName(), "file", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(DOMAIN_TYPE_ID, Bundle.CorrelationType_DOMAIN_displayName(), "domain", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(EMAIL_TYPE_ID, Bundle.CorrelationType_EMAIL_displayName(), "email_address", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(PHONE_TYPE_ID, Bundle.CorrelationType_PHONE_displayName(), "phone_number", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(USBID_TYPE_ID, Bundle.CorrelationType_USBID_displayName(), "usb_devices", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(SSID_TYPE_ID, Bundle.CorrelationType_SSID_displayName(), "wireless_networks", true, true)); // NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(MAC_TYPE_ID, Bundle.CorrelationType_MAC_displayName(), "mac_address", true, true)); //NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(IMEI_TYPE_ID, Bundle.CorrelationType_IMEI_displayName(), "imei_number", true, true)); //NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(IMSI_TYPE_ID, Bundle.CorrelationType_IMSI_displayName(), "imsi_number", true, true)); //NON-NLS
DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(ICCID_TYPE_ID, Bundle.CorrelationType_ICCID_displayName(), "iccid_number", true, true)); //NON-NLS
return DEFAULT_CORRELATION_TYPES;
List<CorrelationAttributeInstance.Type> defaultCorrelationTypes = new ArrayList<>();
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(FILES_TYPE_ID, Bundle.CorrelationType_FILES_displayName(), "file", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(DOMAIN_TYPE_ID, Bundle.CorrelationType_DOMAIN_displayName(), "domain", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(EMAIL_TYPE_ID, Bundle.CorrelationType_EMAIL_displayName(), "email_address", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(PHONE_TYPE_ID, Bundle.CorrelationType_PHONE_displayName(), "phone_number", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(USBID_TYPE_ID, Bundle.CorrelationType_USBID_displayName(), "usb_devices", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(SSID_TYPE_ID, Bundle.CorrelationType_SSID_displayName(), "wireless_networks", true, true)); // NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(MAC_TYPE_ID, Bundle.CorrelationType_MAC_displayName(), "mac_address", true, true)); //NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(IMEI_TYPE_ID, Bundle.CorrelationType_IMEI_displayName(), "imei_number", true, true)); //NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(IMSI_TYPE_ID, Bundle.CorrelationType_IMSI_displayName(), "imsi_number", true, true)); //NON-NLS
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(ICCID_TYPE_ID, Bundle.CorrelationType_ICCID_displayName(), "iccid_number", true, true)); //NON-NLS
// Create Correlation Types for Accounts.
int correlationTypeId = ADDITIONAL_TYPES_BASE_ID;
for (Account.Type type : Account.Type.PREDEFINED_ACCOUNT_TYPES) {
// Skip Phone and Email accounts as there are already Correlation types defined for those.
if (type != Account.Type.EMAIL && type != Account.Type.PHONE) {
defaultCorrelationTypes.add(new CorrelationAttributeInstance.Type(correlationTypeId, type.getDisplayName(), type.getTypeName().toLowerCase(), true, true)); //NON-NLS
correlationTypeId++;
}
}
return defaultCorrelationTypes;
}
/**

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2020 Basis Technology Corp.
* Copyright 2017-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -30,176 +30,280 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
/**
* Utility class for correlation attributes in the central repository
* Utility class for working with correlation attributes in the central
* repository.
*/
public class CorrelationAttributeUtil {
private static final Logger logger = Logger.getLogger(CorrelationAttributeUtil.class.getName());
@Messages({"EamArtifactUtil.emailaddresses.text=Email Addresses"})
public static String getEmailAddressAttrString() {
return Bundle.EamArtifactUtil_emailaddresses_text();
/**
* Gets a string that is expected to be the same string that is stored in
* the correlation_types table in the central repository as the display name
* for the email address correlation attribute type. This string is
* duplicated in the CorrelationAttributeInstance class.
*
* TODO (Jira-6088): We should not have multiple definitions of this string.
*
* @return The display name of the email address correlation attribute type.
*/
@Messages({"CorrelationAttributeUtil.emailaddresses.text=Email Addresses"})
private static String getEmailAddressAttrDisplayName() {
return Bundle.CorrelationAttributeUtil_emailaddresses_text();
}
/**
* Static factory method to examine a BlackboardArtifact to determine if it
* has contents that can be used for Correlation. If so, return a
* EamArtifact with a single EamArtifactInstance within. If not, return
* null.
* Makes zero to many correlation attribute instances from the attributes of
* an artifact.
*
* @param artifact BlackboardArtifact to examine
* @param checkEnabled If true, only create a CorrelationAttribute if it is
* enabled
* IMPORTANT: The correlation attribute instances are NOT added to the
* central repository by this method.
*
* @return List of EamArtifacts
* TODO (Jira-6088): The methods in this low-level, utility class should
* throw exceptions instead of logging them. The reason for this is that the
* clients of the utility class, not the utility class itself, should be in
* charge of error handling policy, per the Autopsy Coding Standard. Note
* that clients of several of these methods currently cannot determine
* whether receiving a null return value is an error or not, plus null
* checking is easy to forget, while catching exceptions is enforced.
*
* @param artifact An artifact.
*
* @return A list, possibly empty, of correlation attribute instances for
* the artifact.
*/
public static List<CorrelationAttributeInstance> makeInstancesFromBlackboardArtifact(BlackboardArtifact artifact,
boolean checkEnabled) {
List<CorrelationAttributeInstance> eamArtifacts = new ArrayList<>();
public static List<CorrelationAttributeInstance> makeCorrAttrsFromArtifact(BlackboardArtifact artifact) {
List<CorrelationAttributeInstance> correlationAttrs = new ArrayList<>();
try {
BlackboardArtifact artifactForInstance = null;
if (BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() == artifact.getArtifactTypeID()) {
// Get the associated artifactForInstance
BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT));
if (attribute != null) {
artifactForInstance = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifact(attribute.getValueLong());
}
} else {
artifactForInstance = artifact;
}
if (artifactForInstance != null) {
int artifactTypeID = artifactForInstance.getArtifactTypeID();
BlackboardArtifact sourceArtifact = getCorrAttrSourceArtifact(artifact);
if (sourceArtifact != null) {
int artifactTypeID = sourceArtifact.getArtifactTypeID();
if (artifactTypeID == ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
BlackboardAttribute setNameAttr = artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
if (setNameAttr != null
&& CorrelationAttributeUtil.getEmailAddressAttrString().equals(setNameAttr.getValueString())) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD, CorrelationAttributeInstance.EMAIL_TYPE_ID);
BlackboardAttribute setNameAttr = sourceArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME));
if (setNameAttr != null && CorrelationAttributeUtil.getEmailAddressAttrDisplayName().equals(setNameAttr.getValueString())) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD, CorrelationAttributeInstance.EMAIL_TYPE_ID);
}
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_WEB_COOKIE.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_WEB_DOWNLOAD.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, CorrelationAttributeInstance.DOMAIN_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, CorrelationAttributeInstance.DOMAIN_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_CONTACT.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_CALLLOG.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_MESSAGE.getTypeID()) {
makeCorrAttrFromArtifactPhoneAttr(sourceArtifact);
String value = null;
if (null != artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER))) {
value = artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER)).getValueString();
} else if (null != artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM))) {
value = artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM)).getValueString();
} else if (null != artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO))) {
value = artifactForInstance.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO)).getValueString();
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID, CorrelationAttributeInstance.USBID_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_MAC_ADDRESS, CorrelationAttributeInstance.MAC_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID, CorrelationAttributeInstance.SSID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WIFI_NETWORK_ADAPTER.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_BLUETOOTH_ADAPTER.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_MAC_ADDRESS, CorrelationAttributeInstance.MAC_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_DEVICE_INFO.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMEI, CorrelationAttributeInstance.IMEI_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMSI, CorrelationAttributeInstance.IMSI_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ICCID, CorrelationAttributeInstance.ICCID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_SIM_ATTACHED.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMSI, CorrelationAttributeInstance.IMSI_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ICCID, CorrelationAttributeInstance.ICCID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS.getTypeID()) {
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, CorrelationAttributeInstance.PHONE_TYPE_ID);
makeCorrAttrFromArtifactAttr(correlationAttrs, sourceArtifact, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL, CorrelationAttributeInstance.EMAIL_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()) {
makeCorrAttrFromAcctArtifact(correlationAttrs, sourceArtifact);
}
// Remove all non-numeric symbols to semi-normalize phone numbers, preserving leading "+" character
}
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", artifact), ex); // NON-NLS
return correlationAttrs;
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error getting querying case database (%s)", artifact), ex); // NON-NLS
return correlationAttrs;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Error getting current case", ex); // NON-NLS
return correlationAttrs;
}
return correlationAttrs;
}
/**
* Gets the associated artifact of a "meta-artifact" such as an interesting
* artifact hit artifact.
*
* @param artifact An artifact.
*
* @return The associated artifact if the input artifact is a
* "meta-artifact", otherwise the input artifact.
*
* @throws NoCurrentCaseException If there is no open case.
* @throws TskCoreException If there is an error querying the case
* database.
*/
private static BlackboardArtifact getCorrAttrSourceArtifact(BlackboardArtifact artifact) throws NoCurrentCaseException, TskCoreException {
BlackboardArtifact sourceArtifact = null;
if (BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() == artifact.getArtifactTypeID()) {
BlackboardAttribute assocArtifactAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT));
if (assocArtifactAttr != null) {
sourceArtifact = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifact(assocArtifactAttr.getValueLong());
}
} else {
sourceArtifact = artifact;
}
return sourceArtifact;
}
/**
* Makes a correlation attribute instance from a phone number attribute of an
* artifact.
*
* @param artifact An artifact with a phone number attribute.
*
* @return The correlation attribute instance, or null if the phone number is
* not a valid correlation attribute.
*
* @throws TskCoreException If there is an error querying the case
* database.
* @throws CentralRepoException If there is an error querying the central
* repository.
*/
private static CorrelationAttributeInstance makeCorrAttrFromArtifactPhoneAttr(BlackboardArtifact artifact) throws TskCoreException, CentralRepoException {
CorrelationAttributeInstance corrAttr = null;
/*
* Extract the phone number from the artifact attribute.
*/
String value = null;
if (null != artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER))) {
value = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER)).getValueString();
} else if (null != artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM))) {
value = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM)).getValueString();
} else if (null != artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO))) {
value = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO)).getValueString();
}
/*
* Normalize the phone number.
*/
if (value != null) {
String newValue = value.replaceAll("\\D", "");
if (value.startsWith("+")) {
newValue = "+" + newValue;
}
value = newValue;
// Only add the correlation attribute if the resulting phone number is large enough to be of use
// (these 3-5 digit numbers can be valid, but are not useful for correlation)
/*
* Validate the phone number. Three to five digit phone numbers may
* be valid, but they are too short to use as correlation
* attributes.
*/
if (value.length() > 5) {
CorrelationAttributeInstance inst = makeCorrelationAttributeInstanceUsingTypeValue(artifactForInstance, CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.PHONE_TYPE_ID), value);
if (inst != null) {
eamArtifacts.add(inst);
corrAttr = makeCorrAttr(artifact, CentralRepository.getInstance().getCorrelationTypeById(CorrelationAttributeInstance.PHONE_TYPE_ID), value);
}
}
}
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID, CorrelationAttributeInstance.USBID_TYPE_ID);
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_MAC_ADDRESS, CorrelationAttributeInstance.MAC_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID, CorrelationAttributeInstance.SSID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WIFI_NETWORK_ADAPTER.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING.getTypeID()
|| artifactTypeID == ARTIFACT_TYPE.TSK_BLUETOOTH_ADAPTER.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_MAC_ADDRESS, CorrelationAttributeInstance.MAC_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_DEVICE_INFO.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMEI, CorrelationAttributeInstance.IMEI_TYPE_ID);
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMSI, CorrelationAttributeInstance.IMSI_TYPE_ID);
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ICCID, CorrelationAttributeInstance.ICCID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_SIM_ATTACHED.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_IMSI, CorrelationAttributeInstance.IMSI_TYPE_ID);
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ICCID, CorrelationAttributeInstance.ICCID_TYPE_ID);
} else if (artifactTypeID == ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS.getTypeID()) {
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, CorrelationAttributeInstance.PHONE_TYPE_ID);
addCorrelationAttributeToList(eamArtifacts, artifactForInstance, BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL, CorrelationAttributeInstance.EMAIL_TYPE_ID);
}
}
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Error getting defined correlation types.", ex); // NON-NLS
return eamArtifacts;
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting attribute while getting type from BlackboardArtifact.", ex); // NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); // NON-NLS
return null;
}
return eamArtifacts;
return corrAttr;
}
/**
* Add a CorrelationAttributeInstance of the specified type to the provided
* list if the artifactForInstance has an Attribute of the given type with a
* non empty value.
* Makes a correlation attribute instance for an account artifact.
*
* @param eamArtifacts the list of CorrelationAttributeInstance objects
* which should be added to
* @param artifact the blackboard artifactForInstance which we are
* creating a CorrelationAttributeInstance for
* @param bbAttributeType the type of BlackboardAttribute we expect to exist
* for a CorrelationAttributeInstance of this type
* generated from this Blackboard Artifact
* @param typeId the integer type id of the
* CorrelationAttributeInstance type
* IMPORTANT: The correlation attribute instance is NOT added to the central
* repository by this method.
*
* @throws CentralRepoException
* @throws TskCoreException
* TODO (Jira-6088): The methods in this low-level, utility class should
* throw exceptions instead of logging them. The reason for this is that the
* clients of the utility class, not the utility class itself, should be in
* charge of error handling policy, per the Autopsy Coding Standard. Note
* that clients of several of these methods currently cannot determine
* whether receiving a null return value is an error or not, plus null
* checking is easy to forget, while catching exceptions is enforced.
*
* @param corrAttrInstances A list of correlation attribute instances.
* @param acctArtifact An account artifact.
*
* @return The correlation attribute instance.
*/
private static void addCorrelationAttributeToList(List<CorrelationAttributeInstance> eamArtifacts, BlackboardArtifact artifact, ATTRIBUTE_TYPE bbAttributeType, int typeId) throws CentralRepoException, TskCoreException {
BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(bbAttributeType));
private static void makeCorrAttrFromAcctArtifact(List<CorrelationAttributeInstance> corrAttrInstances, BlackboardArtifact acctArtifact) {
// RAMAN TODO: Convert TSK_ACCOUNT_TYPE attribute to correlation attribute type
// RAMAN TODO: Extract TSK_ID as value
// CorrelationAttributeInstance corrAttr = makeCorrAttr(acctArtifact, corrType, corrAttrValue);
// if (corrAttr != null) {
// corrAttrInstances.add(corrAttr);
// }
}
/**
* Makes a correlation attribute instance from a specified attribute of an
* artifact. The correlation attribute instance is added to an input list.
*
* @param corrAttrInstances A list of correlation attribute instances.
* @param artifact An artifact.
* @param artAttrType The type of the attribute of the artifact that
* is to be made into a correlation attribute
* instance.
* @param typeId The type ID for the desired correlation
* attribute instance.
*
* @throws CentralRepoException If there is an error querying the central
* repository.
* @throws TskCoreException If there is an error querying the case
* database.
*/
private static void makeCorrAttrFromArtifactAttr(List<CorrelationAttributeInstance> corrAttrInstances, BlackboardArtifact artifact, ATTRIBUTE_TYPE artAttrType, int typeId) throws CentralRepoException, TskCoreException {
BlackboardAttribute attribute = artifact.getAttribute(new BlackboardAttribute.Type(artAttrType));
if (attribute != null) {
String value = attribute.getValueString();
if ((null != value) && (value.isEmpty() == false)) {
CorrelationAttributeInstance inst = makeCorrelationAttributeInstanceUsingTypeValue(artifact, CentralRepository.getInstance().getCorrelationTypeById(typeId), value);
CorrelationAttributeInstance inst = makeCorrAttr(artifact, CentralRepository.getInstance().getCorrelationTypeById(typeId), value);
if (inst != null) {
eamArtifacts.add(inst);
corrAttrInstances.add(inst);
}
}
}
}
/**
* Uses the determined type and value, then looks up instance details to
* create a proper CorrelationAttributeInstance.
* Makes a correlation attribute instance of a given type from an artifact.
*
* @param bbArtifact the blackboard artifactForInstance
* @param correlationType the given type
* @param value the artifactForInstance value
* @param artifact The artifact.
* @param correlationType the correlation attribute type.
* @param value The correlation attribute value.
*
* @return CorrelationAttributeInstance from details, or null if validation
* failed or another error occurred
* TODO (Jira-6088): The methods in this low-level, utility class should
* throw exceptions instead of logging them. The reason for this is that the
* clients of the utility class, not the utility class itself, should be in
* charge of error handling policy, per the Autopsy Coding Standard. Note
* that clients of several of these methods currently cannot determine
* whether receiving a null return value is an error or not, plus null
* checking is easy to forget, while catching exceptions is enforced.
*
* @return The correlation attribute instance or null, if an error occurred.
*/
private static CorrelationAttributeInstance makeCorrelationAttributeInstanceUsingTypeValue(BlackboardArtifact bbArtifact, CorrelationAttributeInstance.Type correlationType, String value) {
private static CorrelationAttributeInstance makeCorrAttr(BlackboardArtifact artifact, CorrelationAttributeInstance.Type correlationType, String value) {
try {
Case currentCase = Case.getCurrentCaseThrows();
AbstractFile bbSourceFile = currentCase.getSleuthkitCase().getAbstractFileById(bbArtifact.getObjectID());
AbstractFile bbSourceFile = currentCase.getSleuthkitCase().getAbstractFileById(artifact.getObjectID());
if (null == bbSourceFile) {
logger.log(Level.SEVERE, "Error creating artifact instance. Abstract File was null."); // NON-NLS
return null;
}
// make an instance for the BB source file
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
return new CorrelationAttributeInstance(
correlationType,
@ -212,31 +316,34 @@ public class CorrelationAttributeUtil {
bbSourceFile.getId());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting AbstractFile for artifact: " + bbArtifact.toString(), ex); // NON-NLS
logger.log(Level.SEVERE, String.format("Error getting querying case database (%s)", artifact), ex); // NON-NLS
return null;
} catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
logger.log(Level.WARNING, "Error creating artifact instance for artifact: " + bbArtifact.toString(), ex); // NON-NLS
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", artifact), ex); // NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Case is closed.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error getting current case", ex); // NON-NLS
return null;
}
}
/**
* Retrieve CorrelationAttribute from the given Content.
* Gets the correlation attribute instance for a file.
*
* @param content The content object
* @param file The file.
*
* @return The new CorrelationAttribute, or null if retrieval failed.
* TODO (Jira-6088): The methods in this low-level, utility class should
* throw exceptions instead of logging them. The reason for this is that the
* clients of the utility class, not the utility class itself, should be in
* charge of error handling policy, per the Autopsy Coding Standard. Note
* that clients of several of these methods currently cannot determine
* whether receiving a null return value is an error or not, plus null
* checking is easy to forget, while catching exceptions is enforced.
*
* @return The correlation attribute instance or null, if no such
* correlation attribute instance was found or an error occurred.
*/
public static CorrelationAttributeInstance getInstanceFromContent(Content content) {
if (!(content instanceof AbstractFile)) {
return null;
}
final AbstractFile file = (AbstractFile) content;
public static CorrelationAttributeInstance getCorrAttrForFile(AbstractFile file) {
if (!isSupportedAbstractFileType(file)) {
return null;
@ -254,11 +361,14 @@ public class CorrelationAttributeUtil {
return null;
}
correlationDataSource = CorrelationDataSource.fromTSKDataSource(correlationCase, file.getDataSource());
} catch (TskCoreException | CentralRepoException ex) {
logger.log(Level.SEVERE, "Error retrieving correlation attribute.", ex);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error getting querying case database (%s)", file), ex); // NON-NLS
return null;
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", file), ex); // NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Case is closed.", ex);
logger.log(Level.SEVERE, "Error getting current case", ex); // NON-NLS
return null;
}
@ -266,20 +376,22 @@ public class CorrelationAttributeUtil {
try {
correlationAttributeInstance = CentralRepository.getInstance().getCorrelationAttributeInstance(type, correlationCase, correlationDataSource, file.getId());
} catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
logger.log(Level.WARNING, String.format(
"Correlation attribute could not be retrieved for '%s' (id=%d): ",
content.getName(), content.getId()), ex);
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", file), ex); // NON-NLS
return null;
}
//if there was no correlation attribute found for the item using object_id then check for attributes added with schema 1,1 which lack object_id
/*
* If no correlation attribute instance was found when querying by file
* object ID, try searching by file path instead. This is necessary
* because file object IDs were not stored in the central repository in
* early versions of its schema.
*/
if (correlationAttributeInstance == null && file.getMd5Hash() != null) {
String filePath = (file.getParentPath() + file.getName()).toLowerCase();
try {
correlationAttributeInstance = CentralRepository.getInstance().getCorrelationAttributeInstance(type, correlationCase, correlationDataSource, file.getMd5Hash(), filePath);
} catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
logger.log(Level.WARNING, String.format(
"Correlation attribute could not be retrieved for '%s' (id=%d): ",
content.getName(), content.getId()), ex);
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", file), ex); // NON-NLS
return null;
}
}
@ -288,32 +400,31 @@ public class CorrelationAttributeUtil {
}
/**
* Create an EamArtifact from the given Content. Will return null if an
* artifactForInstance can not be created - this is not necessarily an error
* case, it just means an artifactForInstance can't be made. If creation
* fails due to an error (and not that the file is the wrong type or it has
* no hash), the error will be logged before returning.
* Makes a correlation attribute instance for a file.
*
* Does not add the artifactForInstance to the database.
* IMPORTANT: The correlation attribute instance is NOT added to the central
* repository by this method.
*
* @param content The content object
* TODO (Jira-6088): The methods in this low-level, utility class should
* throw exceptions instead of logging them. The reason for this is that the
* clients of the utility class, not the utility class itself, should be in
* charge of error handling policy, per the Autopsy Coding Standard. Note
* that clients of several of these methods currently cannot determine
* whether receiving a null return value is an error or not, plus null
* checking is easy to forget, while catching exceptions is enforced.
*
* @return The new EamArtifact or null if creation failed
* @param file The file.
*
* @return The correlation attribute instance or null, if an error occurred.
*/
public static CorrelationAttributeInstance makeInstanceFromContent(Content content) {
public static CorrelationAttributeInstance makeCorrAttrFromFile(AbstractFile file) {
if (!(content instanceof AbstractFile)) {
if (!isSupportedAbstractFileType(file)) {
return null;
}
final AbstractFile af = (AbstractFile) content;
if (!isSupportedAbstractFileType(af)) {
return null;
}
// We need a hash to make the artifactForInstance
String md5 = af.getMd5Hash();
// We need a hash to make the correlation artifact instance.
String md5 = file.getMd5Hash();
if (md5 == null || md5.isEmpty() || HashUtility.isNoDataMd5(md5)) {
return null;
}
@ -324,31 +435,33 @@ public class CorrelationAttributeUtil {
CorrelationCase correlationCase = CentralRepository.getInstance().getCase(Case.getCurrentCaseThrows());
return new CorrelationAttributeInstance(
filesType,
af.getMd5Hash(),
file.getMd5Hash(),
correlationCase,
CorrelationDataSource.fromTSKDataSource(correlationCase, af.getDataSource()),
af.getParentPath() + af.getName(),
CorrelationDataSource.fromTSKDataSource(correlationCase, file.getDataSource()),
file.getParentPath() + file.getName(),
"",
TskData.FileKnown.UNKNOWN,
af.getId());
file.getId());
} catch (TskCoreException | CentralRepoException | CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, "Error making correlation attribute.", ex);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Error querying case database (%s)", file), ex); // NON-NLS
return null;
} catch (CentralRepoException | CorrelationAttributeNormalizationException ex) {
logger.log(Level.SEVERE, String.format("Error querying central repository (%s)", file), ex); // NON-NLS
return null;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Case is closed.", ex);
logger.log(Level.SEVERE, "Error getting current case", ex); // NON-NLS
return null;
}
}
/**
* Check whether the given abstract file should be processed for the central
* repository.
* Checks whether or not a file is of a type that can be added to the
* central repository as a correlation attribute instance.
*
* @param file The file to test
* @param file A file.
*
* @return true if the file should be added to the central repo, false
* otherwise
* @return True or false.
*/
public static boolean isSupportedAbstractFileType(AbstractFile file) {
if (file == null) {
@ -375,9 +488,9 @@ public class CorrelationAttributeUtil {
}
/**
 * Private constructor to prevent instantiation of this utility class.
 */
private CorrelationAttributeUtil() {
    //empty constructor
}
}

View File

@ -0,0 +1,87 @@
/*
* Central Repository
*
* Copyright 2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.datamodel;
/**
* This class abstracts a persona.
*
* An examiner may create a persona from an account.
*
*/
class Persona {

    /**
     * Levels of confidence in the assignment of a persona to an account.
     */
    public enum Confidence {
        UNKNOWN(1, "Unknown"),
        LOW(2, "Low confidence"),
        MEDIUM(3, "Medium confidence"),
        HIGH(4, "High confidence"),
        DERIVED(5, "Derived directly");

        // Human-readable label, returned by toString().
        private final String displayName;
        // Numeric id for this confidence level.
        private final int levelId;

        Confidence(int levelId, String displayName) {
            this.levelId = levelId;
            this.displayName = displayName;
        }

        /**
         * Gets the numeric id of this confidence level.
         */
        public int getLevel() {
            return levelId;
        }

        @Override
        public String toString() {
            return displayName;
        }
    }

    /**
     * Statuses that a persona may be in.
     */
    public enum PersonaStatus {
        UNKNOWN(1, "Unknown"),
        ACTIVE(2, "Active"),
        MERGED(3, "Merged"),
        SPLIT(4, "Split"),
        DELETED(5, "Deleted");

        // Human-readable description, returned by toString().
        private final String description;
        // Numeric id for this status.
        private final int statusId;

        PersonaStatus(int statusId, String description) {
            this.statusId = statusId;
            this.description = description;
        }

        /**
         * Gets the numeric id of this status.
         */
        public int getStatus() {
            return statusId;
        }

        @Override
        public String toString() {
            return description;
        }
    }
}

View File

@ -131,7 +131,9 @@ final class PostgresCentralRepo extends RdbmsCentralRepo {
CentralRepoDbUtil.closeConnection(conn);
}
dbSettings.insertDefaultDatabaseContent();
RdbmsCentralRepoFactory centralRepoSchemaFactory = new RdbmsCentralRepoFactory(CentralRepoPlatforms.POSTGRESQL, dbSettings);
centralRepoSchemaFactory.insertDefaultDatabaseContent();
}
/**
@ -209,6 +211,10 @@ final class PostgresCentralRepo extends RdbmsCentralRepo {
return CONFLICT_CLAUSE;
}
@Override
protected Connection getEphemeralConnection() {
return this.dbSettings.getEphemeralConnection(false);
}
/**
* Gets an exclusive lock (if applicable). Will return the lock if
* successful, null if unsuccessful because locking isn't supported, and

View File

@ -162,7 +162,7 @@ public final class PostgresCentralRepoSettings {
*
* @return Connection or null.
*/
private Connection getEphemeralConnection(boolean usePostgresDb) {
Connection getEphemeralConnection(boolean usePostgresDb) {
Connection conn;
try {
String url = getConnectionURL(usePostgresDb);
@ -290,308 +290,25 @@ public final class PostgresCentralRepoSettings {
}
/**
* Initialize the database schema.
*
* Requires valid connectionPool.
*
* This method is called from within connect(), so we cannot call connect()
* to get a connection. This method is called after setupConnectionPool(),
* so it is safe to assume that a valid connectionPool exists. The
* implementation of connect() is synchronized, so we can safely use the
* connectionPool object directly.
*/
/**
 * Initializes the Postgres central repository database schema.
 *
 * This method is called from within connect(), so we cannot call connect()
 * to get a connection. This method is called after setupConnectionPool(),
 * so it is safe to assume that a valid connectionPool exists. The
 * implementation of connect() is synchronized, so we can safely use the
 * connectionPool object directly.
 *
 * @return True if the schema was created without error, false otherwise.
 */
public boolean initializeDatabaseSchema() {
    // All of these tables use a Postgres SERIAL primary key, which
    // autoincrements; we are not currently using the id value to search for
    // specific rows, so re-use of ids after deletion is not a concern.
    StringBuilder createOrganizationsTable = new StringBuilder();
    createOrganizationsTable.append("CREATE TABLE IF NOT EXISTS organizations (");
    createOrganizationsTable.append("id SERIAL PRIMARY KEY,");
    createOrganizationsTable.append("org_name text NOT NULL,");
    createOrganizationsTable.append("poc_name text NOT NULL,");
    createOrganizationsTable.append("poc_email text NOT NULL,");
    createOrganizationsTable.append("poc_phone text NOT NULL,");
    createOrganizationsTable.append("CONSTRAINT org_name_unique UNIQUE (org_name)");
    createOrganizationsTable.append(")");
    // NOTE: The organizations table will only have a small number of rows, so
    // an index is probably not worthwhile.

    StringBuilder createCasesTable = new StringBuilder();
    createCasesTable.append("CREATE TABLE IF NOT EXISTS cases (");
    createCasesTable.append("id SERIAL PRIMARY KEY,");
    createCasesTable.append("case_uid text NOT NULL,");
    createCasesTable.append("org_id integer,");
    createCasesTable.append("case_name text NOT NULL,");
    createCasesTable.append("creation_date text NOT NULL,");
    createCasesTable.append("case_number text,");
    createCasesTable.append("examiner_name text,");
    createCasesTable.append("examiner_email text,");
    createCasesTable.append("examiner_phone text,");
    createCasesTable.append("notes text,");
    createCasesTable.append("foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL,");
    createCasesTable.append("CONSTRAINT case_uid_unique UNIQUE (case_uid)");
    createCasesTable.append(")");
    // NOTE: when there are few cases in the cases table, these indices may not be worthwhile
    String casesIdx1 = "CREATE INDEX IF NOT EXISTS cases_org_id ON cases (org_id)";
    String casesIdx2 = "CREATE INDEX IF NOT EXISTS cases_case_uid ON cases (case_uid)";

    StringBuilder createReferenceSetsTable = new StringBuilder();
    createReferenceSetsTable.append("CREATE TABLE IF NOT EXISTS reference_sets (");
    createReferenceSetsTable.append("id SERIAL PRIMARY KEY,");
    createReferenceSetsTable.append("org_id integer NOT NULL,");
    createReferenceSetsTable.append("set_name text NOT NULL,");
    createReferenceSetsTable.append("version text NOT NULL,");
    createReferenceSetsTable.append("known_status integer NOT NULL,");
    createReferenceSetsTable.append("read_only boolean NOT NULL,");
    createReferenceSetsTable.append("type integer NOT NULL,");
    createReferenceSetsTable.append("import_date text NOT NULL,");
    createReferenceSetsTable.append("foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL,");
    createReferenceSetsTable.append("CONSTRAINT hash_set_unique UNIQUE (set_name, version)");
    createReferenceSetsTable.append(")");
    String referenceSetsIdx1 = "CREATE INDEX IF NOT EXISTS reference_sets_org_id ON reference_sets (org_id)";

    // Each "%s" will be replaced with the relevant reference_TYPE table name.
    StringBuilder createReferenceTypesTableTemplate = new StringBuilder();
    createReferenceTypesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
    createReferenceTypesTableTemplate.append("id SERIAL PRIMARY KEY,");
    createReferenceTypesTableTemplate.append("reference_set_id integer,");
    createReferenceTypesTableTemplate.append("value text NOT NULL,");
    createReferenceTypesTableTemplate.append("known_status integer NOT NULL,");
    createReferenceTypesTableTemplate.append("comment text,");
    createReferenceTypesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE (reference_set_id, value),");
    createReferenceTypesTableTemplate.append("foreign key (reference_set_id) references reference_sets(id) ON UPDATE SET NULL ON DELETE SET NULL");
    createReferenceTypesTableTemplate.append(")");

    // Each "%s" will be replaced with the relevant reference_TYPE table name.
    String referenceTypesIdx1 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
    String referenceTypesIdx2 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";

    StringBuilder createCorrelationTypesTable = new StringBuilder();
    createCorrelationTypesTable.append("CREATE TABLE IF NOT EXISTS correlation_types (");
    createCorrelationTypesTable.append("id SERIAL PRIMARY KEY,");
    createCorrelationTypesTable.append("display_name text NOT NULL,");
    createCorrelationTypesTable.append("db_table_name text NOT NULL,");
    createCorrelationTypesTable.append("supported integer NOT NULL,");
    createCorrelationTypesTable.append("enabled integer NOT NULL,");
    createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
    createCorrelationTypesTable.append(")");

    String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
    String instancesCaseIdIdx = getAddCaseIdIndexTemplate();
    String instancesDatasourceIdIdx = getAddDataSourceIdIndexTemplate();
    String instancesValueIdx = getAddValueIndexTemplate();
    String instancesKnownStatusIdx = getAddKnownStatusIndexTemplate();
    String instancesObjectIdIdx = getAddObjectIdIndexTemplate();

    // NOTE: the db_info table currenly only has 1 row, so having an index
    // provides no benefit.
    Connection conn = null;
    try {
        conn = getEphemeralConnection(false);
        if (null == conn) {
            return false;
        }
        // FIX: the Statement was previously never closed; use
        // try-with-resources so it is released even on failure.
        try (Statement stmt = conn.createStatement()) {
            stmt.execute(createOrganizationsTable.toString());

            stmt.execute(createCasesTable.toString());
            stmt.execute(casesIdx1);
            stmt.execute(casesIdx2);

            stmt.execute(getCreateDataSourcesTableStatement());
            stmt.execute(getAddDataSourcesNameIndexStatement());
            stmt.execute(getAddDataSourcesObjectIdIndexStatement());

            stmt.execute(createReferenceSetsTable.toString());
            stmt.execute(referenceSetsIdx1);

            stmt.execute(createCorrelationTypesTable.toString());

            /*
             * Note that the essentially useless id column in the following
             * table is required for backwards compatibility. Otherwise, the
             * name column could be the primary key.
             */
            stmt.execute("CREATE TABLE db_info (id SERIAL, name TEXT UNIQUE NOT NULL, value TEXT NOT NULL)");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");

            // Create a separate instance and reference table for each correlation type
            List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = CorrelationAttributeInstance.getDefaultCorrelationTypes();
            String reference_type_dbname;
            String instance_type_dbname;
            for (CorrelationAttributeInstance.Type type : DEFAULT_CORRELATION_TYPES) {
                reference_type_dbname = CentralRepoDbUtil.correlationTypeToReferenceTableName(type);
                instance_type_dbname = CentralRepoDbUtil.correlationTypeToInstanceTableName(type);

                stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesCaseIdIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesDatasourceIdIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesValueIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesKnownStatusIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesObjectIdIdx, instance_type_dbname, instance_type_dbname));

                // FUTURE: allow more than the FILES type
                if (type.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) {
                    stmt.execute(String.format(createReferenceTypesTableTemplate.toString(), reference_type_dbname, reference_type_dbname));
                    stmt.execute(String.format(referenceTypesIdx1, reference_type_dbname, reference_type_dbname));
                    stmt.execute(String.format(referenceTypesIdx2, reference_type_dbname, reference_type_dbname));
                }
            }
        }
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, "Error initializing db schema.", ex); // NON-NLS
        return false;
    } catch (CentralRepoException ex) {
        // FIX: the exception was previously not passed to the logger, so the
        // stack trace was silently lost.
        LOGGER.log(Level.SEVERE, "Error getting default correlation types. Likely due to one or more Type's with an invalid db table name.", ex); // NON-NLS
        return false;
    } finally {
        CentralRepoDbUtil.closeConnection(conn);
    }
    return true;
}
/**
 * Gets the template String for creating a new _instances table in a Postgres
 * central repository. Each %s in the template marks where the name of the
 * new table will be added.
 *
 * @return a String which is a template for creating a new _instances table
 */
static String getCreateArtifactInstancesTableTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return ("CREATE TABLE IF NOT EXISTS %s (id SERIAL PRIMARY KEY,case_id integer NOT NULL,"
            + "data_source_id integer NOT NULL,value text NOT NULL,file_path text NOT NULL,"
            + "known_status integer NOT NULL,comment text,file_obj_id BIGINT,"
            + "CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),"
            + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            + "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)");
}
/**
 * Gets the statement String for creating a new data_sources table in a
 * Postgres central repository.
 *
 * @return a String which is a statement for creating a new data_sources
 *         table
 */
static String getCreateDataSourcesTableStatement() {
    return "CREATE TABLE IF NOT EXISTS data_sources "
            + "(id SERIAL PRIMARY KEY,case_id integer NOT NULL,device_id text NOT NULL,"
            + "name text NOT NULL,datasource_obj_id BIGINT,md5 text DEFAULT NULL,"
            + "sha1 text DEFAULT NULL,sha256 text DEFAULT NULL,"
            + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            + "CONSTRAINT datasource_unique UNIQUE (case_id, datasource_obj_id))";
}
/**
 * Gets the statement for creating an index on the name column of the
 * data_sources table.
 *
 * @return a String which is a statement for adding an index on the name
 *         column of the data_sources table.
 */
static String getAddDataSourcesNameIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)";
}
/**
 * Gets the statement for creating an index on the datasource_obj_id
 * column of the data_sources table.
 *
 * @return a String which is a statement for adding an index on the
 *         datasource_obj_id column of the data_sources table.
 */
static String getAddDataSourcesObjectIdIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)";
}
/**
 * Gets the template for creating an index on the case_id column of an
 * instance table. Each %s in the template marks where the name of the new
 * table will be added.
 *
 * @return a String which is a template for adding an index to the case_id
 *         column of a _instances table
 */
static String getAddCaseIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
}
/**
 * Gets the template for creating an index on the data_source_id column of an
 * instance table. Each %s in the template marks where the name of the new
 * table will be added.
 *
 * @return a String which is a template for adding an index to the
 *         data_source_id column of a _instances table
 */
static String getAddDataSourceIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
}
/**
 * Gets the template for creating an index on the value column of an instance
 * table. Each %s in the template marks where the name of the new table will
 * be added.
 *
 * @return a String which is a template for adding an index to the value
 *         column of a _instances table
 */
static String getAddValueIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
}
/**
 * Gets the template for creating an index on the known_status column of an
 * instance table. Each %s in the template marks where the name of the new
 * table will be added.
 *
 * @return a String which is a template for adding an index to the
 *         known_status column of a _instances table
 */
static String getAddKnownStatusIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
}
/**
 * Gets the template for creating an index on the file_obj_id column of an
 * instance table. Each %s in the template marks where the name of the new
 * table will be added.
 *
 * @return a String which is a template for adding an index to the
 *         file_obj_id column of a _instances table
 */
static String getAddObjectIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_file_obj_id ON %s (file_obj_id)";
}
/**
 * Inserts the default correlation types and the default organization into
 * the central repository database, using a short-lived connection.
 *
 * @return True if all of the default content was inserted, false otherwise
 *         (including when a connection could not be obtained).
 */
public boolean insertDefaultDatabaseContent() {
    Connection conn = getEphemeralConnection(false);
    if (null == conn) {
        return false;
    }
    // Short-circuit: the organization insert is skipped if the correlation
    // types insert fails.
    boolean result = CentralRepoDbUtil.insertDefaultCorrelationTypes(conn) && CentralRepoDbUtil.insertDefaultOrganization(conn);
    CentralRepoDbUtil.closeConnection(conn);
    return result;
}
boolean isChanged() {
String hostString = ModuleSettings.getConfigSetting("CentralRepository", "db.postgresql.host"); // NON-NLS

View File

@ -116,6 +116,10 @@ abstract class RdbmsCentralRepo implements CentralRepository {
*/
protected abstract Connection connect() throws CentralRepoException;
/**
* Get an ephemeral connection.
*/
protected abstract Connection getEphemeralConnection();
/**
* Add a new name/value pair in the db_info table.
*
@ -1369,6 +1373,9 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}
synchronized (bulkArtifacts) {
if (bulkArtifacts.get(CentralRepoDbUtil.correlationTypeToInstanceTableName(eamArtifact.getCorrelationType())) == null) {
bulkArtifacts.put(CentralRepoDbUtil.correlationTypeToInstanceTableName(eamArtifact.getCorrelationType()), new ArrayList<>());
}
bulkArtifacts.get(CentralRepoDbUtil.correlationTypeToInstanceTableName(eamArtifact.getCorrelationType())).add(eamArtifact);
bulkArtifactsCount++;
@ -2841,6 +2848,7 @@ abstract class RdbmsCentralRepo implements CentralRepository {
typeId = newCorrelationTypeKnownId(newType);
}
typeCache.put(newType.getId(), newType);
return typeId;
}
@ -3101,6 +3109,45 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}
}
/**
* Returns a list of all correlation types. It uses the cache to build the
* list. If the cache is empty, it reads from the database and loads up the
* cache.
*
* @return List of correlation types.
* @throws CentralRepoException
*/
@Override
public List<CorrelationAttributeInstance.Type> getCorrelationTypes() throws CentralRepoException {
    // Lazily populate the cache from the central repository on first access;
    // subsequent calls are served entirely from the cache.
    if (typeCache.size() == 0) {
        getCorrelationTypesFromCr();
    }
    // Return a copy so callers cannot mutate the cache's backing view.
    return new ArrayList<>(typeCache.asMap().values());
}
/**
* Gets a Correlation type with the specified name.
*
* @param correlationtypeName Correlation type name
* @return Correlation type matching the given name, null if none matches.
*
* @throws CentralRepoException
*/
public CorrelationAttributeInstance.Type getCorrelationTypeByName(String correlationtypeName) throws CentralRepoException {
    List<CorrelationAttributeInstance.Type> correlationTypesList = getCorrelationTypes();

    // Case-insensitive match on the display name; null if no type matches.
    CorrelationAttributeInstance.Type correlationType
            = correlationTypesList.stream()
                    .filter(x -> correlationtypeName.equalsIgnoreCase(x.getDisplayName()))
                    .findAny()
                    .orElse(null);

    // BUG FIX: this method previously returned null unconditionally,
    // discarding the matching type it had just computed.
    return correlationType;
}
/**
* Get the EamArtifact.Type that has the given Type.Id from the central repo
*
@ -3138,6 +3185,30 @@ abstract class RdbmsCentralRepo implements CentralRepository {
}
}
/**
 * Reads the correlation types from the central repository database and loads
 * them into the cache, replacing any entries already present.
 *
 * @throws CentralRepoException If there is an error querying the database.
 */
private void getCorrelationTypesFromCr() throws CentralRepoException {
    // Discard any stale entries so the cache exactly mirrors the database.
    typeCache.invalidateAll();

    final String typesQuery = "SELECT * FROM correlation_types";
    try (Connection connection = connect();
            PreparedStatement statement = connection.prepareStatement(typesQuery);
            ResultSet results = statement.executeQuery()) {
        while (results.next()) {
            CorrelationAttributeInstance.Type type = getCorrelationTypeFromResultSet(results);
            typeCache.put(type.getId(), type);
        }
    } catch (SQLException ex) {
        throw new CentralRepoException("Error getting correlation types.", ex); // NON-NLS
    }
}
/**
* Convert a ResultSet to a EamCase object
*
@ -3401,39 +3472,27 @@ abstract class RdbmsCentralRepo implements CentralRepository {
*/
if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 2)) < 0) {
final String addIntegerColumnTemplate = "ALTER TABLE %s ADD COLUMN %s INTEGER;"; //NON-NLS
final String addSsidTableTemplate;
final String addCaseIdIndexTemplate;
final String addDataSourceIdIndexTemplate;
final String addValueIndexTemplate;
final String addKnownStatusIndexTemplate;
final String addObjectIdIndexTemplate;
final String addSsidTableTemplate = RdbmsCentralRepoFactory.getCreateArtifactInstancesTableTemplate(selectedPlatform);
final String addCaseIdIndexTemplate = RdbmsCentralRepoFactory.getAddCaseIdIndexTemplate();
final String addDataSourceIdIndexTemplate = RdbmsCentralRepoFactory.getAddDataSourceIdIndexTemplate();
final String addValueIndexTemplate = RdbmsCentralRepoFactory.getAddValueIndexTemplate();
final String addKnownStatusIndexTemplate = RdbmsCentralRepoFactory.getAddKnownStatusIndexTemplate();
final String addObjectIdIndexTemplate = RdbmsCentralRepoFactory.getAddObjectIdIndexTemplate();
final String addAttributeSql;
//get the data base specific code for creating a new _instance table
switch (selectedPlatform) {
case POSTGRESQL:
addAttributeSql = "INSERT INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?) " + getConflictClause(); //NON-NLS
addSsidTableTemplate = PostgresCentralRepoSettings.getCreateArtifactInstancesTableTemplate();
addCaseIdIndexTemplate = PostgresCentralRepoSettings.getAddCaseIdIndexTemplate();
addDataSourceIdIndexTemplate = PostgresCentralRepoSettings.getAddDataSourceIdIndexTemplate();
addValueIndexTemplate = PostgresCentralRepoSettings.getAddValueIndexTemplate();
addKnownStatusIndexTemplate = PostgresCentralRepoSettings.getAddKnownStatusIndexTemplate();
addObjectIdIndexTemplate = PostgresCentralRepoSettings.getAddObjectIdIndexTemplate();
break;
case SQLITE:
addAttributeSql = "INSERT OR IGNORE INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?)"; //NON-NLS
addSsidTableTemplate = SqliteCentralRepoSettings.getCreateArtifactInstancesTableTemplate();
addCaseIdIndexTemplate = SqliteCentralRepoSettings.getAddCaseIdIndexTemplate();
addDataSourceIdIndexTemplate = SqliteCentralRepoSettings.getAddDataSourceIdIndexTemplate();
addValueIndexTemplate = SqliteCentralRepoSettings.getAddValueIndexTemplate();
addKnownStatusIndexTemplate = SqliteCentralRepoSettings.getAddKnownStatusIndexTemplate();
addObjectIdIndexTemplate = SqliteCentralRepoSettings.getAddObjectIdIndexTemplate();
break;
default:
throw new CentralRepoException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded.", Bundle.AbstractSqlEamDb_cannotUpgrage_message(selectedPlatform.name()));
}
final String dataSourcesTableName = "data_sources";
final String dataSourceObjectIdColumnName = "datasource_obj_id";
if (!doesColumnExist(conn, dataSourcesTableName, dataSourceObjectIdColumnName)) {
@ -3586,8 +3645,8 @@ abstract class RdbmsCentralRepo implements CentralRepository {
+ "md5 text DEFAULT NULL,sha1 text DEFAULT NULL,sha256 text DEFAULT NULL,"
+ "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
+ "CONSTRAINT datasource_unique UNIQUE (case_id, device_id, name, datasource_obj_id))");
statement.execute(SqliteCentralRepoSettings.getAddDataSourcesNameIndexStatement());
statement.execute(SqliteCentralRepoSettings.getAddDataSourcesObjectIdIndexStatement());
statement.execute(RdbmsCentralRepoFactory.getAddDataSourcesNameIndexStatement());
statement.execute(RdbmsCentralRepoFactory.getAddDataSourcesObjectIdIndexStatement());
statement.execute("INSERT INTO data_sources SELECT * FROM old_data_sources");
statement.execute("DROP TABLE old_data_sources");
break;

View File

@ -0,0 +1,865 @@
/*
* Central Repository
*
* Copyright 2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.datamodel;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona.Confidence;
import org.sleuthkit.autopsy.centralrepository.datamodel.Persona.PersonaStatus;
import static org.sleuthkit.autopsy.centralrepository.datamodel.RdbmsCentralRepo.SOFTWARE_CR_DB_SCHEMA_VERSION;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Account;
/**
* Creates the CR schema and populates it with initial data.
*
*/
public class RdbmsCentralRepoFactory {
private final static Logger LOGGER = Logger.getLogger(RdbmsCentralRepoFactory.class.getName());
private final CentralRepoPlatforms selectedPlatform;
private final SqliteCentralRepoSettings sqliteCentralRepoSettings;
private final PostgresCentralRepoSettings postgresCentralRepoSettings;
// SQLite pragmas
private final static String PRAGMA_SYNC_OFF = "PRAGMA synchronous = OFF";
private final static String PRAGMA_JOURNAL_WAL = "PRAGMA journal_mode = WAL";
private final static String PRAGMA_READ_UNCOMMITTED_TRUE = "PRAGMA read_uncommitted = True";
private final static String PRAGMA_ENCODING_UTF8 = "PRAGMA encoding = 'UTF-8'";
private final static String PRAGMA_PAGE_SIZE_4096 = "PRAGMA page_size = 4096";
private final static String PRAGMA_FOREIGN_KEYS_ON = "PRAGMA foreign_keys = ON";
/**
 * Constructs a factory that creates/populates the CR schema in a SQLite
 * central repository.
 *
 * @param selectedPlatform CR database platform (expected to be SQLITE).
 * @param repoSettings     SQLite connection settings.
 *
 * @throws CentralRepoException declared for API symmetry; not thrown here.
 */
public RdbmsCentralRepoFactory(CentralRepoPlatforms selectedPlatform, SqliteCentralRepoSettings repoSettings) throws CentralRepoException {
    this.selectedPlatform = selectedPlatform;
    this.sqliteCentralRepoSettings = repoSettings;
    // Exactly one settings object is non-null; getEphemeralConnection()
    // picks between them based on selectedPlatform.
    this.postgresCentralRepoSettings = null;
}
/**
 * Constructs a factory that creates/populates the CR schema in a PostgreSQL
 * central repository.
 *
 * @param selectedPlatform CR database platform (expected to be POSTGRESQL).
 * @param repoSettings     PostgreSQL connection settings.
 *
 * @throws CentralRepoException declared for API symmetry; not thrown here.
 */
public RdbmsCentralRepoFactory(CentralRepoPlatforms selectedPlatform, PostgresCentralRepoSettings repoSettings) throws CentralRepoException {
    this.selectedPlatform = selectedPlatform;
    this.postgresCentralRepoSettings = repoSettings;
    // Exactly one settings object is non-null; getEphemeralConnection()
    // picks between them based on selectedPlatform.
    this.sqliteCentralRepoSettings = null;
}
/**
* Initialize the database schema.
*
* Requires valid connectionPool.
*
* This method is called from within connect(), so we cannot call connect()
* to get a connection. This method is called after setupConnectionPool(),
* so it is safe to assume that a valid connectionPool exists. The
* implementation of connect() is synchronized, so we can safely use the
* connectionPool object directly.
*/
public boolean initializeDatabaseSchema() {
    // Templates with "%s" placeholders, expanded once per correlation type
    // into the TYPE_instances tables and their indexes.
    String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate(selectedPlatform);
    String instancesCaseIdIdx = getAddCaseIdIndexTemplate();
    String instancesDatasourceIdIdx = getAddDataSourceIdIndexTemplate();
    String instancesValueIdx = getAddValueIndexTemplate();
    String instancesKnownStatusIdx = getAddKnownStatusIndexTemplate();
    String instancesObjectIdIdx = getAddObjectIdIndexTemplate();
    // NOTE: the db_info table currently only has 1 row, so having an index
    // provides no benefit.
    try (Connection conn = this.getEphemeralConnection();) {
        if (null == conn) {
            LOGGER.log(Level.SEVERE, "Cannot initialize CR database, don't have a valid connection."); // NON-NLS
            return false;
        }
        try (Statement stmt = conn.createStatement();) {
            // these setting PRAGMAs are SQLite specific
            if (selectedPlatform == CentralRepoPlatforms.SQLITE) {
                stmt.execute(PRAGMA_JOURNAL_WAL);
                stmt.execute(PRAGMA_SYNC_OFF);
                stmt.execute(PRAGMA_READ_UNCOMMITTED_TRUE);
                stmt.execute(PRAGMA_ENCODING_UTF8);
                stmt.execute(PRAGMA_PAGE_SIZE_4096);
                stmt.execute(PRAGMA_FOREIGN_KEYS_ON);
            }
            // Create Organizations table (referenced by cases and reference_sets)
            stmt.execute(getCreateOrganizationsTableStatement(selectedPlatform));
            // Create Cases table and indexes
            stmt.execute(getCreateCasesTableStatement(selectedPlatform));
            stmt.execute(getCasesOrgIdIndexStatement());
            stmt.execute(getCasesCaseUidIndexStatement());
            // Create data_sources table and indexes
            stmt.execute(getCreateDataSourcesTableStatement(selectedPlatform));
            stmt.execute(getAddDataSourcesNameIndexStatement());
            stmt.execute(getAddDataSourcesObjectIdIndexStatement());
            // Create reference_sets table and index
            stmt.execute(getCreateReferenceSetsTableStatement(selectedPlatform));
            stmt.execute(getReferenceSetsOrgIdIndexTemplate());
            stmt.execute(getCreateCorrelationTypesTableStatement(selectedPlatform));
            // Record both the current and the creation-time schema versions
            // in db_info.
            stmt.execute(getCreateDbInfoTableStatement(selectedPlatform));
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
            stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");
            // Create account_types and accounts tables which are referred to by X_instances tables
            stmt.execute(getCreateAccountTypesTableStatement(selectedPlatform));
            stmt.execute(getCreateAccountsTableStatement(selectedPlatform));
            // Create a separate instance and reference table for each artifact type
            List<CorrelationAttributeInstance.Type> defaultCorrelationTypes = CorrelationAttributeInstance.getDefaultCorrelationTypes();
            String reference_type_dbname;
            String instance_type_dbname;
            for (CorrelationAttributeInstance.Type type : defaultCorrelationTypes) {
                reference_type_dbname = CentralRepoDbUtil.correlationTypeToReferenceTableName(type);
                instance_type_dbname = CentralRepoDbUtil.correlationTypeToInstanceTableName(type);
                stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesCaseIdIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesDatasourceIdIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesValueIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesKnownStatusIdx, instance_type_dbname, instance_type_dbname));
                stmt.execute(String.format(instancesObjectIdIdx, instance_type_dbname, instance_type_dbname));
                // FUTURE: allow more than the FILES type
                if (type.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) {
                    stmt.execute(String.format(getReferenceTypesTableTemplate(selectedPlatform), reference_type_dbname, reference_type_dbname));
                    stmt.execute(String.format(getReferenceTypeValueIndexTemplate(), reference_type_dbname, reference_type_dbname));
                    stmt.execute(String.format(getReferenceTypeValueKnownstatusIndexTemplate(), reference_type_dbname, reference_type_dbname));
                }
            }
            // Persona tables go last: persona_accounts has a foreign key into
            // the accounts table created above.
            createPersonaTables(stmt);
        } catch (SQLException ex) {
            LOGGER.log(Level.SEVERE, "Error initializing db schema.", ex); // NON-NLS
            return false;
        } catch (CentralRepoException ex) {
            LOGGER.log(Level.SEVERE, "Error getting default correlation types. Likely due to one or more Type's with an invalid db table name."); // NON-NLS
            return false;
        }
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, "Error connecting to database.", ex); // NON-NLS
        return false;
    }
    return true;
}
/**
* Inserts default data in CR database.
*
* @return True if success, False otherwise.
*/
/**
 * Inserts default data in CR database: the default correlation types, the
 * default organization, and the default persona-table content.
 *
 * @return True if success, False otherwise.
 */
public boolean insertDefaultDatabaseContent() {
    try (Connection conn = this.getEphemeralConnection()) {
        if (null == conn) {
            return false;
        }
        // All three inserts must succeed; short-circuits on first failure.
        return CentralRepoDbUtil.insertDefaultCorrelationTypes(conn)
                && CentralRepoDbUtil.insertDefaultOrganization(conn)
                && insertDefaultPersonaTablesContent(conn);
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, String.format("Failed to populate default data in CR tables."), ex);
        return false;
    }
}
/**
 * Returns Create Table SQL for the db_info (schema version) table.
 *
 * @param selectedPlatform CR database platform.
 *
 * @return SQL string to create db_info table.
 */
private static String getCreateDbInfoTableStatement(CentralRepoPlatforms selectedPlatform) {
    /*
     * Note that the essentially useless id column in the following
     * table is required for backwards compatibility. Otherwise, the
     * name column could be the primary key.
     */
    // NOTE(review): this is the only table created without "IF NOT EXISTS";
    // presumably schema creation always runs against a fresh database --
    // confirm before invoking this against an existing one.
    return "CREATE TABLE db_info ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "name TEXT UNIQUE NOT NULL,"
            + "value TEXT NOT NULL "
            + ")";
}
/**
* Returns Create Table SQL for Organizations table.
*
* @param selectedPlatform CR database platform.
*
* @return SQL string to create Organizations table.
*/
private static String getCreateOrganizationsTableStatement(CentralRepoPlatforms selectedPlatform) {
    // On SQLite the "id" column is an alias for the built-in 64-bit "rowid".
    // getNumericPrimaryKeyClause() emits "integer primary key autoincrement"
    // for SQLite and "SERIAL PRIMARY KEY" for PostgreSQL, so ids are never
    // re-used after deletes. (A previous version of this comment claimed
    // autoincrement was omitted; that is no longer the case.)
    return "CREATE TABLE IF NOT EXISTS organizations ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "org_name text NOT NULL,"
            + "poc_name text NOT NULL,"
            + "poc_email text NOT NULL,"
            + "poc_phone text NOT NULL,"
            + "CONSTRAINT org_name_unique UNIQUE (org_name)"
            + ")";
}
/**
* Returns Create Table SQL for Cases table.
*
* @param selectedPlatform CR database platform.
*
* @return SQL string to create Cases table.
*/
private static String getCreateCasesTableStatement(CentralRepoPlatforms selectedPlatform) {
    // case_uid is the unique key; on SQLite a duplicate insert is silently
    // ignored (ON CONFLICT IGNORE), on PostgreSQL the conflict clause is empty.
    return ("CREATE TABLE IF NOT EXISTS cases (")
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "case_uid text NOT NULL,"
            + "org_id integer,"
            + "case_name text NOT NULL,"
            + "creation_date text NOT NULL,"
            + "case_number text,"
            + "examiner_name text,"
            + "examiner_email text,"
            + "examiner_phone text,"
            + "notes text,"
            + "foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            + "CONSTRAINT case_uid_unique UNIQUE(case_uid)" + getOnConflictIgnoreClause(selectedPlatform)
            + ")";
}
// Returns SQL to index cases by owning organization (org_id).
private static String getCasesOrgIdIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS cases_org_id ON cases (org_id)";
}
// Returns SQL to index cases by their unique identifier (case_uid).
private static String getCasesCaseUidIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS cases_case_uid ON cases (case_uid)";
}
/**
 * Returns Create Table SQL for the reference_sets table, which records
 * imported hash sets and their owning organization.
 *
 * @param selectedPlatform CR database platform.
 *
 * @return SQL string to create reference_sets table.
 */
private static String getCreateReferenceSetsTableStatement(CentralRepoPlatforms selectedPlatform) {
    return "CREATE TABLE IF NOT EXISTS reference_sets ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "org_id integer NOT NULL,"
            + "set_name text NOT NULL,"
            + "version text NOT NULL,"
            + "known_status integer NOT NULL,"
            + "read_only boolean NOT NULL,"
            + "type integer NOT NULL,"
            + "import_date text NOT NULL,"
            + "foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            // A set is identified by its name plus version.
            + "CONSTRAINT hash_set_unique UNIQUE (set_name, version)"
            + ")";
}
/**
*
* @return
*/
// Returns SQL to index reference_sets by org_id. Despite the "Template"
// suffix this is a complete statement with no "%s" placeholders; the name
// is kept for compatibility with existing callers.
private static String getReferenceSetsOrgIdIndexTemplate() {
    return "CREATE INDEX IF NOT EXISTS reference_sets_org_id ON reference_sets (org_id)";
}
/**
* Returns the template string to create reference_TYPE tables.
*
* @param selectedPlatform CR database platform.
*
* @return template string to create a reference_TYPE table.
*/
private static String getReferenceTypesTableTemplate(CentralRepoPlatforms selectedPlatform) {
    // Each "%s" will be replaced with the relevant reference_TYPE table name
    // (two occurrences: table name and constraint-name prefix).
    return "CREATE TABLE IF NOT EXISTS %s ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "reference_set_id integer,"
            + "value text NOT NULL,"
            + "known_status integer NOT NULL,"
            + "comment text,"
            + "CONSTRAINT %s_multi_unique UNIQUE(reference_set_id, value)" + getOnConflictIgnoreClause(selectedPlatform) + ","
            + "foreign key (reference_set_id) references reference_sets(id) ON UPDATE SET NULL ON DELETE SET NULL"
            + ")";
}
/**
* Returns SQL string template to create a value index on
* ReferenceType table.
*/
private static String getReferenceTypeValueIndexTemplate() {
    // Both "%s" occurrences take the reference_TYPE table name.
    return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
}
/**
* Returns SQL string template to create a value/known_status index on
* ReferenceType table.
*/
private static String getReferenceTypeValueKnownstatusIndexTemplate() {
    // Both "%s" occurrences take the reference_TYPE table name.
    return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
}
/**
* Returns the SQL statement to create correlation_types table.
*
* @param selectedPlatform CR database platform.
*
* @return SQL string to create correlation_types table.
*/
private static String getCreateCorrelationTypesTableStatement(CentralRepoPlatforms selectedPlatform) {
    // One row per correlation type; db_table_name is the prefix used to
    // build per-type TYPE_instances / reference_TYPE table names.
    return "CREATE TABLE IF NOT EXISTS correlation_types ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "display_name text NOT NULL,"
            + "db_table_name text NOT NULL,"
            + "supported integer NOT NULL,"
            + "enabled integer NOT NULL,"
            + "CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)"
            + ")";
}
/**
* Get the template String for creating a new _instances table in a Sqlite
* central repository. %s will exist in the template where the name of the
* new table will be added.
*
* @return a String which is a template for creating a new _instances table
*/
static String getCreateArtifactInstancesTableTemplate(CentralRepoPlatforms selectedPlatform) {
    // Each "%s" will be replaced with the relevant TYPE_instances table name
    // (table name and constraint-name prefix). Despite the "Sqlite" wording
    // in the javadoc above, this template works for both platforms via the
    // platform-aware clause helpers.
    return "CREATE TABLE IF NOT EXISTS %s ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "case_id integer NOT NULL,"
            + "data_source_id integer NOT NULL,"
            + "account_id " + getBigIntType(selectedPlatform) + " DEFAULT NULL,"
            + "value text NOT NULL,"
            + "file_path text NOT NULL,"
            + "known_status integer NOT NULL,"
            + "comment text,"
            + "file_obj_id " + getBigIntType(selectedPlatform) + " ,"
            // An instance is unique per data source, value, and file path.
            + "CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path)" + getOnConflictIgnoreClause(selectedPlatform) + ","
            + "foreign key (account_id) references accounts(id),"
            + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            + "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)";
}
/**
* Get the statement String for creating a new data_sources table in a
* Sqlite central repository.
*
* @return a String which is a statement for creating a new data_sources
* table
*/
static String getCreateDataSourcesTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Uniqueness is on (case_id, datasource_obj_id) only -- device_id and
    // name are not part of the constraint.
    return "CREATE TABLE IF NOT EXISTS data_sources ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "case_id integer NOT NULL,"
            + "device_id text NOT NULL,"
            + "name text NOT NULL,"
            + "datasource_obj_id " + getBigIntType(selectedPlatform) + " ,"
            + "md5 text DEFAULT NULL,"
            + "sha1 text DEFAULT NULL,"
            + "sha256 text DEFAULT NULL,"
            + "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
            + "CONSTRAINT datasource_unique UNIQUE (case_id, datasource_obj_id))";
}
/**
* Get the template for creating an index on the case_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be added.
*
* @return a String which is a template for adding an index to the case_id
* column of a _instances table
*/
static String getAddCaseIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
}
/**
* Get the template for creating an index on the data_source_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be added.
*
* @return a String which is a template for adding an index to the
* data_source_id column of a _instances table
*/
static String getAddDataSourceIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
}
/**
* Get the template for creating an index on the value column of an instance
* table. %s will exist in the template where the name of the new table will
* be added.
*
* @return a String which is a template for adding an index to the value
* column of a _instances table
*/
static String getAddValueIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
}
/**
* Get the template for creating an index on the known_status column of an
* instance table. %s will exist in the template where the name of the new
* table will be added.
*
* @return a String which is a template for adding an index to the
* known_status column of a _instances table
*/
static String getAddKnownStatusIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    // Note the index is a compound (value, known_status) index, not
    // known_status alone, despite this method's name.
    return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
}
/**
* Get the template for creating an index on the file_obj_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be added.
*
* @return a String which is a template for adding an index to the
* file_obj_id column of a _instances table
*/
static String getAddObjectIdIndexTemplate() {
    // Each "%s" will be replaced with the relevant TYPE_instances table name.
    return "CREATE INDEX IF NOT EXISTS %s_file_obj_id ON %s (file_obj_id)";
}
/**
* Get the statement for creating an index on the name column of the
* data_sources table.
*
* @return a String which is a statement for adding an index on the name
* column of the data_sources table.
*/
// Returns SQL to index data_sources by name.
static String getAddDataSourcesNameIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)";
}
/**
* Get the statement for creating an index on the data_sources_object_id
* column of the data_sources table.
*
* @return a String which is a statement for adding an index on the
* data_sources_object_id column of the data_sources table.
*/
// Returns SQL to index data_sources by the case-database object id
// (datasource_obj_id column).
static String getAddDataSourcesObjectIdIndexStatement() {
    return "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)";
}
/**
* Builds SQL clause for a numeric primary key. Produces correct SQL based
* on the selected CR platform/RDMBS.
*
* @param pkName name of primary key.
*
* @return SQL clause to be used in a Create table statement
*/
/**
 * Builds the SQL clause for a numeric, auto-generated primary key in the
 * dialect of the selected CR platform: SERIAL on PostgreSQL, autoincrement
 * integer on SQLite.
 *
 * @param pkName           name of primary key column.
 * @param selectedPlatform CR database platform.
 *
 * @return SQL clause to be used in a Create table statement; empty string
 *         for an unrecognized platform.
 */
private static String getNumericPrimaryKeyClause(String pkName, CentralRepoPlatforms selectedPlatform) {
    if (selectedPlatform == CentralRepoPlatforms.POSTGRESQL) {
        return String.format(" %s SERIAL PRIMARY KEY, ", pkName);
    }
    if (selectedPlatform == CentralRepoPlatforms.SQLITE) {
        return String.format(" %s integer primary key autoincrement NOT NULL ,", pkName);
    }
    return "";
}
/**
* Returns ON CONFLICT IGNORE clause for the specified database platform.
*
*
* @return SQL clause.
*/
/**
 * Returns the ON CONFLICT IGNORE clause for the specified database
 * platform. Only SQLite supports a table-level conflict clause; every
 * other platform gets an empty string.
 *
 * @param selectedPlatform CR database platform.
 *
 * @return SQL clause (possibly empty).
 */
private static String getOnConflictIgnoreClause(CentralRepoPlatforms selectedPlatform) {
    return (selectedPlatform == CentralRepoPlatforms.SQLITE) ? " ON CONFLICT IGNORE " : "";
}
/**
* Returns keyword for big integer for the specified database platform.
*
*
* @return SQL clause.
*/
/**
 * Returns the 64-bit integer column type keyword for the specified
 * database platform: BIGINT on PostgreSQL, INTEGER (which is 64-bit)
 * on SQLite.
 *
 * @param selectedPlatform CR database platform.
 *
 * @return SQL type keyword; empty string for an unrecognized platform.
 */
private static String getBigIntType(CentralRepoPlatforms selectedPlatform) {
    if (selectedPlatform == CentralRepoPlatforms.POSTGRESQL) {
        return " BIGINT ";
    }
    return (selectedPlatform == CentralRepoPlatforms.SQLITE) ? " INTEGER " : "";
}
/**
 * Returns the INSERT-level conflict suppression clause for the specified
 * database platform. PostgreSQL uses "ON CONFLICT DO NOTHING"; SQLite has
 * no equivalent suffix here, so it gets an empty string.
 *
 * @param selectedPlatform CR database platform.
 *
 * @return SQL clause (possibly empty).
 */
private static String getOnConflictDoNothingClause(CentralRepoPlatforms selectedPlatform) {
    return (selectedPlatform == CentralRepoPlatforms.POSTGRESQL) ? "ON CONFLICT DO NOTHING" : "";
}
/**
* Returns an ephemeral connection to the CR database.
*
* @return CR database connection
*/
/**
 * Returns an ephemeral connection to the CR database, obtained from
 * whichever settings object matches the configured platform (the
 * constructors guarantee the matching one is non-null).
 *
 * @return CR database connection, or null for an unrecognized platform.
 */
private Connection getEphemeralConnection() {
    if (selectedPlatform == CentralRepoPlatforms.POSTGRESQL) {
        return this.postgresCentralRepoSettings.getEphemeralConnection(false);
    }
    if (selectedPlatform == CentralRepoPlatforms.SQLITE) {
        return this.sqliteCentralRepoSettings.getEphemeralConnection();
    }
    return null;
}
/**
* Creates the tables for Persona.
*
* @return True if success, False otherwise.
*/
private boolean createPersonaTables(Statement stmt) throws SQLException {
    // Order matters: confidence, examiners, persona_status, aliases, and
    // personas are created before the join tables that hold foreign keys
    // into them (persona_alias, persona_metadata, persona_accounts).
    stmt.execute(getCreateConfidenceTableStatement(selectedPlatform));
    stmt.execute(getCreateExaminersTableStatement(selectedPlatform));
    stmt.execute(getCreatePersonaStatusTableStatement(selectedPlatform));
    stmt.execute(getCreateAliasesTableStatement(selectedPlatform));
    stmt.execute(getCreatePersonasTableStatement(selectedPlatform));
    stmt.execute(getCreatePersonaAliasTableStatement(selectedPlatform));
    stmt.execute(getCreatePersonaMetadataTableStatement(selectedPlatform));
    stmt.execute(getCreatePersonaAccountsTableStatement(selectedPlatform));
    // Always returns true; failures surface as SQLException to the caller.
    return true;
}
/**
* Get the SQL string for creating a new account_types table in a central
* repository.
*
* @return SQL string for creating account_types table
*/
static String getCreateAccountTypesTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Maps an account type (e.g. email, phone) to its correlation type row.
    return "CREATE TABLE IF NOT EXISTS account_types ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "type_name TEXT NOT NULL,"
            + "display_name TEXT NOT NULL,"
            + "correlation_type_id " + getBigIntType(selectedPlatform) + " ,"
            + "CONSTRAINT type_name_unique UNIQUE (type_name),"
            + "FOREIGN KEY (correlation_type_id) REFERENCES correlation_types(id)"
            + ")";
}
/**
* Get the SQL String for creating a new confidence table in a central
* repository.
*
* @return SQL string for creating confidence table
*/
static String getCreateConfidenceTableStatement(CentralRepoPlatforms selectedPlatform) {
    // confidence_id mirrors Persona.Confidence.getLevel(); rows are
    // populated by insertDefaultPersonaTablesContent().
    return "CREATE TABLE IF NOT EXISTS confidence ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "confidence_id integer NOT NULL,"
            + "description TEXT,"
            + "CONSTRAINT level_unique UNIQUE (confidence_id)"
            + ")";
}
/**
* Get the SQL String for creating a new examiners table in a central
* repository.
*
* @return SQL string for creating examiners table
*/
static String getCreateExaminersTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Examiners are identified by their unique login name.
    return "CREATE TABLE IF NOT EXISTS examiners ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "login_name TEXT NOT NULL,"
            + "display_name TEXT,"
            + "CONSTRAINT login_name_unique UNIQUE(login_name)"
            + ")";
}
/**
* Get the SQL String for creating a new persona_status table in a central
* repository.
*
* @return SQL string for creating persona_status table
*/
static String getCreatePersonaStatusTableStatement(CentralRepoPlatforms selectedPlatform) {
    // status_id mirrors Persona.PersonaStatus.getStatus(); rows are
    // populated by insertDefaultPersonaTablesContent(). The UNIQUE
    // constraint on status_id lets personas reference it as a foreign key.
    return "CREATE TABLE IF NOT EXISTS persona_status ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "status_id integer NOT NULL,"
            + "status TEXT NOT NULL,"
            + "CONSTRAINT status_unique UNIQUE(status_id)"
            + ")";
}
/**
* Get the SQL String for creating a new aliases table in a central
* repository.
*
* @return SQL string for creating aliases table
*/
static String getCreateAliasesTableStatement(CentralRepoPlatforms selectedPlatform) {
    // De-duplicated alias strings; personas link to them via persona_alias.
    return "CREATE TABLE IF NOT EXISTS aliases ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "alias TEXT NOT NULL,"
            + "CONSTRAINT alias_unique UNIQUE(alias)"
            + ")";
}
/**
* Get the SQL String for creating a new accounts table in a central
* repository.
*
* @return SQL string for creating accounts table
*/
static String getCreateAccountsTableStatement(CentralRepoPlatforms selectedPlatform) {
    // An account is unique per (type, identifier) pair.
    return "CREATE TABLE IF NOT EXISTS accounts ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "account_type_id integer NOT NULL,"
            + "account_unique_identifier TEXT NOT NULL,"
            + "CONSTRAINT account_unique UNIQUE(account_type_id, account_unique_identifier),"
            + "FOREIGN KEY (account_type_id) REFERENCES account_types(id)"
            + ")";
}
/**
* Get the SQL String for creating a new personas table in a central
* repository.
*
* @return SQL string for creating personas table
*/
static String getCreatePersonasTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Note: status_id references persona_status(status_id), which is a
    // UNIQUE column rather than that table's primary key.
    return "CREATE TABLE IF NOT EXISTS personas ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "uuid TEXT NOT NULL,"
            + "comment TEXT NOT NULL,"
            + "name TEXT NOT NULL,"
            + "created_date " + getBigIntType(selectedPlatform) + " ,"
            + "modified_date " + getBigIntType(selectedPlatform) + " ,"
            + "status_id integer NOT NULL,"
            + "CONSTRAINT uuid_unique UNIQUE(uuid),"
            + "FOREIGN KEY (status_id) REFERENCES persona_status(status_id)"
            + ")";
}
/**
* Get the SQL String for creating a new persona_alias table in a central
* repository.
*
* @return SQL string for creating persona_alias table
*/
static String getCreatePersonaAliasTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Join table linking personas to aliases, with the examiner,
    // confidence, and justification behind each association.
    return "CREATE TABLE IF NOT EXISTS persona_alias ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "persona_id " + getBigIntType(selectedPlatform) + " ,"
            + "alias_id " + getBigIntType(selectedPlatform) + " ,"
            + "justification TEXT NOT NULL,"
            + "confidence_id integer NOT NULL,"
            + "date_added " + getBigIntType(selectedPlatform) + " ,"
            + "examiner_id integer NOT NULL,"
            + "FOREIGN KEY (persona_id) REFERENCES personas(id),"
            + "FOREIGN KEY (alias_id) REFERENCES aliases(id),"
            + "FOREIGN KEY (confidence_id) REFERENCES confidence(confidence_id),"
            + "FOREIGN KEY (examiner_id) REFERENCES examiners(id)"
            + ")";
}
/**
* Get the SQL String for creating a new persona_metadata table in a central
* repository.
*
* @return SQL string for creating persona_metadata table
*/
static String getCreatePersonaMetadataTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Name/value metadata attached to a persona; at most one value per
    // (persona_id, name) pair.
    return "CREATE TABLE IF NOT EXISTS persona_metadata ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "persona_id " + getBigIntType(selectedPlatform) + " ,"
            + "name TEXT NOT NULL,"
            + "value TEXT NOT NULL,"
            + "justification TEXT NOT NULL,"
            + "confidence_id integer NOT NULL,"
            + "date_added " + getBigIntType(selectedPlatform) + " ,"
            + "examiner_id integer NOT NULL,"
            + "CONSTRAINT unique_metadata UNIQUE(persona_id, name),"
            + "FOREIGN KEY (persona_id) REFERENCES personas(id),"
            + "FOREIGN KEY (confidence_id) REFERENCES confidence(confidence_id),"
            + "FOREIGN KEY (examiner_id) REFERENCES examiners(id)"
            + ")";
}
/**
* Get the SQL String for creating a new persona_accounts table in a central
* repository.
*
* @return SQL string for creating persona_accounts table
*/
static String getCreatePersonaAccountsTableStatement(CentralRepoPlatforms selectedPlatform) {
    // Join table linking personas to CR accounts; must be created after
    // both personas and accounts tables exist.
    return "CREATE TABLE IF NOT EXISTS persona_accounts ("
            + getNumericPrimaryKeyClause("id", selectedPlatform)
            + "persona_id " + getBigIntType(selectedPlatform) + " ,"
            + "account_id " + getBigIntType(selectedPlatform) + " ,"
            + "justification TEXT NOT NULL,"
            + "confidence_id integer NOT NULL,"
            + "date_added " + getBigIntType(selectedPlatform) + " ,"
            + "examiner_id integer NOT NULL,"
            + "FOREIGN KEY (persona_id) REFERENCES personas(id),"
            + "FOREIGN KEY (account_id) REFERENCES accounts(id),"
            + "FOREIGN KEY (confidence_id) REFERENCES confidence(confidence_id),"
            + "FOREIGN KEY (examiner_id) REFERENCES examiners(id)"
            + ")";
}
/**
* Inserts the default content in persona related tables.
*
* @param conn Database connection to use.
*
* @return True if success, false otherwise.
*/
/**
 * Inserts the default content in persona related tables: one row per
 * Persona.Confidence level, one row per Persona.PersonaStatus value, and
 * one account_types row per predefined account type that maps to a known
 * correlation type. Duplicate rows are suppressed via the platform's
 * conflict clause.
 *
 * @param conn Database connection to use.
 *
 * @return True if success, false otherwise.
 */
private boolean insertDefaultPersonaTablesContent(Connection conn) {
    // try-with-resources replaces the previous manual close-in-finally,
    // guaranteeing the statement is released on every exit path.
    try (Statement stmt = conn.createStatement()) {
        // populate the confidence table
        for (Confidence confidence : Persona.Confidence.values()) {
            String sqlString = "INSERT INTO confidence (confidence_id, description) VALUES ( " + confidence.getLevel() + ", '" + confidence.toString() + "')" //NON-NLS
                    + getOnConflictDoNothingClause(selectedPlatform);
            stmt.execute(sqlString);
        }
        // populate the persona_status table
        for (PersonaStatus status : Persona.PersonaStatus.values()) {
            String sqlString = "INSERT INTO persona_status (status_id, status) VALUES ( " + status.getStatus() + ", '" + status.toString() + "')" //NON-NLS
                    + getOnConflictDoNothingClause(selectedPlatform);
            stmt.execute(sqlString);
        }
        // Populate the account_types table; skip types with no resolvable
        // correlation type (getCorrelationTypeIdForAccountType returns -1).
        for (Account.Type type : Account.Type.PREDEFINED_ACCOUNT_TYPES) {
            int correlationTypeId = getCorrelationTypeIdForAccountType(conn, type);
            if (correlationTypeId > 0) {
                String sqlString = String.format("INSERT INTO account_types (type_name, display_name, correlation_type_id) VALUES ('%s', '%s', %d)" + getOnConflictDoNothingClause(selectedPlatform),
                        type.getTypeName(), type.getDisplayName(), correlationTypeId);
                stmt.execute(sqlString);
            }
        }
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, String.format("Failed to populate default data in Persona tables."), ex);
        return false;
    }
    return true;
}
/**
* Returns the correlation type id for the given account type,
* from the correlation_types table.
*
* @param conn Connection to use for database query.
* @param accountType Account type to look for.
* '
* @return correlation type id.
*/
/**
 * Returns the correlation type id for the given account type. Email and
 * phone account types map to fixed well-known ids; any other type is
 * resolved by querying the correlation_types table by display name.
 *
 * @param conn        Connection to use for database query.
 * @param accountType Account type to look for.
 *
 * @return correlation type id, or -1 if no match was found.
 */
private int getCorrelationTypeIdForAccountType(Connection conn, Account.Type accountType) {
    if (accountType == Account.Type.EMAIL) {
        return CorrelationAttributeInstance.EMAIL_TYPE_ID;
    }
    if (accountType == Account.Type.PHONE) {
        return CorrelationAttributeInstance.PHONE_TYPE_ID;
    }
    int typeId = -1;
    String querySql = "SELECT * FROM correlation_types WHERE display_name=?";
    try (PreparedStatement query = conn.prepareStatement(querySql)) {
        query.setString(1, accountType.getDisplayName());
        try (ResultSet resultSet = query.executeQuery()) {
            if (resultSet.next()) {
                typeId = resultSet.getInt("id");
            }
        }
    } catch (SQLException ex) {
        LOGGER.log(Level.SEVERE, String.format("Failed to get correlation typeId for account type %s.", accountType.getTypeName()), ex);
    }
    return typeId;
}
}

View File

@ -144,7 +144,8 @@ final class SqliteCentralRepo extends RdbmsCentralRepo {
CentralRepoDbUtil.closeConnection(conn);
}
dbSettings.insertDefaultDatabaseContent();
RdbmsCentralRepoFactory centralRepoSchemaFactory = new RdbmsCentralRepoFactory(CentralRepoPlatforms.SQLITE, dbSettings);
centralRepoSchemaFactory.insertDefaultDatabaseContent();
} finally {
releaseExclusiveLock();
}
@ -226,6 +227,10 @@ final class SqliteCentralRepo extends RdbmsCentralRepo {
return "";
}
@Override
protected Connection getEphemeralConnection() {
return this.dbSettings.getEphemeralConnection();
}
/**
* Add a new name/value pair in the db_info table.
*

View File

@ -25,14 +25,11 @@ import java.nio.file.InvalidPathException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.logging.Level;
import java.util.regex.Pattern;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import static org.sleuthkit.autopsy.centralrepository.datamodel.RdbmsCentralRepo.SOFTWARE_CR_DB_SCHEMA_VERSION;
/**
* Settings for the sqlite implementation of the Central Repository database
@ -48,13 +45,7 @@ public final class SqliteCentralRepoSettings {
private final static String JDBC_DRIVER = "org.sqlite.JDBC"; // NON-NLS
private final static String JDBC_BASE_URI = "jdbc:sqlite:"; // NON-NLS
private final static String VALIDATION_QUERY = "SELECT count(*) from sqlite_master"; // NON-NLS
private final static String PRAGMA_SYNC_OFF = "PRAGMA synchronous = OFF";
private final static String PRAGMA_SYNC_NORMAL = "PRAGMA synchronous = NORMAL";
private final static String PRAGMA_JOURNAL_WAL = "PRAGMA journal_mode = WAL";
private final static String PRAGMA_READ_UNCOMMITTED_TRUE = "PRAGMA read_uncommitted = True";
private final static String PRAGMA_ENCODING_UTF8 = "PRAGMA encoding = 'UTF-8'";
private final static String PRAGMA_PAGE_SIZE_4096 = "PRAGMA page_size = 4096";
private final static String PRAGMA_FOREIGN_KEYS_ON = "PRAGMA foreign_keys = ON";
private final static String DB_NAMES_REGEX = "[a-z][a-z0-9_]*(\\.db)?";
private String dbName;
private String dbDirectory;
@ -182,7 +173,7 @@ public final class SqliteCentralRepoSettings {
*
* @return Connection or null.
*/
private Connection getEphemeralConnection() {
Connection getEphemeralConnection() {
if (!dbDirectoryExists()) {
return null;
}
@ -233,312 +224,6 @@ public final class SqliteCentralRepoSettings {
return result;
}
/**
* Initialize the database schema.
*
* Requires valid connectionPool.
*
* This method is called from within connect(), so we cannot call connect()
* to get a connection. This method is called after setupConnectionPool(),
* so it is safe to assume that a valid connectionPool exists. The
* implementation of connect() is synchronized, so we can safely use the
* connectionPool object directly.
*/
public boolean initializeDatabaseSchema() {
// The "id" column is an alias for the built-in 64-bit int "rowid" column.
// It is autoincrementing by default and must be of type "integer primary key".
// We've omitted the autoincrement argument because we are not currently
// using the id value to search for specific rows, so we do not care
// if a rowid is re-used after an existing rows was previously deleted.
StringBuilder createOrganizationsTable = new StringBuilder();
createOrganizationsTable.append("CREATE TABLE IF NOT EXISTS organizations (");
createOrganizationsTable.append("id integer primary key autoincrement NOT NULL,");
createOrganizationsTable.append("org_name text NOT NULL,");
createOrganizationsTable.append("poc_name text NOT NULL,");
createOrganizationsTable.append("poc_email text NOT NULL,");
createOrganizationsTable.append("poc_phone text NOT NULL,");
createOrganizationsTable.append("CONSTRAINT org_name_unique UNIQUE (org_name)");
createOrganizationsTable.append(")");
// NOTE: The organizations will only have a small number of rows, so
// an index is probably not worthwhile.
StringBuilder createCasesTable = new StringBuilder();
createCasesTable.append("CREATE TABLE IF NOT EXISTS cases (");
createCasesTable.append("id integer primary key autoincrement NOT NULL,");
createCasesTable.append("case_uid text NOT NULL,");
createCasesTable.append("org_id integer,");
createCasesTable.append("case_name text NOT NULL,");
createCasesTable.append("creation_date text NOT NULL,");
createCasesTable.append("case_number text,");
createCasesTable.append("examiner_name text,");
createCasesTable.append("examiner_email text,");
createCasesTable.append("examiner_phone text,");
createCasesTable.append("notes text,");
createCasesTable.append("CONSTRAINT case_uid_unique UNIQUE(case_uid) ON CONFLICT IGNORE,");
createCasesTable.append("foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL");
createCasesTable.append(")");
// NOTE: when there are few cases in the cases table, these indices may not be worthwhile
String casesIdx1 = "CREATE INDEX IF NOT EXISTS cases_org_id ON cases (org_id)";
String casesIdx2 = "CREATE INDEX IF NOT EXISTS cases_case_uid ON cases (case_uid)";
StringBuilder createReferenceSetsTable = new StringBuilder();
createReferenceSetsTable.append("CREATE TABLE IF NOT EXISTS reference_sets (");
createReferenceSetsTable.append("id integer primary key autoincrement NOT NULL,");
createReferenceSetsTable.append("org_id integer NOT NULL,");
createReferenceSetsTable.append("set_name text NOT NULL,");
createReferenceSetsTable.append("version text NOT NULL,");
createReferenceSetsTable.append("known_status integer NOT NULL,");
createReferenceSetsTable.append("read_only boolean NOT NULL,");
createReferenceSetsTable.append("type integer NOT NULL,");
createReferenceSetsTable.append("import_date text NOT NULL,");
createReferenceSetsTable.append("foreign key (org_id) references organizations(id) ON UPDATE SET NULL ON DELETE SET NULL,");
createReferenceSetsTable.append("CONSTRAINT hash_set_unique UNIQUE (set_name, version)");
createReferenceSetsTable.append(")");
String referenceSetsIdx1 = "CREATE INDEX IF NOT EXISTS reference_sets_org_id ON reference_sets (org_id)";
// Each "%s" will be replaced with the relevant reference_TYPE table name.
StringBuilder createReferenceTypesTableTemplate = new StringBuilder();
createReferenceTypesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
createReferenceTypesTableTemplate.append("id integer primary key autoincrement NOT NULL,");
createReferenceTypesTableTemplate.append("reference_set_id integer,");
createReferenceTypesTableTemplate.append("value text NOT NULL,");
createReferenceTypesTableTemplate.append("known_status integer NOT NULL,");
createReferenceTypesTableTemplate.append("comment text,");
createReferenceTypesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(reference_set_id, value) ON CONFLICT IGNORE,");
createReferenceTypesTableTemplate.append("foreign key (reference_set_id) references reference_sets(id) ON UPDATE SET NULL ON DELETE SET NULL");
createReferenceTypesTableTemplate.append(")");
// Each "%s" will be replaced with the relevant reference_TYPE table name.
String referenceTypesIdx1 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
String referenceTypesIdx2 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
StringBuilder createCorrelationTypesTable = new StringBuilder();
createCorrelationTypesTable.append("CREATE TABLE IF NOT EXISTS correlation_types (");
createCorrelationTypesTable.append("id integer primary key autoincrement NOT NULL,");
createCorrelationTypesTable.append("display_name text NOT NULL,");
createCorrelationTypesTable.append("db_table_name text NOT NULL,");
createCorrelationTypesTable.append("supported integer NOT NULL,");
createCorrelationTypesTable.append("enabled integer NOT NULL,");
createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
createCorrelationTypesTable.append(")");
String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
String instancesCaseIdIdx = getAddCaseIdIndexTemplate();
String instancesDatasourceIdIdx = getAddDataSourceIdIndexTemplate();
String instancesValueIdx = getAddValueIndexTemplate();
String instancesKnownStatusIdx = getAddKnownStatusIndexTemplate();
String instancesObjectIdIdx = getAddObjectIdIndexTemplate();
// NOTE: the db_info table currenly only has 1 row, so having an index
// provides no benefit.
Connection conn = null;
try {
conn = getEphemeralConnection();
if (null == conn) {
return false;
}
Statement stmt = conn.createStatement();
stmt.execute(PRAGMA_JOURNAL_WAL);
stmt.execute(PRAGMA_SYNC_OFF);
stmt.execute(PRAGMA_READ_UNCOMMITTED_TRUE);
stmt.execute(PRAGMA_ENCODING_UTF8);
stmt.execute(PRAGMA_PAGE_SIZE_4096);
stmt.execute(PRAGMA_FOREIGN_KEYS_ON);
stmt.execute(createOrganizationsTable.toString());
stmt.execute(createCasesTable.toString());
stmt.execute(casesIdx1);
stmt.execute(casesIdx2);
stmt.execute(getCreateDataSourcesTableStatement());
stmt.execute(getAddDataSourcesNameIndexStatement());
stmt.execute(getAddDataSourcesObjectIdIndexStatement());
stmt.execute(createReferenceSetsTable.toString());
stmt.execute(referenceSetsIdx1);
stmt.execute(createCorrelationTypesTable.toString());
/*
* Note that the essentially useless id column in the following
* table is required for backwards compatibility. Otherwise, the
* name column could be the primary key.
*/
stmt.execute("CREATE TABLE db_info (id INTEGER PRIMARY KEY, name TEXT UNIQUE NOT NULL, value TEXT NOT NULL)");
stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");
stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MAJOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMajor() + "')");
stmt.execute("INSERT INTO db_info (name, value) VALUES ('" + RdbmsCentralRepo.CREATION_SCHEMA_MINOR_VERSION_KEY + "', '" + SOFTWARE_CR_DB_SCHEMA_VERSION.getMinor() + "')");
// Create a separate instance and reference table for each artifact type
List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = CorrelationAttributeInstance.getDefaultCorrelationTypes();
String reference_type_dbname;
String instance_type_dbname;
for (CorrelationAttributeInstance.Type type : DEFAULT_CORRELATION_TYPES) {
reference_type_dbname = CentralRepoDbUtil.correlationTypeToReferenceTableName(type);
instance_type_dbname = CentralRepoDbUtil.correlationTypeToInstanceTableName(type);
stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
stmt.execute(String.format(instancesCaseIdIdx, instance_type_dbname, instance_type_dbname));
stmt.execute(String.format(instancesDatasourceIdIdx, instance_type_dbname, instance_type_dbname));
stmt.execute(String.format(instancesValueIdx, instance_type_dbname, instance_type_dbname));
stmt.execute(String.format(instancesKnownStatusIdx, instance_type_dbname, instance_type_dbname));
stmt.execute(String.format(instancesObjectIdIdx, instance_type_dbname, instance_type_dbname));
// FUTURE: allow more than the FILES type
if (type.getId() == CorrelationAttributeInstance.FILES_TYPE_ID) {
stmt.execute(String.format(createReferenceTypesTableTemplate.toString(), reference_type_dbname, reference_type_dbname));
stmt.execute(String.format(referenceTypesIdx1, reference_type_dbname, reference_type_dbname));
stmt.execute(String.format(referenceTypesIdx2, reference_type_dbname, reference_type_dbname));
}
}
} catch (SQLException ex) {
LOGGER.log(Level.SEVERE, "Error initializing db schema.", ex); // NON-NLS
return false;
} catch (CentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error getting default correlation types. Likely due to one or more Type's with an invalid db table name."); // NON-NLS
return false;
} finally {
CentralRepoDbUtil.closeConnection(conn);
}
return true;
}
/**
* Get the template String for creating a new _instances table in a Sqlite
* central repository. %s will exist in the template where the name of the
* new table will be addedd.
*
* @return a String which is a template for cretating a new _instances table
*/
static String getCreateArtifactInstancesTableTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE TABLE IF NOT EXISTS %s (id integer primary key autoincrement NOT NULL,"
+ "case_id integer NOT NULL,data_source_id integer NOT NULL,value text NOT NULL,"
+ "file_path text NOT NULL,known_status integer NOT NULL,comment text,file_obj_id integer,"
+ "CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,"
+ "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
+ "foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL)";
}
/**
* Get the statement String for creating a new data_sources table in a
* Sqlite central repository.
*
* @return a String which is a statement for cretating a new data_sources
* table
*/
static String getCreateDataSourcesTableStatement() {
return "CREATE TABLE IF NOT EXISTS data_sources (id integer primary key autoincrement NOT NULL,"
+ "case_id integer NOT NULL,device_id text NOT NULL,name text NOT NULL,datasource_obj_id integer,"
+ "md5 text DEFAULT NULL,sha1 text DEFAULT NULL,sha256 text DEFAULT NULL,"
+ "foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,"
+ "CONSTRAINT datasource_unique UNIQUE (case_id, datasource_obj_id))";
}
/**
* Get the statement for creating an index on the name column of the
* data_sources table.
*
* @return a String which is a statement for adding an index on the name
* column of the data_sources table.
*/
static String getAddDataSourcesNameIndexStatement() {
return "CREATE INDEX IF NOT EXISTS data_sources_name ON data_sources (name)";
}
/**
* Get the statement for creating an index on the data_sources_object_id
* column of the data_sources table.
*
* @return a String which is a statement for adding an index on the
* data_sources_object_id column of the data_sources table.
*/
static String getAddDataSourcesObjectIdIndexStatement() {
return "CREATE INDEX IF NOT EXISTS data_sources_object_id ON data_sources (datasource_obj_id)";
}
/**
* Get the template for creating an index on the case_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be addedd.
*
* @return a String which is a template for adding an index to the case_id
* column of a _instances table
*/
static String getAddCaseIdIndexTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
}
/**
* Get the template for creating an index on the data_source_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be addedd.
*
* @return a String which is a template for adding an index to the
* data_source_id column of a _instances table
*/
static String getAddDataSourceIdIndexTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
}
/**
* Get the template for creating an index on the value column of an instance
* table. %s will exist in the template where the name of the new table will
* be addedd.
*
* @return a String which is a template for adding an index to the value
* column of a _instances table
*/
static String getAddValueIndexTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
}
/**
* Get the template for creating an index on the known_status column of an
* instance table. %s will exist in the template where the name of the new
* table will be addedd.
*
* @return a String which is a template for adding an index to the
* known_status column of a _instances table
*/
static String getAddKnownStatusIndexTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
}
/**
* Get the template for creating an index on the file_obj_id column of an
* instance table. %s will exist in the template where the name of the new
* table will be addedd.
*
* @return a String which is a template for adding an index to the
* file_obj_id column of a _instances table
*/
static String getAddObjectIdIndexTemplate() {
// Each "%s" will be replaced with the relevant TYPE_instances table name.
return "CREATE INDEX IF NOT EXISTS %s_file_obj_id ON %s (file_obj_id)";
}
public boolean insertDefaultDatabaseContent() {
Connection conn = getEphemeralConnection();
if (null == conn) {
return false;
}
boolean result = CentralRepoDbUtil.insertDefaultCorrelationTypes(conn) && CentralRepoDbUtil.insertDefaultOrganization(conn);
CentralRepoDbUtil.closeConnection(conn);
return result;
}
boolean isChanged() {
String dbNameString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbName"); // NON-NLS
String dbDirectoryString = ModuleSettings.getConfigSetting("CentralRepository", "db.sqlite.dbDirectory"); // NON-NLS

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2018 Basis Technology Corp.
* Copyright 2017-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -52,7 +52,6 @@ import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
/**
@ -197,7 +196,7 @@ final class CaseEventListener implements PropertyChangeListener {
}
}
final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeInstanceFromContent(af);
final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeCorrAttrFromFile(af);
if (eamArtifact != null) {
// send update to Central Repository db
@ -297,7 +296,7 @@ final class CaseEventListener implements PropertyChangeListener {
return;
}
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(bbArtifact, true);
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeCorrAttrsFromArtifact(bbArtifact);
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
eamArtifact.setComment(comment);
try {
@ -370,7 +369,7 @@ final class CaseEventListener implements PropertyChangeListener {
if (!hasTagWithConflictingKnownStatus) {
//Get the correlation atttributes that correspond to the current BlackboardArtifactTag if their status should be changed
//with the initial set of correlation attributes this should be a single correlation attribute
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(bbTag.getArtifact(), true);
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeCorrAttrsFromArtifact(bbTag.getArtifact());
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
CentralRepository.getInstance().setAttributeInstanceKnownStatus(eamArtifact, tagName.getKnownStatus());
}
@ -406,12 +405,15 @@ final class CaseEventListener implements PropertyChangeListener {
}
//if the file will have no tags with a status which would prevent the current status from being changed
if (!hasTagWithConflictingKnownStatus) {
final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeInstanceFromContent(contentTag.getContent());
Content taggedContent = contentTag.getContent();
if (taggedContent instanceof AbstractFile) {
final CorrelationAttributeInstance eamArtifact = CorrelationAttributeUtil.makeCorrAttrFromFile((AbstractFile)taggedContent);
if (eamArtifact != null) {
CentralRepository.getInstance().setAttributeInstanceKnownStatus(eamArtifact, tagName.getKnownStatus());
}
}
}
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Cannot update known status in central repository for tag: " + modifiedTagName, ex); //NON-NLS
} catch (CentralRepoException ex) {
@ -513,7 +515,7 @@ final class CaseEventListener implements PropertyChangeListener {
Content dataSource = dataSourceNameChangedEvent.getDataSource();
String newName = (String) event.getNewValue();
if (! StringUtils.isEmpty(newName)) {
if (!StringUtils.isEmpty(newName)) {
if (!CentralRepository.isEnabled()) {
return;

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2019 Basis Technology Corp.
* Copyright 2017-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -456,7 +456,7 @@ public class IngestEventsListener {
for (BlackboardArtifact bbArtifact : bbArtifacts) {
// eamArtifact will be null OR a EamArtifact containing one EamArtifactInstance.
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(bbArtifact, true);
List<CorrelationAttributeInstance> convertedArtifacts = CorrelationAttributeUtil.makeCorrAttrsFromArtifact(bbArtifact);
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
try {
// Only do something with this artifact if it's unique within the job

View File

@ -133,7 +133,7 @@
<Component id="lbUserName" alignment="0" min="-2" max="-2" attributes="0"/>
<Component id="lbPort" alignment="0" min="-2" max="-2" attributes="0"/>
<Group type="103" alignment="0" groupAlignment="1" max="-2" attributes="0">
<Component id="lbDatabaseDesc" alignment="0" max="32767" attributes="0"/>
<Component id="lbDatabaseDesc" alignment="0" pref="94" max="32767" attributes="0"/>
<Component id="lbUserPassword" alignment="0" max="32767" attributes="0"/>
</Group>
</Group>

View File

@ -45,6 +45,7 @@ import static org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoPlatf
import org.sleuthkit.autopsy.centralrepository.datamodel.PostgresCentralRepoSettings;
import org.sleuthkit.autopsy.centralrepository.datamodel.SqliteCentralRepoSettings;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.RdbmsCentralRepoFactory;
/**
* Configuration dialog for Central Repository database settings.
@ -447,8 +448,14 @@ public class EamDbSettingsDialog extends JDialog {
dbCreated = dbSettingsPostgres.createDatabase();
}
if (dbCreated) {
result = dbSettingsPostgres.initializeDatabaseSchema()
&& dbSettingsPostgres.insertDefaultDatabaseContent();
try {
RdbmsCentralRepoFactory centralRepoSchemaFactory = new RdbmsCentralRepoFactory(selectedPlatform, dbSettingsPostgres);
result = centralRepoSchemaFactory.initializeDatabaseSchema()
&& centralRepoSchemaFactory.insertDefaultDatabaseContent();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Unable to initialize database schema or insert contents into Postgres central repository.", ex);
}
}
if (!result) {
// Remove the incomplete database
@ -469,8 +476,14 @@ public class EamDbSettingsDialog extends JDialog {
dbCreated = dbSettingsSqlite.createDbDirectory();
}
if (dbCreated) {
result = dbSettingsSqlite.initializeDatabaseSchema()
&& dbSettingsSqlite.insertDefaultDatabaseContent();
try {
RdbmsCentralRepoFactory centralRepoSchemaFactory = new RdbmsCentralRepoFactory(selectedPlatform, dbSettingsSqlite);
result = centralRepoSchemaFactory.initializeDatabaseSchema()
&& centralRepoSchemaFactory.insertDefaultDatabaseContent();
} catch (CentralRepoException ex) {
logger.log(Level.SEVERE, "Unable to initialize database schema or insert contents into SQLite central repository.", ex);
}
}
if (!result) {
if (dbCreated) {

View File

@ -42,6 +42,7 @@ class CommandLineCommand {
*/
static enum InputType {
CASE_NAME,
CASE_TYPE,
CASES_BASE_DIR_PATH,
CASE_FOLDER_PATH,
DATA_SOURCE_PATH,

View File

@ -38,6 +38,7 @@ import org.netbeans.spi.sendopts.OptionProcessor;
import org.openide.LifecycleManager;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.Case.CaseType;
import org.sleuthkit.autopsy.casemodule.CaseActionException;
import org.sleuthkit.autopsy.casemodule.CaseDetails;
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
@ -157,7 +158,12 @@ public class CommandLineIngestManager {
Map<String, String> inputs = command.getInputs();
String baseCaseName = inputs.get(CommandLineCommand.InputType.CASE_NAME.name());
String rootOutputDirectory = inputs.get(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name());
openCase(baseCaseName, rootOutputDirectory);
CaseType caseType = CaseType.SINGLE_USER_CASE;
String caseTypeString = inputs.get(CommandLineCommand.InputType.CASE_TYPE.name());
if (caseTypeString != null && caseTypeString.equalsIgnoreCase(CommandLineOptionProcessor.CASETYPE_MULTI)) {
caseType = CaseType.MULTI_USER_CASE;
}
openCase(baseCaseName, rootOutputDirectory, caseType);
String outputDirPath = getOutputDirPath(caseForJob);
OutputGenerator.saveCreateCaseOutput(caseForJob, outputDirPath, baseCaseName);
@ -340,7 +346,7 @@ public class CommandLineIngestManager {
*
* @throws CaseActionException
*/
private void openCase(String baseCaseName, String rootOutputDirectory) throws CaseActionException {
private void openCase(String baseCaseName, String rootOutputDirectory, CaseType caseType) throws CaseActionException {
LOGGER.log(Level.INFO, "Opening case {0} in directory {1}", new Object[]{baseCaseName, rootOutputDirectory});
Path caseDirectoryPath = findCaseDirectory(Paths.get(rootOutputDirectory), baseCaseName);
@ -355,7 +361,7 @@ public class CommandLineIngestManager {
Case.createCaseDirectory(caseDirectoryPath.toString(), Case.CaseType.SINGLE_USER_CASE);
CaseDetails caseDetails = new CaseDetails(baseCaseName);
Case.createAsCurrentCase(Case.CaseType.SINGLE_USER_CASE, caseDirectoryPath.toString(), caseDetails);
Case.createAsCurrentCase(caseType, caseDirectoryPath.toString(), caseDetails);
}
caseForJob = Case.getCurrentCase();

View File

@ -31,6 +31,7 @@ import org.netbeans.spi.sendopts.Env;
import org.netbeans.spi.sendopts.Option;
import org.netbeans.spi.sendopts.OptionProcessor;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.featureaccess.FeatureAccessUtils;
/**
* This class can be used to add command line options to Autopsy
@ -40,6 +41,7 @@ public class CommandLineOptionProcessor extends OptionProcessor {
private static final Logger logger = Logger.getLogger(CommandLineOptionProcessor.class.getName());
private final Option caseNameOption = Option.requiredArgument('n', "caseName");
private final Option caseTypeOption = Option.requiredArgument('t', "caseType");
private final Option caseBaseDirOption = Option.requiredArgument('o', "caseBaseDir");
private final Option createCaseCommandOption = Option.withoutArgument('c', "createCase");
private final Option dataSourcePathOption = Option.requiredArgument('s', "dataSourcePath");
@ -55,11 +57,15 @@ public class CommandLineOptionProcessor extends OptionProcessor {
private final List<CommandLineCommand> commands = new ArrayList<>();
final static String CASETYPE_MULTI = "multi";
final static String CASETYPE_SINGLE = "single";
@Override
protected Set<Option> getOptions() {
Set<Option> set = new HashSet<>();
set.add(createCaseCommandOption);
set.add(caseNameOption);
set.add(caseTypeOption);
set.add(caseBaseDirOption);
set.add(dataSourcePathOption);
set.add(addDataSourceCommandOption);
@ -107,6 +113,37 @@ public class CommandLineOptionProcessor extends OptionProcessor {
}
}
String caseType = "";
if (values.containsKey(caseTypeOption)) {
argDirs = values.get(caseTypeOption);
if (argDirs.length < 1) {
logger.log(Level.SEVERE, "Missing argument 'caseType'");
System.err.println("Missing argument 'caseType'");
return;
}
caseType = argDirs[0];
if (caseType == null || caseType.isEmpty()) {
logger.log(Level.SEVERE, "'caseType' argument is empty");
System.err.println("'caseType' argument is empty");
return;
}
if (!caseType.equalsIgnoreCase(CASETYPE_MULTI) && !caseType.equalsIgnoreCase(CASETYPE_SINGLE)) {
logger.log(Level.SEVERE, "'caseType' argument is invalid");
System.err.println("'caseType' argument is invalid");
return;
}
if (caseType.equalsIgnoreCase(CASETYPE_MULTI) && !FeatureAccessUtils.canCreateMultiUserCases()) {
logger.log(Level.SEVERE, "Unable to create multi user case.");
System.err.println("Unable to create multi user case. Confirm that multi user settings are configured correctly.");
return;
}
}
String caseBaseDir = "";
if (values.containsKey(caseBaseDirOption)) {
argDirs = values.get(caseBaseDirOption);
@ -241,6 +278,7 @@ public class CommandLineOptionProcessor extends OptionProcessor {
CommandLineCommand newCommand = new CommandLineCommand(CommandLineCommand.CommandType.CREATE_CASE);
newCommand.addInputValue(CommandLineCommand.InputType.CASE_NAME.name(), inputCaseName);
newCommand.addInputValue(CommandLineCommand.InputType.CASES_BASE_DIR_PATH.name(), caseBaseDir);
newCommand.addInputValue(CommandLineCommand.InputType.CASE_TYPE.name(), caseType);
commands.add(newCommand);
runFromCommandLine = true;
}

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -123,8 +123,7 @@ final public class CommonAttributeCaseSearchResults {
if (currentCaseDataSourceMap == null) { //there are no results
return filteredCaseNameToDataSourcesTree;
}
CorrelationAttributeInstance.Type attributeType = CorrelationAttributeInstance
.getDefaultCorrelationTypes()
CorrelationAttributeInstance.Type attributeType = CentralRepository.getInstance().getCorrelationTypes()
.stream()
.filter(filterType -> filterType.getId() == resultTypeId)
.findFirst().get();

View File

@ -128,13 +128,12 @@ final public class CommonAttributeCountSearchResults {
return;
}
CorrelationAttributeInstance.Type attributeType = CorrelationAttributeInstance
.getDefaultCorrelationTypes()
CentralRepository eamDb = CentralRepository.getInstance();
CorrelationAttributeInstance.Type attributeType = eamDb.getCorrelationTypes()
.stream()
.filter(filterType -> filterType.getId() == this.resultTypeId)
.findFirst().get();
CentralRepository eamDb = CentralRepository.getInstance();
Map<Integer, List<CommonAttributeValue>> itemsToRemove = new HashMap<>();
//Call countUniqueDataSources once to reduce the number of DB queries needed to get

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -255,7 +255,7 @@ final class CommonAttributePanel extends javax.swing.JDialog implements Observer
filterByDocuments = interCasePanel.documentsCheckboxIsSelected();
}
if (corType == null) {
corType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
corType = CentralRepository.getInstance().getCorrelationTypes().get(0);
}
if (caseId == InterCasePanel.NO_CASE_SELECTED) {
builder = new AllInterCaseCommonAttributeSearcher(filterByMedia, filterByDocuments, corType, percentageThreshold);
@ -366,7 +366,7 @@ final class CommonAttributePanel extends javax.swing.JDialog implements Observer
filterByDocuments = interCasePanel.documentsCheckboxIsSelected();
}
if (corType == null) {
corType = CorrelationAttributeInstance.getDefaultCorrelationTypes().get(0);
corType = CentralRepository.getInstance().getCorrelationTypes().get(0);
}
if (caseId == InterCasePanel.NO_CASE_SELECTED) {
builder = new AllInterCaseCommonAttributeSearcher(filterByMedia, filterByDocuments, corType, percentageThreshold);

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -31,6 +31,7 @@ import java.util.logging.Level;
import javax.swing.ComboBoxModel;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
@ -117,7 +118,7 @@ public final class InterCasePanel extends javax.swing.JPanel {
void setupCorrelationTypeFilter() {
this.correlationTypeFilters = new HashMap<>();
try {
List<CorrelationAttributeInstance.Type> types = CorrelationAttributeInstance.getDefaultCorrelationTypes();
List<CorrelationAttributeInstance.Type> types = CentralRepository.getInstance().getCorrelationTypes();
for (CorrelationAttributeInstance.Type type : types) {
correlationTypeFilters.put(type.getDisplayName(), type);
this.correlationTypeComboBox.addItem(type.getDisplayName());

View File

@ -65,7 +65,6 @@ import org.sleuthkit.datamodel.CommunicationsFilter.AccountTypeFilter;
import org.sleuthkit.datamodel.CommunicationsFilter.DateRangeFilter;
import org.sleuthkit.datamodel.CommunicationsFilter.DeviceFilter;
import org.sleuthkit.datamodel.CommunicationsFilter.MostRecentFilter;
import org.sleuthkit.datamodel.CommunicationsManager;
import org.sleuthkit.datamodel.DataSource;
import static org.sleuthkit.datamodel.Relationship.Type.CALL_LOG;
import static org.sleuthkit.datamodel.Relationship.Type.CONTACT;
@ -149,7 +148,6 @@ final public class FiltersPanel extends JPanel {
updateTimeZone();
validationListener = itemEvent -> validateFilters();
updateFilters(true);
UserPreferences.addChangeListener(preferenceChangeEvent -> {
if (preferenceChangeEvent.getKey().equals(UserPreferences.DISPLAY_TIMES_IN_LOCAL_TIME)
|| preferenceChangeEvent.getKey().equals(UserPreferences.TIME_ZONE_FOR_DISPLAYS)) {
@ -239,9 +237,15 @@ final public class FiltersPanel extends JPanel {
* Updates the filter widgets to reflect he data sources/types in the case.
*/
private boolean updateFilters(boolean initialState) {
boolean newAccountType = updateAccountTypeFilter(initialState);
boolean newDeviceFilter = updateDeviceFilter(initialState);
final SleuthkitCase sleuthkitCase;
try {
sleuthkitCase = Case.getCurrentCaseThrows().getSleuthkitCase();
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Unable to perform filter update, update has been cancelled. Case is closed.", ex);
return false;
}
boolean newAccountType = updateAccountTypeFilter(initialState, sleuthkitCase);
boolean newDeviceFilter = updateDeviceFilter(initialState, sleuthkitCase);
// both or either are true, return true;
return newAccountType || newDeviceFilter;
}
@ -277,17 +281,18 @@ final public class FiltersPanel extends JPanel {
}
/**
* Populate the Account Types filter widgets
* Populate the Account Types filter widgets.
*
* @param selected the initial value for the account type checkbox
* @param selected The initial value for the account type checkbox.
* @param sleuthkitCase The sleuthkit case for containing the account
* information.
*
* @return True, if a new accountType was found
*/
private boolean updateAccountTypeFilter(boolean selected) {
private boolean updateAccountTypeFilter(boolean selected, SleuthkitCase sleuthkitCase) {
boolean newOneFound = false;
try {
final CommunicationsManager communicationsManager = Case.getCurrentCaseThrows().getSleuthkitCase().getCommunicationsManager();
List<Account.Type> accountTypesInUse = communicationsManager.getAccountTypesInUse();
List<Account.Type> accountTypesInUse = sleuthkitCase.getCommunicationsManager().getAccountTypesInUse();
for (Account.Type type : accountTypesInUse) {
@ -302,10 +307,7 @@ final public class FiltersPanel extends JPanel {
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to update to update Account Types Filter", ex);
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "A case is required to update the account types filter.", ex);
}
if (newOneFound) {
accountTypeListPane.revalidate();
}
@ -333,17 +335,17 @@ final public class FiltersPanel extends JPanel {
}
/**
* Populate the devices filter widgets
* Populate the devices filter widgets.
*
* @param selected Sets the initial state of device check box
* @param selected Sets the initial state of device check box.
* @param sleuthkitCase The sleuthkit case for containing the data source
* information.
*
* @return true if a new device was found
*/
private boolean updateDeviceFilter(boolean selected) {
private boolean updateDeviceFilter(boolean selected, SleuthkitCase sleuthkitCase) {
boolean newOneFound = false;
try {
final SleuthkitCase sleuthkitCase = Case.getCurrentCaseThrows().getSleuthkitCase();
for (DataSource dataSource : sleuthkitCase.getDataSources()) {
String dsName = sleuthkitCase.getContentById(dataSource.getId()).getName();
if (devicesMap.containsKey(dataSource.getDeviceId())) {
@ -358,8 +360,6 @@ final public class FiltersPanel extends JPanel {
newOneFound = true;
}
} catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Filter update cancelled. Case is closed.");
} catch (TskCoreException tskCoreException) {
logger.log(Level.SEVERE, "There was a error loading the datasources for the case.", tskCoreException);
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -111,7 +111,7 @@ final class CorrelationCaseChildNodeFactory extends ChildFactory<CorrelationCase
private CorrelationAttributeInstance.Type getCorrelationType(Account.Type accountType) throws CentralRepoException {
if (correlationTypeMap == null) {
correlationTypeMap = new HashMap<>();
List<CorrelationAttributeInstance.Type> correcationTypeList = CorrelationAttributeInstance.getDefaultCorrelationTypes();
List<CorrelationAttributeInstance.Type> correcationTypeList = CentralRepository.getInstance().getCorrelationTypes();
correcationTypeList.forEach((type) -> {
correlationTypeMap.put(type.getId(), type);
});

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -198,7 +198,7 @@ public class AnnotationsContentViewer extends javax.swing.JPanel implements Data
startSection(html, "Central Repository Comments");
List<CorrelationAttributeInstance> instancesList = new ArrayList<>();
if (artifact != null) {
instancesList.addAll(CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(artifact, false));
instancesList.addAll(CorrelationAttributeUtil.makeCorrAttrsFromArtifact(artifact));
}
try {
List<CorrelationAttributeInstance.Type> artifactTypes = CentralRepository.getInstance().getDefinedCorrelationTypes();

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.coreutils;
import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
@ -288,13 +287,92 @@ public final class AppSQLiteDB {
}
/**
* Returns connection meta data.
* Checks if a column exists in a table.
*
* @return DatabaseMetaData
* @throws SQLException
* @param tableName name of the table
* @param columnName column name to check
*
* @return true if the column exists, false otherwise
* @throws TskCoreException
*/
public DatabaseMetaData getConnectionMetadata() throws SQLException {
return connection.getMetaData();
public boolean columnExists(String tableName, String columnName) throws TskCoreException {
boolean columnExists = false;
Statement colExistsStatement = null;
ResultSet resultSet = null;
try {
colExistsStatement = connection.createStatement();
String tableInfoQuery = "PRAGMA table_info(%s)"; //NON-NLS
resultSet = colExistsStatement.executeQuery(String.format(tableInfoQuery, tableName));
while (resultSet.next()) {
if (resultSet.getString("name").equalsIgnoreCase(columnName)) {
columnExists = true;
break;
}
}
} catch (SQLException ex) {
throw new TskCoreException("Error checking if column " + columnName + "exists ", ex);
} finally {
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException ex2) {
logger.log(Level.WARNING, "Failed to close resultset after checking column", ex2);
}
}
if (colExistsStatement != null) {
try {
colExistsStatement.close();
} catch (SQLException ex2) {
logger.log(Level.SEVERE, "Error closing Statement", ex2); //NON-NLS
}
}
}
return columnExists;
}
/**
* Checks if a table exists in the case database.
*
* @param tableName name of the table to check
*
* @return true if the table exists, false otherwise
* @throws TskCoreException
*/
public boolean tableExists(String tableName) throws TskCoreException {
boolean tableExists = false;
Statement tableExistsStatement = null;
ResultSet resultSet = null;
try {
tableExistsStatement = connection.createStatement();
resultSet = tableExistsStatement.executeQuery("SELECT name FROM sqlite_master WHERE type='table'"); //NON-NLS
while (resultSet.next()) {
if (resultSet.getString("name").equalsIgnoreCase(tableName)) { //NON-NLS
tableExists = true;
break;
}
}
} catch (SQLException ex) {
throw new TskCoreException("Error checking if table " + tableName + "exists ", ex);
} finally {
if (resultSet != null) {
try {
resultSet.close();
} catch (SQLException ex2) {
logger.log(Level.WARNING, "Failed to close resultset after checking table", ex2);
}
}
if (tableExistsStatement != null) {
try {
tableExistsStatement.close();
} catch (SQLException ex2) {
logger.log(Level.SEVERE, "Error closing Statement", ex2); //NON-NLS
}
}
}
return tableExists;
}
/**

View File

@ -69,7 +69,7 @@ public class PlatformUtil {
* @return absolute path string to the install root dir
*/
public static String getInstallPath() {
File coreFolder = InstalledFileLocator.getDefault().locate("core", PlatformUtil.class.getPackage().getName(), false); //NON-NLS
File coreFolder = InstalledFileLocator.getDefault().locate("core", "org.sleuthkit.autopsy.core", false); //NON-NLS
File rootPath = coreFolder.getParentFile().getParentFile();
return rootPath.getAbsolutePath();
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2020 Basis Technology Corp.
* Copyright 2012-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -549,7 +549,7 @@ public abstract class AbstractAbstractFileNode<T extends AbstractFile> extends A
protected CorrelationAttributeInstance getCorrelationAttributeInstance() {
CorrelationAttributeInstance attribute = null;
if (CentralRepository.isEnabled() && !UserPreferences.getHideSCOColumns()) {
attribute = CorrelationAttributeUtil.getInstanceFromContent(content);
attribute = CorrelationAttributeUtil.getCorrAttrForFile(content);
}
return attribute;
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2020 Basis Technology Corp.
* Copyright 2012-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -605,8 +605,8 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
@Override
protected final CorrelationAttributeInstance getCorrelationAttributeInstance() {
CorrelationAttributeInstance correlationAttribute = null;
if (CentralRepository.isEnabled()) {
correlationAttribute = CorrelationAttributeUtil.getInstanceFromContent(associated);
if (CentralRepository.isEnabled() && associated instanceof AbstractFile) {
correlationAttribute = CorrelationAttributeUtil.getCorrAttrForFile((AbstractFile)associated);
}
return correlationAttribute;
}

View File

@ -97,7 +97,7 @@ class GetSCOTask implements Runnable {
logger.log(Level.WARNING, "Unable to get correlation type or value to determine value for O column for artifact", ex);
}
} else {
List<CorrelationAttributeInstance> listOfPossibleAttributes = CorrelationAttributeUtil.makeInstancesFromBlackboardArtifact(bbArtifact, false);
List<CorrelationAttributeInstance> listOfPossibleAttributes = CorrelationAttributeUtil.makeCorrAttrsFromArtifact(bbArtifact);
if (listOfPossibleAttributes.size() > 1) {
//Don't display anything if there is more than 1 correlation property for an artifact but let the user know
description = Bundle.GetSCOTask_occurrences_multipleProperties();

View File

@ -65,8 +65,6 @@ FileSearchPanel.stepTwoLabel.text=Step 2: Filter which images to show
FileSearchPanel.stepThreeLabel.text=Step 3: Choose display settings
DiscoveryTopComponent.stepOneLabel.text=Step 1: Pick File Type
DiscoveryTopComponent.documentsButton.text=Documents
DocumentPanel.countLabel.toolTipText=
DocumentPanel.fileSizeLabel.toolTipText=
DocumentPanel.documentType.text=
DocumentPanel.isDeletedLabel.toolTipText=
ImageThumbnailPanel.isDeletedLabel.toolTipText=

View File

@ -14,8 +14,8 @@ DiscoveryUiUtility.megaBytes.text=MB
# {1} - units
DiscoveryUiUtility.sizeLabel.text=Size: {0} {1}
DiscoveryUiUtility.terraBytes.text=TB
# {0} - extension
DocumentPanel.documentType.extension.text=Extension: {0}
# {0} - otherInstanceCount
DocumentPanel.nameLabel.more.text=\ and {0} more
DocumentWrapper.previewInitialValue=Preview not generated yet.
FileGroup.groupSortingAlgorithm.groupName.text=Group Name
FileGroup.groupSortingAlgorithm.groupSize.text=Group Size
@ -24,6 +24,8 @@ FileGroup.groupSortingAlgorithm.groupSize.text=Group Size
FileSearch.DataSourceGroupKey.datasourceAndID={0}(ID: {1})
# {0} - Data source ID
FileSearch.DataSourceGroupKey.idOnly=Data source (ID: {0})
FileSearch.documentSummary.noBytes=No bytes read for document, unable to display preview.
FileSearch.documentSummary.noPreview=No preview available.
FileSearch.FileTagGroupKey.noSets=None
# {0} - file name
FileSearch.genVideoThumb.progress.text=extracting temporary file {0}
@ -173,9 +175,9 @@ FileSorter.SortingMethod.fullPath.displayName=Full Path
FileSorter.SortingMethod.keywordlist.displayName=Keyword List Names
GroupsListPanel.noResults.message.text=No results were found for the selected filters.
GroupsListPanel.noResults.title.text=No results found
# {0} - numberOfInstances
ImageThumbnailPanel.countLabel.text=Number of Instances: {0}
ImageThumbnailPanel.isDeleted.text=All instances of file are deleted.
# {0} - otherInstanceCount
ImageThumbnailPanel.nameLabel.more.text=\ and {0} more
OpenFileDiscoveryAction.resultsIncomplete.text=Results may be incomplete
ResultFile.score.interestingResult.description=At least one instance of the file has an interesting result associated with it.
ResultFile.score.notableFile.description=At least one instance of the file was recognized as notable.
@ -185,8 +187,7 @@ ResultFile.score.taggedFile.description=At least one instance of the file has be
# {1} - totalPages
ResultsPanel.currentPage.displayValue=Page: {0} of {1}
ResultsPanel.currentPageLabel.text=Page: -
ResultsPanel.documentPreviewWorker.noBytes=No bytes read for document, unable to display preview.
ResultsPanel.documentPreviewWorker.noPreview=No preview available.
ResultsPanel.documentPreview.text=Document preview creation cancelled.
# {0} - selectedPage
# {1} - maxPage
ResultsPanel.invalidPageNumber.message=The selected page number {0} does not exist. Please select a value from 1 to {1}.
@ -209,19 +210,18 @@ FileSearchPanel.stepTwoLabel.text=Step 2: Filter which images to show
FileSearchPanel.stepThreeLabel.text=Step 3: Choose display settings
DiscoveryTopComponent.stepOneLabel.text=Step 1: Pick File Type
DiscoveryTopComponent.documentsButton.text=Documents
DocumentPanel.countLabel.toolTipText=
DocumentPanel.fileSizeLabel.toolTipText=
DocumentPanel.documentType.text=
DocumentPanel.isDeletedLabel.toolTipText=
ImageThumbnailPanel.isDeletedLabel.toolTipText=
ResultsPanel.unableToCreate.text=Unable to create summary.
ResultsPanel.viewFileInDir.name=View File in Directory
VideoThumbnailPanel.bytes.text=bytes
# {0} - numberOfInstances
VideoThumbnailPanel.countLabel.text=Number of Instances: {0}
VideoThumbnailPanel.deleted.text=All instances of file are deleted.
VideoThumbnailPanel.gigaBytes.text=GB
VideoThumbnailPanel.kiloBytes.text=KB
VideoThumbnailPanel.megaBytes.text=MB
# {0} - otherInstanceCount
VideoThumbnailPanel.nameLabel.more.text=\ and {0} more
# {0} - fileSize
# {1} - units
VideoThumbnailPanel.sizeLabel.text=Size: {0} {1}

View File

@ -27,15 +27,14 @@
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<Component id="countLabel" min="-2" pref="530" max="-2" attributes="0"/>
<EmptySpace pref="81" max="32767" attributes="0"/>
<Component id="fileSizeLabel" max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="isDeletedLabel" min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" max="-2" attributes="0"/>
<Component id="scoreLabel" min="-2" max="-2" attributes="0"/>
</Group>
<Component id="fileSizeLabel" alignment="1" max="32767" attributes="0"/>
<Component id="previewScrollPane" max="32767" attributes="0"/>
<Component id="documentType" alignment="0" max="32767" attributes="0"/>
<Component id="previewScrollPane" pref="649" max="32767" attributes="0"/>
<Component id="nameLabel" alignment="0" max="32767" attributes="0"/>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
@ -45,16 +44,14 @@
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="1" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="documentType" min="-2" pref="16" max="-2" attributes="0"/>
<Component id="nameLabel" min="-2" pref="16" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="previewScrollPane" min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="16" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="scoreLabel" alignment="0" min="-2" max="-2" attributes="0"/>
<Component id="isDeletedLabel" alignment="0" min="-2" max="-2" attributes="0"/>
<Component id="countLabel" alignment="1" min="-2" max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="16" max="-2" attributes="0"/>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
@ -62,22 +59,6 @@
</DimensionLayout>
</Layout>
<SubComponents>
<Component class="javax.swing.JLabel" name="countLabel">
<Properties>
<Property name="toolTipText" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/filequery/Bundle.properties" key="DocumentPanel.countLabel.toolTipText" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[159, 12]"/>
</Property>
<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[159, 12]"/>
</Property>
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[159, 12]"/>
</Property>
</Properties>
</Component>
<Component class="javax.swing.JLabel" name="isDeletedLabel">
<Properties>
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
@ -121,12 +102,7 @@
</Property>
</Properties>
</Component>
<Component class="javax.swing.JLabel" name="documentType">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/filequery/Bundle.properties" key="DocumentPanel.documentType.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Component class="javax.swing.JLabel" name="nameLabel">
</Component>
<Container class="javax.swing.JScrollPane" name="previewScrollPane">
<Properties>

View File

@ -52,21 +52,15 @@ public class DocumentPanel extends javax.swing.JPanel implements ListCellRendere
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
countLabel = new javax.swing.JLabel();
isDeletedLabel = new javax.swing.JLabel();
scoreLabel = new javax.swing.JLabel();
fileSizeLabel = new javax.swing.JLabel();
documentType = new javax.swing.JLabel();
nameLabel = new javax.swing.JLabel();
javax.swing.JScrollPane previewScrollPane = new javax.swing.JScrollPane();
previewTextArea = new javax.swing.JTextArea();
setBorder(javax.swing.BorderFactory.createEtchedBorder());
countLabel.setToolTipText(org.openide.util.NbBundle.getMessage(DocumentPanel.class, "DocumentPanel.countLabel.toolTipText")); // NOI18N
countLabel.setMaximumSize(new java.awt.Dimension(159, 12));
countLabel.setMinimumSize(new java.awt.Dimension(159, 12));
countLabel.setPreferredSize(new java.awt.Dimension(159, 12));
isDeletedLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/file-icon-deleted.png"))); // NOI18N
isDeletedLabel.setToolTipText(org.openide.util.NbBundle.getMessage(DocumentPanel.class, "DocumentPanel.isDeletedLabel.toolTipText")); // NOI18N
isDeletedLabel.setMaximumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
@ -81,8 +75,6 @@ public class DocumentPanel extends javax.swing.JPanel implements ListCellRendere
fileSizeLabel.setToolTipText(org.openide.util.NbBundle.getMessage(DocumentPanel.class, "DocumentPanel.fileSizeLabel.toolTipText")); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(documentType, org.openide.util.NbBundle.getMessage(DocumentPanel.class, "DocumentPanel.documentType.text")); // NOI18N
previewScrollPane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
previewTextArea.setEditable(false);
@ -104,57 +96,55 @@ public class DocumentPanel extends javax.swing.JPanel implements ListCellRendere
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(countLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 530, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 81, Short.MAX_VALUE)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(isDeletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(fileSizeLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(previewScrollPane)
.addComponent(documentType, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addComponent(previewScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 649, Short.MAX_VALUE)
.addComponent(nameLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.addComponent(documentType, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(nameLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(previewScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(isDeletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(countLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel countLabel;
private javax.swing.JLabel documentType;
private javax.swing.JLabel fileSizeLabel;
private javax.swing.JLabel isDeletedLabel;
private javax.swing.JLabel nameLabel;
private javax.swing.JTextArea previewTextArea;
private javax.swing.JLabel scoreLabel;
// End of variables declaration//GEN-END:variables
@Messages({"# {0} - extension",
"DocumentPanel.documentType.extension.text=Extension: {0}"})
@Messages({"# {0} - otherInstanceCount",
"DocumentPanel.nameLabel.more.text= and {0} more"})
@Override
public Component getListCellRendererComponent(JList<? extends DocumentWrapper> list, DocumentWrapper value, int index, boolean isSelected, boolean cellHasFocus) {
fileSizeLabel.setText(DiscoveryUiUtils.getFileSizeString(value.getResultFile().getFirstInstance().getSize()));
countLabel.setText(Bundle.ImageThumbnailPanel_countLabel_text(value.getResultFile().getAllInstances().size()));
documentType.setText(Bundle.DocumentPanel_documentType_extension_text(value.getResultFile().getFirstInstance().getNameExtension())); //WJS-TODO fill this in with a document type instead of just DOCUMENT
String nameText = value.getResultFile().getFirstInstance().getParentPath() + value.getResultFile().getFirstInstance().getName();
if (value.getResultFile().getAllInstances().size() > 1) {
nameText += Bundle.DocumentPanel_nameLabel_more_text(value.getResultFile().getAllInstances().size() - 1);
}
nameLabel.setText(nameText);
previewTextArea.setText(value.getPreview());
previewTextArea.setCaretPosition(0);
DiscoveryUiUtils.setDeletedIcon(value.getResultFile().isDeleted(), isDeletedLabel);
DiscoveryUiUtils.setScoreIcon(value.getResultFile(), scoreLabel);
setBackground(isSelected ? SELECTION_COLOR : list.getBackground());
return this;
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -25,11 +25,13 @@ import java.awt.Image;
import java.awt.image.BufferedImage;
import java.awt.image.RenderedImage;
import java.io.IOException;
import java.io.Reader;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -43,9 +45,11 @@ import java.util.logging.Level;
import javax.imageio.ImageIO;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
import org.netbeans.api.progress.ProgressHandle;
import org.opencv.core.Mat;
import org.opencv.highgui.VideoCapture;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@ -71,6 +75,9 @@ import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.textextractors.TextExtractor;
import org.sleuthkit.autopsy.textextractors.TextExtractorFactory;
import org.sleuthkit.autopsy.textsummarizer.TextSummarizer;
/**
* Main class to perform the file search.
@ -84,6 +91,8 @@ class FileSearch {
private static final Cache<SearchKey, Map<GroupKey, List<ResultFile>>> searchCache = CacheBuilder.newBuilder()
.maximumSize(MAXIMUM_CACHE_SIZE)
.build();
private static final int PREVIEW_SIZE = 256;
private static volatile TextSummarizer summarizerToUse = null;
/**
* Run the file search and returns the SearchResults object for debugging.
@ -239,6 +248,78 @@ class FileSearch {
return page;
}
/**
* Get a summary for the specified AbstractFile. If no TextSummarizers exist
* get the beginning of the file.
*
* @param file The AbstractFile to summarize.
*
* @return The summary or beginning of the specified file as a String.
*/
@NbBundle.Messages({"FileSearch.documentSummary.noPreview=No preview available.",
"FileSearch.documentSummary.noBytes=No bytes read for document, unable to display preview."})
static String summarize(AbstractFile file) {
String summary = null;
TextSummarizer localSummarizer = summarizerToUse;
if (localSummarizer == null) {
synchronized (searchCache) {
if (localSummarizer == null) {
localSummarizer = getLocalSummarizer();
}
}
}
if (localSummarizer != null) {
try {
//a summary of length 40 seems to fit without vertical scroll bars
summary = localSummarizer.summarize(file, 40);
} catch (IOException ex) {
return Bundle.FileSearch_documentSummary_noPreview();
}
}
if (StringUtils.isBlank(summary)) {
//no summarizer was found or summary was empty just grab the beginning of the file
summary = getFirstLines(file);
}
return summary;
}
/**
* Get the beginning of text from the specified AbstractFile.
*
* @param file The AbstractFile to get text from.
*
* @return The beginning of text from the specified AbstractFile.
*/
private static String getFirstLines(AbstractFile file) {
try (Reader reader = TextExtractorFactory.getExtractor(file, null).getReader()) {
char[] cbuf = new char[PREVIEW_SIZE];
reader.read(cbuf, 0, PREVIEW_SIZE);
return new String(cbuf);
} catch (IOException ex) {
return Bundle.FileSearch_documentSummary_noBytes();
} catch (TextExtractorFactory.NoTextExtractorFound | TextExtractor.InitReaderException ex) {
return Bundle.FileSearch_documentSummary_noPreview();
}
}
/**
* Get the first TextSummarizer found by a lookup of TextSummarizers.
*
* @return The first TextSummarizer found by a lookup of TextSummarizers.
*
* @throws IOException
*/
private static TextSummarizer getLocalSummarizer() {
Collection<? extends TextSummarizer> summarizers
= Lookup.getDefault().lookupAll(TextSummarizer.class
);
if (!summarizers.isEmpty()) {
summarizerToUse = summarizers.iterator().next();
return summarizerToUse;
}
return null;
}
/**
* Run the file search. Caching new results for access at later time.
*
@ -597,7 +678,6 @@ class FileSearch {
int framePos = Integer.valueOf(FilenameUtils.getBaseName(fileName).substring(2));
framePositions[thumbnailNumber] = framePos;
thumbnailNumber++;
}
thumbnailWrapper.setThumbnails(videoThumbnails, framePositions);
}

View File

@ -20,36 +20,36 @@
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Group type="103" groupAlignment="0" max="-2" attributes="0">
<Component id="thumbnailPanel" pref="201" max="32767" attributes="0"/>
<Component id="fileSizeLabel" alignment="0" max="32767" attributes="0"/>
</Group>
<Group type="102" attributes="0">
<Component id="countLabel" max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="163" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="isDeletedLabel" min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" max="-2" attributes="0"/>
<Component id="scoreLabel" min="-2" max="-2" attributes="0"/>
</Group>
<Group type="103" groupAlignment="1" max="-2" attributes="0">
<Component id="nameLabel" alignment="0" max="32767" attributes="0"/>
<Component id="thumbnailPanel" alignment="0" pref="201" max="32767" attributes="0"/>
</Group>
<EmptySpace min="-2" max="-2" attributes="0"/>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="nameLabel" min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="thumbnailPanel" min="-2" pref="178" max="-2" attributes="0"/>
<EmptySpace type="unrelated" max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="16" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="scoreLabel" alignment="0" min="-2" max="-2" attributes="0"/>
<Component id="isDeletedLabel" min="-2" max="-2" attributes="0"/>
<Component id="countLabel" alignment="1" min="-2" max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="16" max="-2" attributes="0"/>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
@ -82,7 +82,7 @@
<Property name="toolTipText" type="java.lang.String" value=""/>
</Properties>
</Component>
<Component class="javax.swing.JLabel" name="countLabel">
<Component class="javax.swing.JLabel" name="nameLabel">
<Properties>
<Property name="toolTipText" type="java.lang.String" value=""/>
<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">

View File

@ -36,6 +36,7 @@ public class ImageThumbnailPanel extends javax.swing.JPanel implements ListCellR
private static final long serialVersionUID = 1L;
private static final Color SELECTION_COLOR = new Color(0, 120, 215);
private static final int MAX_NAME_STRING = 30;
/**
* Creates new form ImageThumbnailPanel
@ -56,7 +57,7 @@ public class ImageThumbnailPanel extends javax.swing.JPanel implements ListCellR
javax.swing.JPanel thumbnailPanel = new javax.swing.JPanel();
thumbnailLabel = new javax.swing.JLabel();
fileSizeLabel = new javax.swing.JLabel();
countLabel = new javax.swing.JLabel();
nameLabel = new javax.swing.JLabel();
isDeletedLabel = new javax.swing.JLabel();
scoreLabel = new javax.swing.JLabel();
@ -68,22 +69,22 @@ public class ImageThumbnailPanel extends javax.swing.JPanel implements ListCellR
fileSizeLabel.setToolTipText("");
countLabel.setToolTipText("");
countLabel.setMaximumSize(new java.awt.Dimension(159, 12));
countLabel.setMinimumSize(new java.awt.Dimension(159, 12));
countLabel.setPreferredSize(new java.awt.Dimension(159, 12));
nameLabel.setToolTipText("");
nameLabel.setMaximumSize(new java.awt.Dimension(159, 12));
nameLabel.setMinimumSize(new java.awt.Dimension(159, 12));
nameLabel.setPreferredSize(new java.awt.Dimension(159, 12));
isDeletedLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/file-icon-deleted.png"))); // NOI18N
isDeletedLabel.setToolTipText(org.openide.util.NbBundle.getMessage(ImageThumbnailPanel.class, "ImageThumbnailPanel.isDeletedLabel.toolTipText")); // NOI18N
isDeletedLabel.setMaximumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
isDeletedLabel.setMinimumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
isDeletedLabel.setPreferredSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
isDeletedLabel.setMaximumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
isDeletedLabel.setMinimumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
isDeletedLabel.setPreferredSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
scoreLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/red-circle-exclamation.png"))); // NOI18N
scoreLabel.setToolTipText("");
scoreLabel.setMaximumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setMinimumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setPreferredSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setMaximumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
scoreLabel.setMinimumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
scoreLabel.setPreferredSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
@ -92,50 +93,57 @@ public class ImageThumbnailPanel extends javax.swing.JPanel implements ListCellR
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(thumbnailPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 201, Short.MAX_VALUE)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGroup(layout.createSequentialGroup()
.addComponent(countLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 163, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(isDeletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING, false)
.addComponent(nameLabel, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(thumbnailPanel, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.DEFAULT_SIZE, 201, Short.MAX_VALUE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(nameLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(thumbnailPanel, javax.swing.GroupLayout.PREFERRED_SIZE, 178, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(isDeletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(countLabel, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 16, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel countLabel;
private javax.swing.JLabel fileSizeLabel;
private javax.swing.JLabel isDeletedLabel;
private javax.swing.JLabel nameLabel;
private javax.swing.JLabel scoreLabel;
private javax.swing.JLabel thumbnailLabel;
// End of variables declaration//GEN-END:variables
@NbBundle.Messages({
"# {0} - numberOfInstances",
"ImageThumbnailPanel.countLabel.text=Number of Instances: {0}",
"# {0} - otherInstanceCount",
"ImageThumbnailPanel.nameLabel.more.text= and {0} more",
"ImageThumbnailPanel.isDeleted.text=All instances of file are deleted."})
@Override
public Component getListCellRendererComponent(JList<? extends ImageThumbnailWrapper> list, ImageThumbnailWrapper value, int index, boolean isSelected, boolean cellHasFocus) {
fileSizeLabel.setText(DiscoveryUiUtils.getFileSizeString(value.getResultFile().getFirstInstance().getSize()));
countLabel.setText(Bundle.ImageThumbnailPanel_countLabel_text(value.getResultFile().getAllInstances().size()));
String nameText = value.getResultFile().getFirstInstance().getParentPath() + value.getResultFile().getFirstInstance().getName();
if (value.getResultFile().getAllInstances().size() > 1) {
nameText += Bundle.ImageThumbnailPanel_nameLabel_more_text(value.getResultFile().getAllInstances().size() - 1);
}
if (nameText.length() > MAX_NAME_STRING) {
nameText = "..." + nameText.substring(nameText.length() - (MAX_NAME_STRING - 3));
}
nameLabel.setText(nameText);
thumbnailLabel.setIcon(new ImageIcon(value.getThumbnail()));
DiscoveryUiUtils.setDeletedIcon(value.getResultFile().isDeleted(), isDeletedLabel);
DiscoveryUiUtils.setScoreIcon(value.getResultFile(), scoreLabel);
@ -163,5 +171,4 @@ public class ImageThumbnailPanel extends javax.swing.JPanel implements ListCellR
return null;
}
}

View File

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -28,6 +28,8 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javax.swing.DefaultComboBoxModel;
import javax.swing.DefaultListCellRenderer;
@ -38,14 +40,12 @@ import javax.swing.JPopupMenu;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.event.ListSelectionListener;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepoException;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.StringExtract;
import org.sleuthkit.autopsy.datamodel.FileNode;
import org.sleuthkit.autopsy.directorytree.ExternalViewerAction;
import org.sleuthkit.autopsy.directorytree.ViewContextAction;
@ -702,6 +702,13 @@ public class ResultsPanel extends javax.swing.JPanel {
@Override
protected void done() {
try {
get();
} catch (InterruptedException | ExecutionException ex) {
logger.log(Level.WARNING, "Video Worker Exception for file: " + thumbnailWrapper.getResultFile().getFirstInstance().getId(), ex);
} catch (CancellationException ignored) {
//we want to do nothing in response to this since we allow it to be cancelled
}
videoThumbnailViewer.repaint();
}
}
@ -736,6 +743,13 @@ public class ResultsPanel extends javax.swing.JPanel {
@Override
protected void done() {
try {
get();
} catch (InterruptedException | ExecutionException ex) {
logger.log(Level.WARNING, "Image Worker Exception for file: " + thumbnailWrapper.getResultFile().getFirstInstance().getId(), ex);
} catch (CancellationException ignored) {
//we want to do nothing in response to this since we allow it to be cancelled
}
imageThumbnailViewer.repaint();
}
@ -748,7 +762,6 @@ public class ResultsPanel extends javax.swing.JPanel {
private class DocumentPreviewWorker extends SwingWorker<Void, Void> {
private final DocumentWrapper documentWrapper;
private static final int PREVIEW_SIZE = 256;
/**
* Construct a new DocumentPreviewWorker.
@ -761,55 +774,29 @@ public class ResultsPanel extends javax.swing.JPanel {
documentPreviewViewer.addDocument(documentWrapper);
}
@Messages({"ResultsPanel.unableToCreate.text=Unable to create summary."})
@Override
protected Void doInBackground() throws Exception {
String preview = createPreview(documentWrapper.getResultFile().getFirstInstance());
if (preview != null) {
documentWrapper.setPreview(preview);
String preview = FileSearch.summarize(documentWrapper.getResultFile().getFirstInstance());
if (preview == null) {
preview = Bundle.ResultsPanel_unableToCreate_text();
}
documentWrapper.setPreview(preview);
return null;
}
/**
* Create the string that will be used as the preview for the specified
* AbstractFile.
*
* @param file The AbstractFile to create the preview for.
*
* @return The String which is the preview for the specified
* AbstractFile.
*/
@Messages({"ResultsPanel.documentPreviewWorker.noPreview=No preview available.",
"ResultsPanel.documentPreviewWorker.noBytes=No bytes read for document, unable to display preview."})
private String createPreview(AbstractFile file) {
byte[] data = new byte[PREVIEW_SIZE];
int bytesRead = 0;
if (file.getSize() > 0) {
try {
int length = PREVIEW_SIZE > file.getSize() ? (int) file.getSize() : PREVIEW_SIZE; //if the size is less than the int it can be cast to an int
bytesRead = file.read(data, 0, length); // read the data
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error while trying to show the String content.", ex); //NON-NLS
}
}
String text;
if (bytesRead > 0) {
StringExtract stringExtract = new StringExtract();
final StringExtract.StringExtractUnicodeTable.SCRIPT selScript = StringExtract.StringExtractUnicodeTable.SCRIPT.LATIN_1;
stringExtract.setEnabledScript(selScript);
StringExtract.StringExtractResult res = stringExtract.extract(data, bytesRead, 0);
text = res.getText();
if (StringUtils.isBlank(text)) {
text = Bundle.ResultsPanel_documentPreviewWorker_noPreview();
}
} else {
text = Bundle.ResultsPanel_documentPreviewWorker_noBytes();
}
return text;
}
@Messages({"ResultsPanel.documentPreview.text=Document preview creation cancelled."})
@Override
protected void done() {
try {
get();
} catch (InterruptedException | ExecutionException ex) {
documentWrapper.setPreview(ex.getMessage());
logger.log(Level.WARNING, "Document Worker Exception", ex);
} catch (CancellationException ignored) {
documentWrapper.setPreview(Bundle.ResultsPanel_documentPreview_text());
//we want to do nothing in response to this since we allow it to be cancelled
}
documentPreviewViewer.repaint();
}

View File

@ -24,38 +24,36 @@
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="imagePanel" alignment="0" pref="776" max="32767" attributes="0"/>
<Group type="102" alignment="0" attributes="0">
<Component id="fileSizeLabel" min="-2" pref="248" max="-2" attributes="0"/>
<EmptySpace max="32767" attributes="0"/>
<Component id="countLabel" min="-2" pref="124" max="-2" attributes="0"/>
<Component id="fileSizeLabel" max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="deletedLabel" min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="scoreLabel" min="-2" max="-2" attributes="0"/>
</Group>
<Component id="nameLabel" alignment="0" max="32767" attributes="0"/>
</Group>
<EmptySpace min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="nameLabel" min="-2" pref="14" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="imagePanel" min="-2" pref="140" max="-2" attributes="0"/>
<EmptySpace type="unrelated" min="-2" max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="fileSizeLabel" min="-2" pref="19" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="1" attributes="0">
<Component id="deletedLabel" min="-2" max="-2" attributes="0"/>
<Component id="fileSizeLabel" min="-2" pref="14" max="-2" attributes="0"/>
<Component id="scoreLabel" min="-2" max="-2" attributes="0"/>
<Component id="countLabel" alignment="1" min="-2" pref="19" max="-2" attributes="0"/>
</Group>
</Group>
<EmptySpace max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
@ -67,7 +65,7 @@
</Container>
<Component class="javax.swing.JLabel" name="fileSizeLabel">
</Component>
<Component class="javax.swing.JLabel" name="countLabel">
<Component class="javax.swing.JLabel" name="nameLabel">
</Component>
<Component class="javax.swing.JLabel" name="scoreLabel">
<Properties>

View File

@ -100,7 +100,7 @@ final class VideoThumbnailPanel extends javax.swing.JPanel implements ListCellRe
imagePanel = new javax.swing.JPanel();
fileSizeLabel = new javax.swing.JLabel();
countLabel = new javax.swing.JLabel();
nameLabel = new javax.swing.JLabel();
scoreLabel = new javax.swing.JLabel();
deletedLabel = new javax.swing.JLabel();
@ -109,14 +109,14 @@ final class VideoThumbnailPanel extends javax.swing.JPanel implements ListCellRe
imagePanel.setLayout(new java.awt.GridBagLayout());
scoreLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/red-circle-exclamation.png"))); // NOI18N
scoreLabel.setMaximumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setMinimumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setPreferredSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
scoreLabel.setMaximumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
scoreLabel.setMinimumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
scoreLabel.setPreferredSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
deletedLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/images/file-icon-deleted.png"))); // NOI18N
deletedLabel.setMaximumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
deletedLabel.setMinimumSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
deletedLabel.setPreferredSize(new Dimension(DiscoveryUiUtils.getIconSize(),DiscoveryUiUtils.getIconSize()));
deletedLabel.setMaximumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
deletedLabel.setMinimumSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
deletedLabel.setPreferredSize(new Dimension(org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize(),org.sleuthkit.autopsy.filequery.DiscoveryUiUtils.getIconSize()));
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
@ -127,48 +127,51 @@ final class VideoThumbnailPanel extends javax.swing.JPanel implements ListCellRe
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(imagePanel, javax.swing.GroupLayout.DEFAULT_SIZE, 776, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 248, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(countLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 124, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(deletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(nameLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(nameLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(imagePanel, javax.swing.GroupLayout.PREFERRED_SIZE, 140, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 19, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(deletedLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(countLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 19, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addComponent(fileSizeLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(scoreLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel countLabel;
private javax.swing.JLabel deletedLabel;
private javax.swing.JLabel fileSizeLabel;
private javax.swing.JPanel imagePanel;
private javax.swing.JLabel nameLabel;
private javax.swing.JLabel scoreLabel;
// End of variables declaration//GEN-END:variables
@Messages({
"# {0} - numberOfInstances",
"VideoThumbnailPanel.countLabel.text=Number of Instances: {0}",
"# {0} - otherInstanceCount",
"VideoThumbnailPanel.nameLabel.more.text= and {0} more",
"VideoThumbnailPanel.deleted.text=All instances of file are deleted."})
@Override
public Component getListCellRendererComponent(JList<? extends VideoThumbnailsWrapper> list, VideoThumbnailsWrapper value, int index, boolean isSelected, boolean cellHasFocus) {
fileSizeLabel.setText(getFileSizeString(value.getResultFile().getFirstInstance().getSize()));
countLabel.setText(Bundle.VideoThumbnailPanel_countLabel_text(value.getResultFile().getAllInstances().size()));
String nameText = value.getResultFile().getFirstInstance().getParentPath() + value.getResultFile().getFirstInstance().getName();
if (value.getResultFile().getAllInstances().size() > 1) {
nameText += Bundle.VideoThumbnailPanel_nameLabel_more_text(value.getResultFile().getAllInstances().size() - 1);
}
nameLabel.setText(nameText);
addThumbnails(value);
imagePanel.setBackground(isSelected ? SELECTION_COLOR : list.getBackground());
DiscoveryUiUtils.setDeletedIcon(value.getResultFile().isDeleted(), deletedLabel);

View File

@ -0,0 +1,251 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.geolocation;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.geolocation.datamodel.GeoLocationDataException;
import org.sleuthkit.autopsy.geolocation.datamodel.Track;
import org.sleuthkit.autopsy.geolocation.datamodel.Waypoint;
import org.sleuthkit.autopsy.geolocation.datamodel.WaypointBuilder;
/**
 * The business logic for filtering waypoints.
 *
 * Subclasses receive the final, filtered set of map waypoints through
 * handleFilteredWaypointSet.
 */
abstract class AbstractWaypointFetcher implements WaypointBuilder.WaypointFilterQueryCallBack {

    private static final Logger logger = Logger.getLogger(AbstractWaypointFetcher.class.getName());

    private final GeoFilterPanel.GeoFilter filters;

    /**
     * Constructs the waypoint fetcher.
     *
     * @param filters The filters to apply when gathering waypoints.
     */
    AbstractWaypointFetcher(GeoFilterPanel.GeoFilter filters) {
        this.filters = filters;
    }

    /**
     * Gets the waypoints based on the current GeoFilter.
     *
     * This function kicks off a process that ends with
     * handleFilteredWaypointSet being called. Subclasses must implement
     * handleFilteredWaypointSet to get the final results.
     *
     * @throws GeoLocationDataException If the waypoints could not be retrieved.
     */
    void getWaypoints() throws GeoLocationDataException {
        Case currentCase = Case.getCurrentCase();
        WaypointBuilder.getAllWaypoints(currentCase.getSleuthkitCase(),
                filters.getDataSources(),
                filters.showAllWaypoints(),
                filters.getMostRecentNumDays(),
                filters.showWaypointsWithoutTimeStamp(),
                this);
    }

    /**
     * Called after all of the MapWaypoints are created from all of the
     * TSK_GPS_XXX objects.
     *
     * @param mapWaypoints List of filtered MapWaypoints.
     */
    abstract void handleFilteredWaypointSet(Set<MapWaypoint> mapWaypoints);

    @Override
    public void process(List<Waypoint> waypoints) {
        // Tracks are fetched separately; a failure here degrades to showing
        // only the individual waypoints instead of aborting entirely.
        List<Track> tracks = null;
        try {
            tracks = Track.getTracks(Case.getCurrentCase().getSleuthkitCase(), filters.getDataSources());
        } catch (GeoLocationDataException ex) {
            logger.log(Level.WARNING, "Exception thrown while retrieving list of Tracks", ex);
        }

        List<Waypoint> completeList = createWaypointList(waypoints, tracks);
        final Set<MapWaypoint> pointSet = MapWaypoint.getWaypoints(completeList);

        handleFilteredWaypointSet(pointSet);
    }

    /**
     * Returns a complete list of waypoints including the track points. Takes
     * into account the current filters and includes waypoints as appropriate.
     *
     * @param waypoints List of waypoints.
     * @param tracks    List of tracks, may be null if track retrieval failed.
     *
     * @return A list of waypoints including the track points based on the
     *         current filters.
     */
    private List<Waypoint> createWaypointList(List<Waypoint> waypoints, List<Track> tracks) {
        final List<Waypoint> completeList = new ArrayList<>();

        if (tracks == null) {
            completeList.addAll(waypoints);
            return completeList;
        }

        if (filters.showAllWaypoints()) {
            completeList.addAll(waypoints);
            for (Track track : tracks) {
                completeList.addAll(track.getPath());
            }
            return completeList;
        }

        // Figure out what the most recent time is given the filtered
        // waypoints and the tracks, then keep only points inside the
        // trailing "most recent N days" window.
        Long timeRangeEnd = getMostRecent(waypoints, tracks);
        if (timeRangeEnd == null) {
            // Nothing has a timestamp, so the range filter degenerates to the
            // "show points without timestamps" rule; an unbounded range keeps
            // the helpers' null-timestamp handling in effect without
            // dereferencing a null end time.
            completeList.addAll(getWaypointsInRange(Long.MIN_VALUE, Long.MAX_VALUE, waypoints));
            completeList.addAll(getTracksInRange(Long.MIN_VALUE, Long.MAX_VALUE, tracks));
        } else {
            // 86400L avoids int overflow for very large day counts.
            Long timeRangeStart = timeRangeEnd - (86400L * filters.getMostRecentNumDays());
            completeList.addAll(getWaypointsInRange(timeRangeStart, timeRangeEnd, waypoints));
            completeList.addAll(getTracksInRange(timeRangeStart, timeRangeEnd, tracks));
        }
        return completeList;
    }

    /**
     * Return a list of waypoints that fall into the given time range.
     *
     * @param timeRangeStart start timestamp of range (seconds from java epoch)
     * @param timeRangeEnd   end timestamp of range (seconds from java epoch)
     * @param waypoints      List of waypoints to filter.
     *
     * @return A list of waypoints that fall into the time range.
     */
    private List<Waypoint> getWaypointsInRange(Long timeRangeStart, Long timeRangeEnd, List<Waypoint> waypoints) {
        List<Waypoint> completeList = new ArrayList<>();
        // Add all of the waypoints that fit into the time range; waypoints
        // without a timestamp are included only when the filter allows them.
        if (waypoints != null) {
            for (Waypoint point : waypoints) {
                Long time = point.getTimestamp();
                if ((time == null && filters.showWaypointsWithoutTimeStamp())
                        || (time != null && (time >= timeRangeStart && time <= timeRangeEnd))) {

                    completeList.add(point);
                }
            }
        }
        return completeList;
    }

    /**
     * Return a list of waypoints for tracks that fall into the given time
     * range. The track start time is used for determining whether the whole
     * track falls into the range.
     *
     * @param timeRangeStart start timestamp of range (seconds from java epoch)
     * @param timeRangeEnd   end timestamp of range (seconds from java epoch)
     * @param tracks         Track list.
     *
     * @return A list of waypoints that belong to tracks that fall into the
     *         time range.
     */
    private List<Waypoint> getTracksInRange(Long timeRangeStart, Long timeRangeEnd, List<Track> tracks) {
        List<Waypoint> completeList = new ArrayList<>();
        if (tracks != null) {
            for (Track track : tracks) {
                Long trackTime = track.getStartTime();

                if ((trackTime == null && filters.showWaypointsWithoutTimeStamp())
                        || (trackTime != null && (trackTime >= timeRangeStart && trackTime <= timeRangeEnd))) {

                    completeList.addAll(track.getPath());
                }
            }
        }
        return completeList;
    }

    /**
     * Find the latest time stamp in the given list of waypoints, ignoring
     * waypoints that have no timestamp.
     *
     * @param points List of Waypoints, required.
     *
     * @return The latest time stamp (seconds from java epoch), or null if no
     *         waypoint has a timestamp.
     */
    private Long findMostRecentTimestamp(List<Waypoint> points) {
        Long mostRecent = null;

        for (Waypoint point : points) {
            Long time = point.getTimestamp();
            if (time == null) {
                // Waypoints may legitimately lack a timestamp; skip them
                // instead of unboxing null into Math.max (NPE).
                continue;
            }
            mostRecent = (mostRecent == null) ? time : Math.max(mostRecent, time);
        }

        return mostRecent;
    }

    /**
     * Find the latest start time in the given list of tracks, ignoring tracks
     * that have no start time.
     *
     * @param tracks List of Tracks, required.
     *
     * @return The latest time stamp (seconds from java epoch), or null if no
     *         track has a start time.
     */
    private Long findMostRecentTracks(List<Track> tracks) {
        Long mostRecent = null;

        for (Track track : tracks) {
            Long startTime = track.getStartTime();
            if (startTime == null) {
                // Same null-safety consideration as findMostRecentTimestamp.
                continue;
            }
            mostRecent = (mostRecent == null) ? startTime : Math.max(mostRecent, startTime);
        }

        return mostRecent;
    }

    /**
     * Returns the "most recent" timestamp among the list of waypoints and
     * track points.
     *
     * @param points List of Waypoints
     * @param tracks List of Tracks
     *
     * @return Latest time stamp (seconds from java epoch), or null if nothing
     *         has a timestamp.
     */
    private Long getMostRecent(List<Waypoint> points, List<Track> tracks) {
        Long waypointMostRecent = findMostRecentTimestamp(points);
        Long trackMostRecent = findMostRecentTracks(tracks);

        if (waypointMostRecent == null) {
            return trackMostRecent;
        }
        if (trackMostRecent == null) {
            return waypointMostRecent;
        }
        return Math.max(waypointMostRecent, trackMostRecent);
    }
}

View File

@ -30,6 +30,8 @@ GeoTopComponent_no_waypoints_returned_mgs=Applied filter failed to find waypoint
GeoTopComponent_no_waypoints_returned_Title=No Waypoints Found
GLTopComponent_initilzation_error=An error occurred during waypoint initialization. Geolocation data may be incomplete.
GLTopComponent_name=Geolocation
GLTopComponent_No_dataSource_message=There are no data sources with Geolocation artifacts found.
GLTopComponent_No_dataSource_Title=No Geolocation artifacts found
HidingPane_default_title=Filters
MapPanel_connection_failure_message=Failed to connect to new geolocation map tile source.
MapPanel_connection_failure_message_title=Connection Failure

View File

@ -67,6 +67,10 @@ final class CheckBoxListPanel<T> extends javax.swing.JPanel {
model.removeAllElements();
}
/**
 * Checks whether the checkbox list contains any elements.
 *
 * @return True if the underlying list model has no elements; false otherwise.
 */
boolean isEmpty() {
    return model.isEmpty();
}
@Override
public void setEnabled(boolean enabled) {
checkboxList.setEnabled(enabled);

View File

@ -20,14 +20,19 @@ package org.sleuthkit.autopsy.geolocation;
import java.awt.GridBagConstraints;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.logging.Level;
import javafx.util.Pair;
import javax.swing.ImageIcon;
import javax.swing.SpinnerNumberModel;
import javax.swing.SwingWorker;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -38,12 +43,25 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
class GeoFilterPanel extends javax.swing.JPanel {
final static String INITPROPERTY = "FilterPanelInitCompleted";
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(GeoFilterPanel.class.getName());
private final SpinnerNumberModel numberModel;
private final CheckBoxListPanel<DataSource> checkboxPanel;
// Make sure to update this list if new TSK_GPS artifact types are added.
@SuppressWarnings("deprecation")
private static final BlackboardArtifact.ARTIFACT_TYPE[] GPS_ARTIFACT_TYPES = {
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK,
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION,
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_ROUTE,
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH,
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACK,
BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT
};
/**
* Creates new GeoFilterPanel
*/
@ -89,11 +107,8 @@ class GeoFilterPanel extends javax.swing.JPanel {
* Update the data source list with the current data sources
*/
void updateDataSourceList() {
try {
initCheckboxList();
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to initialize the CheckboxListPane", ex); //NON-NLS
}
DataSourceUpdater updater = new DataSourceUpdater();
updater.execute();
}
/**
@ -103,6 +118,10 @@ class GeoFilterPanel extends javax.swing.JPanel {
checkboxPanel.clearList();
}
boolean hasDataSources() {
return !checkboxPanel.isEmpty();
}
/**
* Adds an actionListener to listen for the filter apply action
*
@ -134,20 +153,6 @@ class GeoFilterPanel extends javax.swing.JPanel {
dataSources);
}
/**
* Initialize the checkbox list panel
*
* @throws TskCoreException
*/
private void initCheckboxList() throws TskCoreException {
final SleuthkitCase sleuthkitCase = Case.getCurrentCase().getSleuthkitCase();
for (DataSource dataSource : sleuthkitCase.getDataSources()) {
String dsName = sleuthkitCase.getContentById(dataSource.getId()).getName();
checkboxPanel.addElement(dsName, dataSource);
}
}
/**
* Based on the state of mostRecent radio button Change the state of the cnt
* spinner and the time stamp checkbox.
@ -377,4 +382,72 @@ class GeoFilterPanel extends javax.swing.JPanel {
}
}
/**
* SwingWorker for updating the list of valid data sources.
*
* doInBackground creates a list of Pair objects that contain the
* display name of the data source and the data source object.
*/
final private class DataSourceUpdater extends SwingWorker<List<Pair<String, DataSource>>, Void> {
@Override
protected List<Pair<String, DataSource>> doInBackground() throws Exception {
SleuthkitCase sleuthkitCase = Case.getCurrentCase().getSleuthkitCase();
List<Pair<String, DataSource>> validSources = new ArrayList<>();
for (DataSource dataSource : sleuthkitCase.getDataSources()) {
if (isGPSDataSource(sleuthkitCase, dataSource)) {
String dsName = sleuthkitCase.getContentById(dataSource.getId()).getName();
Pair<String, DataSource> pair = new Pair<>(dsName, dataSource);
validSources.add(pair);
}
}
return validSources;
}
/**
* Returns whether or not the given data source has GPS artifacts.
*
* @param sleuthkitCase The current sleuthkitCase
* @param dataSource
*
* @return True if the data source has at least one TSK_GPS_XXXX
*
* @throws TskCoreException
*/
private boolean isGPSDataSource(SleuthkitCase sleuthkitCase, DataSource dataSource) throws TskCoreException {
for (BlackboardArtifact.ARTIFACT_TYPE type : GPS_ARTIFACT_TYPES) {
if (sleuthkitCase.getBlackboardArtifactsTypeCount(type.getTypeID(), dataSource.getId()) > 0) {
return true;
}
}
return false;
}
@Override
public void done() {
List<Pair<String, DataSource>> sources = null;
try {
sources = get();
} catch (InterruptedException | ExecutionException ex) {
Throwable cause = ex.getCause();
if (cause != null) {
logger.log(Level.SEVERE, cause.getMessage(), cause);
} else {
logger.log(Level.SEVERE, ex.getMessage(), ex);
}
}
if (sources != null) {
for (Pair<String, DataSource> source : sources) {
checkboxPanel.addElement(source.getKey(), source.getValue());
}
}
GeoFilterPanel.this.firePropertyChange(INITPROPERTY, false, true);
}
}
}

View File

@ -28,7 +28,6 @@ import java.io.File;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
import java.util.LinkedHashSet;
@ -51,10 +50,6 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
import org.sleuthkit.autopsy.geolocation.GeoFilterPanel.GeoFilter;
import org.sleuthkit.autopsy.geolocation.datamodel.GeoLocationDataException;
import org.sleuthkit.autopsy.geolocation.datamodel.Track;
import org.sleuthkit.autopsy.geolocation.datamodel.Waypoint;
import org.sleuthkit.autopsy.geolocation.datamodel.WaypointBuilder;
import org.sleuthkit.autopsy.geolocation.datamodel.WaypointBuilder.WaypointFilterQueryCallBack;
import org.sleuthkit.autopsy.ingest.IngestManager;
import static org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent.DATA_ADDED;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
@ -93,7 +88,9 @@ public final class GeolocationTopComponent extends TopComponent {
@Messages({
"GLTopComponent_name=Geolocation",
"GLTopComponent_initilzation_error=An error occurred during waypoint initilization. Geolocation data maybe incomplete."
"GLTopComponent_initilzation_error=An error occurred during waypoint initilization. Geolocation data maybe incomplete.",
"GLTopComponent_No_dataSource_message=There are no data sources with Geolocation artifacts found.",
"GLTopComponent_No_dataSource_Title=No Geolocation artifacts found"
})
/**
@ -144,7 +141,6 @@ public final class GeolocationTopComponent extends TopComponent {
public void actionPerformed(ActionEvent e) {
geoFilterPanel.updateDataSourceList();
mapPanel.clearWaypoints();
updateWaypoints();
showRefreshPanel(false);
}
});
@ -158,6 +154,24 @@ public final class GeolocationTopComponent extends TopComponent {
}
});
geoFilterPanel.addPropertyChangeListener(GeoFilterPanel.INITPROPERTY, new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
if (geoFilterPanel.hasDataSources()) {
updateWaypoints();
} else {
geoFilterPanel.setEnabled(false);
setWaypointLoading(false);
JOptionPane.showMessageDialog(GeolocationTopComponent.this,
Bundle.GLTopComponent_No_dataSource_message(),
Bundle.GLTopComponent_No_dataSource_Title(),
JOptionPane.ERROR_MESSAGE);
}
}
});
mapPanel.addPropertyChangeListener(MapPanel.CURRENT_MOUSE_GEOPOSITION, new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
@ -201,9 +215,6 @@ public final class GeolocationTopComponent extends TopComponent {
@Override
public void open() {
super.open();
mapPanel.clearWaypoints();
geoFilterPanel.clearDataSourceList();
geoFilterPanel.updateDataSourceList();
// Let's make sure we only do this on the first open
if (!mapInitalized) {
@ -222,8 +233,12 @@ public final class GeolocationTopComponent extends TopComponent {
return; // Doesn't set the waypoints.
}
}
mapPanel.clearWaypoints();
geoFilterPanel.clearDataSourceList();
geoFilterPanel.updateDataSourceList();
mapPanel.setWaypoints(new LinkedHashSet<>());
updateWaypoints();
}
/**
@ -237,8 +252,8 @@ public final class GeolocationTopComponent extends TopComponent {
public void run() {
boolean isShowing = false;
Component[] comps = mapPanel.getComponents();
for(Component comp: comps) {
if(comp.equals(refreshPanel)) {
for (Component comp : comps) {
if (comp.equals(refreshPanel)) {
isShowing = true;
break;
}
@ -246,7 +261,7 @@ public final class GeolocationTopComponent extends TopComponent {
if (show && !isShowing) {
mapPanel.add(refreshPanel, BorderLayout.NORTH);
mapPanel.revalidate();
} else if(!show && isShowing){
} else if (!show && isShowing) {
mapPanel.remove(refreshPanel);
mapPanel.revalidate();
}
@ -284,10 +299,61 @@ public final class GeolocationTopComponent extends TopComponent {
setWaypointLoading(true);
geoFilterPanel.setEnabled(false);
Thread thread = new Thread(new WaypointRunner(filters));
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
try {
(new WaypointFetcher(filters)).getWaypoints();
} catch (GeoLocationDataException ex) {
logger.log(Level.SEVERE, "Failed to filter waypoints.", ex);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
JOptionPane.showMessageDialog(GeolocationTopComponent.this,
Bundle.GeoTopComponent_filter_exception_Title(),
Bundle.GeoTopComponent_filter_exception_msg(),
JOptionPane.ERROR_MESSAGE);
setWaypointLoading(false);
}
});
}
}
});
thread.start();
}
/**
* Add the filtered set of waypoints to the map and set the various window
* components to their proper state.
*
* @param waypointList
*/
void addWaypointsToMap(Set<MapWaypoint> waypointList) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
// If the list is empty, tell the user
if (waypointList == null || waypointList.isEmpty()) {
mapPanel.clearWaypoints();
JOptionPane.showMessageDialog(GeolocationTopComponent.this,
Bundle.GeoTopComponent_no_waypoints_returned_Title(),
Bundle.GeoTopComponent_no_waypoints_returned_mgs(),
JOptionPane.INFORMATION_MESSAGE);
setWaypointLoading(false);
geoFilterPanel.setEnabled(true);
return;
}
mapPanel.clearWaypoints();
mapPanel.setWaypoints(waypointList);
setWaypointLoading(false);
geoFilterPanel.setEnabled(true);
}
});
}
/**
* Show or hide the waypoint loading progress bar.
*
@ -424,244 +490,18 @@ public final class GeolocationTopComponent extends TopComponent {
// End of variables declaration//GEN-END:variables
/**
* A runnable class for getting waypoints based on the current filters.
* Extends AbstractWaypointFetcher to handle the returning of
* the filters set of MapWaypoints.
*/
private class WaypointRunner implements Runnable, WaypointFilterQueryCallBack {
final private class WaypointFetcher extends AbstractWaypointFetcher {
private final GeoFilter filters;
/**
* Constructs the Waypoint Runner
*
* @param filters
*/
WaypointRunner(GeoFilter filters) {
this.filters = filters;
WaypointFetcher(GeoFilter filters) {
super(filters);
}
@Override
public void run() {
Case currentCase = Case.getCurrentCase();
try {
WaypointBuilder.getAllWaypoints(currentCase.getSleuthkitCase(),
filters.getDataSources(),
filters.showAllWaypoints(),
filters.getMostRecentNumDays(),
filters.showWaypointsWithoutTimeStamp(),
this);
} catch (GeoLocationDataException ex) {
logger.log(Level.SEVERE, "Failed to filter waypoints.", ex);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
JOptionPane.showMessageDialog(GeolocationTopComponent.this,
Bundle.GeoTopComponent_filter_exception_Title(),
Bundle.GeoTopComponent_filter_exception_msg(),
JOptionPane.ERROR_MESSAGE);
setWaypointLoading(false);
}
});
}
}
@Override
public void process(List<Waypoint> waypoints) {
List<Track> tracks = null;
try {
tracks = Track.getTracks(Case.getCurrentCase().getSleuthkitCase(), filters.getDataSources());
} catch (GeoLocationDataException ex) {
logger.log(Level.WARNING, "Exception thrown while retrieving list of Tracks", ex);
}
List<Waypoint> completeList = createWaypointList(waypoints, tracks);
final Set<MapWaypoint> pointSet = MapWaypoint.getWaypoints(completeList);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
// If the list is empty, tell the user and do not change
// the visible waypoints.
if (completeList == null || completeList.isEmpty()) {
mapPanel.clearWaypoints();
JOptionPane.showMessageDialog(GeolocationTopComponent.this,
Bundle.GeoTopComponent_no_waypoints_returned_Title(),
Bundle.GeoTopComponent_no_waypoints_returned_mgs(),
JOptionPane.INFORMATION_MESSAGE);
setWaypointLoading(false);
geoFilterPanel.setEnabled(true);
return;
}
mapPanel.clearWaypoints();
mapPanel.setWaypoints(pointSet);
setWaypointLoading(false);
geoFilterPanel.setEnabled(true);
}
});
}
/**
* Returns a complete list of waypoints including the tracks. Takes into
* account the current filters and includes waypoints as appropriate.
*
* @param waypoints List of waypoints
* @param tracks List of tracks
*
* @return A list of waypoints including the tracks based on the current
* filters.
*/
private List<Waypoint> createWaypointList(List<Waypoint> waypoints, List<Track> tracks) {
final List<Waypoint> completeList = new ArrayList<>();
if (tracks != null) {
Long timeRangeEnd;
Long timeRangeStart;
if (!filters.showAllWaypoints()) {
// Figure out what the most recent time is given the filtered
// waypoints and the tracks.
timeRangeEnd = getMostRecent(waypoints, tracks);
timeRangeStart = timeRangeEnd - (86400 * filters.getMostRecentNumDays());
completeList.addAll(getWaypointsInRange(timeRangeStart, timeRangeEnd, waypoints));
completeList.addAll(getTracksInRange(timeRangeStart, timeRangeEnd, tracks));
} else {
completeList.addAll(waypoints);
for (Track track : tracks) {
completeList.addAll(track.getPath());
}
}
} else {
completeList.addAll(waypoints);
}
return completeList;
}
/**
* Return a list of waypoints that fall into the given time range.
*
* @param timeRangeStart start timestamp of range (seconds from java
* epoch)
* @param timeRangeEnd end timestamp of range (seconds from java
* epoch)
* @param waypoints List of waypoints to filter.
*
* @return A list of waypoints that fall into the time range.
*/
private List<Waypoint> getWaypointsInRange(Long timeRangeStart, Long timeRangeEnd, List<Waypoint> waypoints) {
List<Waypoint> completeList = new ArrayList<>();
// Add all of the waypoints that fit into the time range.
if (waypoints != null) {
for (Waypoint point : waypoints) {
Long time = point.getTimestamp();
if ((time == null && filters.showWaypointsWithoutTimeStamp())
|| (time != null && (time >= timeRangeStart && time <= timeRangeEnd))) {
completeList.add(point);
}
}
}
return completeList;
}
/**
* Return a list of waypoints from the given tracks that fall into the
* given time range. The track start time will be used for determining
* whether the whole track falls into the range.
*
* @param timeRangeStart start timestamp of range (seconds from java
* epoch)
* @param timeRangeEnd end timestamp of range (seconds from java
* epoch)
* @param tracks Track list.
*
* @return A list of waypoints that belong to tracks that fall into
* the time range.
*/
private List<Waypoint> getTracksInRange(Long timeRangeStart, Long timeRangeEnd, List<Track> tracks) {
List<Waypoint> completeList = new ArrayList<>();
if (tracks != null) {
for (Track track : tracks) {
Long trackTime = track.getStartTime();
if ((trackTime == null && filters.showWaypointsWithoutTimeStamp())
|| (trackTime != null && (trackTime >= timeRangeStart && trackTime <= timeRangeEnd))) {
completeList.addAll(track.getPath());
}
}
}
return completeList;
}
/**
* Find the latest time stamp in the given list of waypoints.
*
* @param points List of Waypoints, required.
*
* @return The latest time stamp (seconds from java epoch)
*/
private Long findMostRecentTimestamp(List<Waypoint> points) {
Long mostRecent = null;
for (Waypoint point : points) {
if (mostRecent == null) {
mostRecent = point.getTimestamp();
} else {
mostRecent = Math.max(mostRecent, point.getTimestamp());
}
}
return mostRecent;
}
/**
* Find the latest time stamp in the given list of tracks.
*
* @param tracks List of Tracks, required.
*
* @return The latest time stamp (seconds from java epoch)
*/
private Long findMostRecentTracks(List<Track> tracks) {
Long mostRecent = null;
for (Track track : tracks) {
if (mostRecent == null) {
mostRecent = track.getStartTime();
} else {
mostRecent = Math.max(mostRecent, track.getStartTime());
}
}
return mostRecent;
}
/**
* Returns the "most recent" timestamp among the list of waypoints and
* track points.
*
* @param points List of Waypoints
* @param tracks List of Tracks
*
* @return Latest time stamp (seconds from java epoch)
*/
private Long getMostRecent(List<Waypoint> points, List<Track> tracks) {
Long waypointMostRecent = findMostRecentTimestamp(points);
Long trackMostRecent = findMostRecentTracks(tracks);
if (waypointMostRecent != null && trackMostRecent != null) {
return Math.max(waypointMostRecent, trackMostRecent);
} else if (waypointMostRecent == null && trackMostRecent != null) {
return trackMostRecent;
} else if (waypointMostRecent != null && trackMostRecent == null) {
return waypointMostRecent;
}
return null;
void handleFilteredWaypointSet(Set<MapWaypoint> mapWaypoints) {
addWaypointsToMap(mapWaypoints);
}
}
}

View File

@ -93,31 +93,23 @@
<Component id="jScrollPane1" min="-2" max="-2" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="1" attributes="0">
<Component id="informationLabel" pref="0" max="32767" attributes="0"/>
<EmptySpace min="-2" pref="356" max="-2" attributes="0"/>
</Group>
<Component id="informationScrollPanel" alignment="0" max="32767" attributes="0"/>
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" attributes="0">
<Component id="indexButton" min="-2" max="-2" attributes="0"/>
<EmptySpace type="unrelated" max="-2" attributes="0"/>
<Component id="addHashesToDatabaseButton" min="-2" max="-2" attributes="0"/>
<EmptySpace max="32767" attributes="0"/>
</Group>
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Component id="informationScrollPanel" alignment="0" pref="420" max="32767" attributes="0"/>
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Component id="sendIngestMessagesCheckBox" min="-2" max="-2" attributes="0"/>
<Component id="ingestWarningLabel" alignment="0" min="-2" max="-2" attributes="0"/>
<Component id="informationLabel" alignment="0" min="-2" pref="197" max="-2" attributes="0"/>
</Group>
<EmptySpace min="0" pref="0" max="32767" attributes="0"/>
</Group>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</Group>
<Group type="102" attributes="0">
<Group type="103" groupAlignment="0" attributes="0">
<Component id="hashDatabasesLabel" min="-2" max="-2" attributes="0"/>
@ -298,11 +290,6 @@
</Component>
<Component class="javax.swing.JLabel" name="informationLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="informationLabel" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties" key="HashLookupSettingsPanel.informationLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
@ -418,11 +405,6 @@
<SubComponents>
<Component class="javax.swing.JLabel" name="nameLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="nameLabel" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties" key="HashLookupSettingsPanel.nameLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
@ -442,11 +424,6 @@
</Component>
<Component class="javax.swing.JLabel" name="typeLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="typeLabel" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties" key="HashLookupSettingsPanel.typeLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
@ -466,11 +443,6 @@
</Component>
<Component class="javax.swing.JLabel" name="locationLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="locationLabel" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties" key="HashLookupSettingsPanel.locationLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
@ -616,11 +588,6 @@
</Component>
<Component class="javax.swing.JCheckBox" name="sendIngestMessagesCheckBox">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="sendIngestMessagesCheckBox" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties" key="HashLookupSettingsPanel.sendIngestMessagesCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
@ -631,11 +598,6 @@
</Component>
<Component class="javax.swing.JLabel" name="ingestWarningLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.modules.form.editors2.FontEditor">
<FontInfo relative="true">
<Font bold="false" component="ingestWarningLabel" property="font" relativeSize="false" size="11"/>
</FontInfo>
</Property>
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.editors2.IconEditor">
<Image iconType="3" name="/org/sleuthkit/autopsy/modules/hashdatabase/warning16.png"/>
</Property>

View File

@ -706,24 +706,20 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
}
});
informationLabel.setFont(informationLabel.getFont().deriveFont(informationLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(informationLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.informationLabel.text")); // NOI18N
informationScrollPanel.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
nameLabel.setFont(nameLabel.getFont().deriveFont(nameLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(nameLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.nameLabel.text")); // NOI18N
hashDbNameLabel.setFont(hashDbNameLabel.getFont().deriveFont(hashDbNameLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(hashDbNameLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.hashDbNameLabel.text")); // NOI18N
typeLabel.setFont(typeLabel.getFont().deriveFont(typeLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(typeLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.typeLabel.text")); // NOI18N
hashDbTypeLabel.setFont(hashDbTypeLabel.getFont().deriveFont(hashDbTypeLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(hashDbTypeLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.hashDbTypeLabel.text")); // NOI18N
locationLabel.setFont(locationLabel.getFont().deriveFont(locationLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(locationLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.locationLabel.text")); // NOI18N
hashDbLocationLabel.setFont(hashDbLocationLabel.getFont().deriveFont(hashDbLocationLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
@ -854,7 +850,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
}
});
sendIngestMessagesCheckBox.setFont(sendIngestMessagesCheckBox.getFont().deriveFont(sendIngestMessagesCheckBox.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
org.openide.awt.Mnemonics.setLocalizedText(sendIngestMessagesCheckBox, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.sendIngestMessagesCheckBox.text")); // NOI18N
sendIngestMessagesCheckBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
@ -862,7 +857,6 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
}
});
ingestWarningLabel.setFont(ingestWarningLabel.getFont().deriveFont(ingestWarningLabel.getFont().getStyle() & ~java.awt.Font.BOLD, 11));
ingestWarningLabel.setIcon(new javax.swing.ImageIcon(getClass().getResource("/org/sleuthkit/autopsy/modules/hashdatabase/warning16.png"))); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(ingestWarningLabel, org.openide.util.NbBundle.getMessage(HashLookupSettingsPanel.class, "HashLookupSettingsPanel.ingestWarningLabel.text")); // NOI18N
@ -878,23 +872,18 @@ public final class HashLookupSettingsPanel extends IngestModuleGlobalSettingsPan
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
.addComponent(informationLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)
.addGap(356, 356, 356))
.addComponent(informationScrollPanel)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(indexButton)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(addHashesToDatabaseButton)
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(informationScrollPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 420, Short.MAX_VALUE)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(addHashesToDatabaseButton))
.addComponent(sendIngestMessagesCheckBox)
.addComponent(ingestWarningLabel))
.addComponent(ingestWarningLabel)
.addComponent(informationLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 197, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(0, 0, Short.MAX_VALUE)))
.addContainerGap())))
.addContainerGap())
.addGroup(jPanel1Layout.createSequentialGroup()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(hashDatabasesLabel)

View File

@ -324,7 +324,7 @@ final class PhotoRecCarverFileIngestModule implements FileIngestModule {
return IngestModule.ProcessResult.ERROR;
} catch (IOException ex) {
totals.totalItemsWithErrors.incrementAndGet();
logger.log(Level.SEVERE, String.format("Error writing file '%s' (id=%d) to '%s' with the PhotoRec carver.", file.getName(), file.getId(), tempFilePath), ex); // NON-NLS
logger.log(Level.SEVERE, String.format("Error writing or processing file '%s' (id=%d) to '%s' with the PhotoRec carver.", file.getName(), file.getId(), tempFilePath), ex); // NON-NLS
MessageNotifyUtil.Notify.error(PhotoRecCarverIngestModuleFactory.getModuleName(), NbBundle.getMessage(PhotoRecCarverFileIngestModule.class, "PhotoRecIngestModule.error.msg", file.getName()));
return IngestModule.ProcessResult.ERROR;
} finally {

View File

@ -269,7 +269,7 @@ public class PlasoIngestModule implements DataSourceIngestModule {
String architectureFolder = PlatformUtil.is64BitOS() ? PLASO64 : PLASO32;
String executableToFindName = Paths.get(PLASO, architectureFolder, executableName).toString();
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PlasoIngestModule.class.getPackage().getName(), false);
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, "org.sleuthkit.autopsy.core", false);
if (null == exeFile || exeFile.canExecute() == false) {
throw new FileNotFoundException(executableName + " executable not found.");
}

View File

@ -46,16 +46,6 @@ class ArtifactTextExtractor implements TextExtractor {
// "content" string to be indexed.
StringBuilder artifactContents = new StringBuilder();
Content dataSource = null;
try {
dataSource = artifact.getDataSource();
} catch (TskCoreException tskCoreException) {
throw new InitReaderException("Unable to get datasource for artifact: " + artifact.toString(), tskCoreException);
}
if (dataSource == null) {
throw new InitReaderException("Datasource was null for artifact: " + artifact.toString());
}
try {
for (BlackboardAttribute attribute : artifact.getAttributes()) {
artifactContents.append(attribute.getAttributeType().getDisplayName());
@ -67,7 +57,7 @@ class ArtifactTextExtractor implements TextExtractor {
// in the Autopsy datamodel.
switch (attribute.getValueType()) {
case DATETIME:
artifactContents.append(ContentUtils.getStringTime(attribute.getValueLong(), dataSource));
artifactContents.append(ContentUtils.getStringTime(attribute.getValueLong(), artifact));
break;
default:
artifactContents.append(attribute.getDisplayString());

View File

@ -423,7 +423,7 @@ final class TikaTextExtractor implements TextExtractor {
}
String executableToFindName = Paths.get(TESSERACT_DIR_NAME, TESSERACT_EXECUTABLE).toString();
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, TikaTextExtractor.class.getPackage().getName(), false);
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, "org.sleuthkit.autopsy.core", false);
if (null == exeFile) {
return null;
}

View File

@ -0,0 +1,48 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.textsummarizer;
import java.io.IOException;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Interface for implementation of summarizers for documents.
*/
public interface TextSummarizer {
/**
* Get the name of the TextSummarizer for identification purposes.
*
* @return The name of the TextSummarizer.
*/
String getName();
/**
* Summarize the provided abstract file into a summary with a size no
* greater than the size specified.
*
* @param file The AbstractFile to summarize.
* @param summarySize The size of the summary to create.
*
* @return The summary as a string.
*
* @throws IOException
*/
String summarize(AbstractFile file, int summarySize) throws IOException;
}

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Copyright 2018-2020 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -62,6 +62,7 @@ import org.sleuthkit.autopsy.modules.photoreccarver.PhotoRecCarverIngestModuleFa
import org.sleuthkit.autopsy.modules.vmextractor.VMExtractorIngestModuleFactory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.autopsy.centralrepository.datamodel.CentralRepository;
import org.sleuthkit.autopsy.centralrepository.datamodel.RdbmsCentralRepoFactory;
/**
* Utilities for testing intercase correlation feature.
@ -220,7 +221,7 @@ class InterCaseTestUtils {
this.kitchenShink = new IngestJobSettings(InterCaseTestUtils.class.getCanonicalName(), IngestType.ALL_MODULES, kitchenSink);
try {
Collection<CorrelationAttributeInstance.Type> types = CorrelationAttributeInstance.getDefaultCorrelationTypes();
Collection<CorrelationAttributeInstance.Type> types = CentralRepository.getInstance().getCorrelationTypes();
//TODO use ids instead of strings
FILE_TYPE = types.stream().filter(type -> type.getDisplayName().equals("Files")).findAny().get();
@ -248,7 +249,7 @@ class InterCaseTestUtils {
CentralRepository.getInstance().shutdownConnections();
}
FileUtils.deleteDirectory(CENTRAL_REPO_DIRECTORY_PATH.toFile());
} catch (IOException | CentralRepoExceptionex) {
} catch (IOException | CentralRepoException ex) {
Exceptions.printStackTrace(ex);
Assert.fail(ex.getMessage());
}
@ -297,8 +298,10 @@ class InterCaseTestUtils {
crSettings.createDbDirectory();
}
crSettings.initializeDatabaseSchema();
crSettings.insertDefaultDatabaseContent();
RdbmsCentralRepoFactory centralRepoSchemaFactory = new RdbmsCentralRepoFactory(CentralRepoPlatforms.SQLITE, crSettings);
centralRepoSchemaFactory.initializeDatabaseSchema();
centralRepoSchemaFactory.insertDefaultDatabaseContent();
crSettings.saveSettings();
CentralRepoPlatforms.setSelectedPlatform(CentralRepoPlatforms.SQLITE.name());
CentralRepoPlatforms.saveSelectedPlatform();

View File

@ -1243,6 +1243,11 @@ public class SharedConfiguration {
HashDbManager hashDbManager = HashDbManager.getInstance();
hashDbManager.loadLastSavedConfiguration();
for (HashDbManager.HashDb hashDb : hashDbManager.getAllHashSets()) {
// Central Repository hash sets have no path and don't need to be copied
if (hashDb.getIndexPath().isEmpty() && hashDb.getDatabasePath().isEmpty()) {
continue;
}
if (hashDb.hasIndexOnly()) {
results.add(hashDb.getIndexPath());
} else {

View File

@ -92,6 +92,8 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
for tableName in CallLogAnalyzer._tableNames:
try:
tableFound = callLogDb.tableExists(tableName)
if tableFound:
resultSet = callLogDb.runQuery("SELECT number, date, duration, type, name FROM " + tableName + " ORDER BY date DESC;")
self._logger.log(Level.INFO, "Reading call log from table {0} in db {1}", [tableName, callLogDb.getDBFile().getName()])
if resultSet is not None:

View File

@ -102,13 +102,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
# sorted by name, so phonenumber/email would be consecutive for a person if they exist.
# check if contacts.name_raw_contact_id exists. Modify the query accordingly.
columnFound = False
metadata = contactDb.getConnectionMetadata()
columnListResultSet = metadata.getColumns(None, None, "contacts", None)
while columnListResultSet.next():
if columnListResultSet.getString("COLUMN_NAME") == "name_raw_contact_id":
columnFound = True
break
columnFound = contactDb.columnExists("contacts", "name_raw_contact_id")
if columnFound:
resultSet = contactDb.runQuery(
"SELECT mimetype, data1, name_raw_contact.display_name AS display_name \n"

View File

@ -176,6 +176,14 @@ class AdHocSearchChildFactory extends ChildFactory<KeyValue> {
* Get file properties.
*/
Map<String, Object> properties = new LinkedHashMap<>();
/**
* Add a snippet property, if available.
*/
if (hit.hasSnippet()) {
properties.put(TSK_KEYWORD_PREVIEW.getDisplayName(), hit.getSnippet());
}
Content content;
String contentName;
try {
@ -196,12 +204,6 @@ class AdHocSearchChildFactory extends ChildFactory<KeyValue> {
properties.put(LOCATION.toString(), contentName);
}
/**
* Add a snippet property, if available.
*/
if (hit.hasSnippet()) {
properties.put(TSK_KEYWORD_PREVIEW.getDisplayName(), hit.getSnippet());
}
String hitName;
BlackboardArtifact artifact = null;

View File

@ -457,7 +457,8 @@ public class SolrSearchService implements KeywordSearchService, AutopsyService {
@Subscribe
void handleNewArtifacts(Blackboard.ArtifactsPostedEvent event) {
for (BlackboardArtifact artifact : event.getArtifacts()) {
if (artifact.getArtifactTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) { //don't index KWH artifacts.
if ((artifact.getArtifactTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) && // don't index KWH bc it's based on existing indexed text
(artifact.getArtifactTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT.getTypeID())){ //don't index AO bc it has only an artifact ID - no useful text
try {
index(artifact);
} catch (TskCoreException ex) {

View File

@ -1,15 +1,22 @@
cannotBuildXmlParser=Unable to build XML parser:
cannotLoadSEUQA=Unable to load Search Engine URL Query Analyzer settings file, SEUQAMappings.xml:
cannotParseXml=Unable to parse XML file:
ChromeCacheExtract_adding_extracted_files_msg=Adding %d extracted files for analysis.
Chrome.getBookmark.errMsg.errAnalyzeFile={0}: Error while trying to analyze file: {1}
ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis.
ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis.
ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s.
ChromeCacheExtractor.moduleName=ChromeCacheExtractor
# {0} - module name
# {1} - row number
# {2} - table length
# {3} - cache path
ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}
DataSourceUsage_AndroidMedia=Android Media Card
DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card
DataSourceUsage_FlashDrive=Flash Drive
# {0} - OS name
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
DataSourceUsageAnalyzer.parentModuleName=Recent Activity
Extract.dbConn.errMsg.failedToQueryDb={0}: Failed to query database.
Extract.indexError.message=Failed to index artifact for keyword search.
Extract.noOpenCase.errMsg=No open case available.
ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history
@ -18,6 +25,11 @@ ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Ed
ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file
ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer
ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file
ExtractIE.getBookmark.ere.noSpace=RecentActivity
ExtractIE.getBookmark.errMsg.errPostingBookmarks=Error posting Internet Explorer Bookmark artifacts.
ExtractIE.getCookie.errMsg.errPostingCookies=Error posting Internet Explorer Cookie artifacts.
ExtractIE.getHistory.errMsg.errPostingHistory=Error posting Internet Explorer History artifacts.
Extractor.errPostingArtifacts=Error posting {0} artifacts to the blackboard.
ExtractOs.androidOs.label=Android
ExtractOs.androidVolume.label=OS Drive (Android)
ExtractOs.debianLinuxOs.label=Linux (Debian)
@ -84,7 +96,7 @@ Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file
Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files.
Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
ExtractIE.moduleName.text=Internet Explorer
ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks.
ExtractIE.getBookmark.errMsg.errGettingBookmarks=Error getting Internet Explorer Bookmarks.
ExtractIE.parentModuleName.noSpace=RecentActivity
ExtractIE.parentModuleName=Recent Activity
ExtractIE.getURLFromIEBmkFile.errMsg={0}: Error parsing IE bookmark File {1}

View File

@ -142,7 +142,7 @@ class Chrome extends Extract {
progressBar.progress(Bundle.Progress_Message_Chrome_Cache());
ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context, progressBar);
chromeCacheExtractor.getCaches();
chromeCacheExtractor.processCaches();
}
/**

View File

@ -73,6 +73,18 @@ import org.sleuthkit.datamodel.TskException;
*
* We extract cache entries, create derived files if needed,
* and record the URL.
*
* CACHE BASICS (https://www.chromium.org/developers/design-documents/network-stack/disk-cache)
* - A cached item is broken up into segments (header, payload, etc.). The segments are not stored together.
* - Each user has a cache folder in AppData\Local\Google\Chrome\User Data\Default\Cache
* - Each folder has three kinds of files
* -- index: This is the main file. It has one entry for every cached item. You can start from here and work you way to the various data_x and f_XXX files that contain segments.
* -- data_X: These files are containers for small segments and other supporting data (such as the cache entry)
* -- f_XXXX: If the cached data cannot fit into a slot in data_X, it will be saved to its
* own f_XXXX file. These could be compressed if the data being sent was compressed.
* These are referred to as "External Files" in the below code.
* - A CacheAddress embeds information about which file something is stored in. This address is used in several structures to make it easy to abstract out where data is stored.
* - General Flow: index file -> process Cache Entry in data_X file -> process segment in data_X or f_XXX.
*/
final class ChromeCacheExtractor {
@ -100,22 +112,22 @@ final class ChromeCacheExtractor {
private FileManager fileManager;
// A file table to cache copies of index and data_n files.
private final Map<String, CacheFileCopy> fileCopyCache = new HashMap<>();
private final Map<String, FileWrapper> fileCopyCache = new HashMap<>();
// A file table to cache the f_* files.
private final Map<String, AbstractFile> externalFilesTable = new HashMap<>();
/**
* Encapsulates abstract file for a cache file as well as a temp file copy
* that can be accessed as a random access file.
* Allows methods to use data in an AbstractFile in a variety of
* ways. As a ByteBuffer, AbstractFile, etc. A local copy of the file
* backs the ByteBuffer.
*/
final class CacheFileCopy {
final class FileWrapper {
private final AbstractFile abstractFile;
private final RandomAccessFile fileCopy;
private final ByteBuffer byteBuffer;
CacheFileCopy (AbstractFile abstractFile, RandomAccessFile fileCopy, ByteBuffer buffer ) {
FileWrapper (AbstractFile abstractFile, RandomAccessFile fileCopy, ByteBuffer buffer ) {
this.abstractFile = abstractFile;
this.fileCopy = fileCopy;
this.byteBuffer = buffer;
@ -174,13 +186,13 @@ final class ChromeCacheExtractor {
}
/**
* Initializes the module to extract cache from a specific folder.
* Resets the internal caches and temp folders in between processing each user cache folder
*
* @param cachePath - path where cache files are found
* @param cachePath - path (in data source) of the cache being processed
*
* @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
*/
private void resetForNewFolder(String cachePath) throws IngestModuleException {
private void resetForNewCacheFolder(String cachePath) throws IngestModuleException {
fileCopyCache.clear();
externalFilesTable.clear();
@ -206,7 +218,7 @@ final class ChromeCacheExtractor {
*/
private void cleanup () {
for (Entry<String, CacheFileCopy> entry : this.fileCopyCache.entrySet()) {
for (Entry<String, FileWrapper> entry : this.fileCopyCache.entrySet()) {
Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(currentCase, moduleName), entry.getKey() );
try {
entry.getValue().getFileCopy().getChannel().close();
@ -246,7 +258,7 @@ final class ChromeCacheExtractor {
* A data source may have multiple Chrome user profiles and caches.
*
*/
void getCaches() {
void processCaches() {
try {
moduleInit();
@ -257,18 +269,18 @@ final class ChromeCacheExtractor {
}
// Find and process the cache folders. There could be one per user
List<AbstractFile> indexFiles;
try {
indexFiles = findCacheIndexFiles();
// Identify each cache folder by searching for the index files in each
List<AbstractFile> indexFiles = findIndexFiles();
// Process each of the caches
// Process each of the cache folders
for (AbstractFile indexFile: indexFiles) {
if (context.dataSourceIngestIsCancelled()) {
return;
}
processCacheIndexFile(indexFile);
processCacheFolder(indexFile);
}
} catch (TskCoreException ex) {
@ -278,62 +290,77 @@ final class ChromeCacheExtractor {
}
@Messages({
"ChromeCacheExtract_adding_extracted_files_msg=Adding %d extracted files for analysis."
"ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis.",
"ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis.",
"ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s."
})
/**
* Processes a user's cache and creates corresponding artifacts and derived files.
* Will ultimately process the f_XXXX and data_X files in the folder.
*
* @param cacheIndexFile Cache index file for a given user
* @param indexFile Index file that is located in a user's cache folder
*/
private void processCacheIndexFile(AbstractFile indexAbstractFile) {
private void processCacheFolder(AbstractFile indexFile) {
String cachePath = indexAbstractFile.getParentPath();
Optional<CacheFileCopy> indexFileCopy;
String cacheFolderName = indexFile.getParentPath();
Optional<FileWrapper> indexFileWrapper;
/*
* The first part of this method is all about finding the needed files in the cache
* folder and making internal copies/caches of them so that we can later process them
* and effeciently look them up.
*/
try {
resetForNewFolder(cachePath);
progressBar.progress(String.format(Bundle.ChromeCacheExtract_loading_files_msg(), cacheFolderName));
resetForNewCacheFolder(cacheFolderName);
// @@@ This is little ineffecient because we later in this call search for the AbstractFile that we currently have
indexFileCopy = this.getCacheFileCopy(indexAbstractFile.getName(), cachePath);
if (!indexFileCopy.isPresent()) {
String msg = String.format("Failed to find copy cache index file %s", indexAbstractFile.getUniquePath());
// Load the index file into the caches
indexFileWrapper = findDataOrIndexFile(indexFile.getName(), cacheFolderName);
if (!indexFileWrapper.isPresent()) {
String msg = String.format("Failed to find copy cache index file %s", indexFile.getUniquePath());
logger.log(Level.WARNING, msg);
return;
}
// load the data files. We do this now to load them into the cache
// load the data files into the internal cache. We do this because we often
// jump in between the various data_X files resolving segments
for (int i = 0; i < 4; i ++) {
Optional<CacheFileCopy> dataFile = findAndCopyCacheFile(String.format("data_%1d",i), cachePath );
Optional<FileWrapper> dataFile = findDataOrIndexFile(String.format("data_%1d",i), cacheFolderName );
if (!dataFile.isPresent()) {
return;
}
}
// find all f_* files in a single query and load them into the cache
findExternalFiles(cachePath);
// we do this here so that it is a single query instead of hundreds of individual ones
findExternalFiles(cacheFolderName);
} catch (TskCoreException | IngestModuleException ex) {
String msg = "Failed to find cache files in path " + cachePath; //NON-NLS
String msg = "Failed to find cache files in path " + cacheFolderName; //NON-NLS
logger.log(Level.WARNING, msg, ex);
return;
}
// parse the index file
logger.log(Level.INFO, "{0}- Now reading Cache index file from path {1}", new Object[]{moduleName, cachePath }); //NON-NLS
/*
* Now the analysis begins. We parse the index file and that drives parsing entries
* from data_X or f_XXXX files.
*/
logger.log(Level.INFO, "{0}- Now reading Cache index file from path {1}", new Object[]{moduleName, cacheFolderName }); //NON-NLS
List<AbstractFile> derivedFiles = new ArrayList<>();
Collection<BlackboardArtifact> sourceArtifacts = new ArrayList<>();
Collection<BlackboardArtifact> webCacheArtifacts = new ArrayList<>();
Collection<BlackboardArtifact> artifactsAdded = new ArrayList<>();
ByteBuffer indexFileROBuffer = indexFileCopy.get().getByteBuffer();
ByteBuffer indexFileROBuffer = indexFileWrapper.get().getByteBuffer();
IndexFileHeader indexHdr = new IndexFileHeader(indexFileROBuffer);
// seek past the header
indexFileROBuffer.position(INDEXFILE_HDR_SIZE);
// Process each address in the table
/* Cycle through index and get the CacheAddress for each CacheEntry. Process each entry
* to extract data, add artifacts, etc. from the f_XXXX and data_x files */
for (int i = 0; i < indexHdr.getTableLen(); i++) {
if (context.dataSourceIngestIsCancelled()) {
@ -341,13 +368,13 @@ final class ChromeCacheExtractor {
return;
}
CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK, cachePath);
CacheAddress addr = new CacheAddress(indexFileROBuffer.getInt() & UINT32_MASK, cacheFolderName);
if (addr.isInitialized()) {
progressBar.progress( NbBundle.getMessage(this.getClass(),
progressBar.progress(NbBundle.getMessage(this.getClass(),
"ChromeCacheExtractor.progressMsg",
moduleName, i, indexHdr.getTableLen(), cachePath) );
moduleName, i, indexHdr.getTableLen(), cacheFolderName) );
try {
List<DerivedFile> addedFiles = this.processCacheEntry(addr, sourceArtifacts, webCacheArtifacts);
List<DerivedFile> addedFiles = processCacheEntry(addr, artifactsAdded);
derivedFiles.addAll(addedFiles);
}
catch (TskCoreException | IngestModuleException ex) {
@ -361,19 +388,19 @@ final class ChromeCacheExtractor {
return;
}
progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_extracted_files_msg(), derivedFiles.size()));
// notify listeners of new files and schedule for analysis
progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_extracted_files_msg(), derivedFiles.size()));
derivedFiles.forEach((derived) -> {
services.fireModuleContentEvent(new ModuleContentEvent(derived));
});
context.addFilesToJob(derivedFiles);
// notify listeners about new artifacts
progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_artifacts_msg(), artifactsAdded.size()));
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
try {
blackboard.postArtifacts(sourceArtifacts, moduleName);
blackboard.postArtifacts(webCacheArtifacts, moduleName);
blackboard.postArtifacts(artifactsAdded, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.WARNING, String.format("Failed to post cacheIndex artifacts "), ex); //NON-NLS
}
@ -382,49 +409,47 @@ final class ChromeCacheExtractor {
}
/**
* Gets the cache entry at the specified address.
* Processes the cache entry that is stored at the given address. A CacheEntry is
* located in a data_X file and stores information about where the various segments
* for a given cached entry are located.
*
* Extracts the files if needed and adds as derived files, creates artifacts
*
* @param cacheEntryAddress cache entry address
* @param associatedObjectArtifacts any associated object artifacts created are added to this collection
* @param webCacheArtifacts any web cache artifacts created are added to this collection
* @param cacheAddress Address where CacheEntry is located (from index file)
* @param artifactsAdded any artifact that was added
*
* @return Optional derived file, is a derived file is added for the given entry
*/
private List<DerivedFile> processCacheEntry(CacheAddress cacheEntryAddress, Collection<BlackboardArtifact> associatedObjectArtifacts, Collection<BlackboardArtifact> webCacheArtifacts ) throws TskCoreException, IngestModuleException {
private List<DerivedFile> processCacheEntry(CacheAddress cacheAddress, Collection<BlackboardArtifact> artifactsAdded ) throws TskCoreException, IngestModuleException {
List<DerivedFile> derivedFiles = new ArrayList<>();
// get the path to the corresponding data_X file
String dataFileName = cacheEntryAddress.getFilename();
String cachePath = cacheEntryAddress.getCachePath();
// get the path to the corresponding data_X file for the cache entry
String cacheEntryFileName = cacheAddress.getFilename();
String cachePath = cacheAddress.getCachePath();
Optional<CacheFileCopy> cacheEntryFile = this.getCacheFileCopy(dataFileName, cachePath);
if (!cacheEntryFile.isPresent()) {
String msg = String.format("Failed to get cache entry at address %s", cacheEntryAddress); //NON-NLS
Optional<FileWrapper> cacheEntryFileOptional = findDataOrIndexFile(cacheEntryFileName, cachePath);
if (!cacheEntryFileOptional.isPresent()) {
String msg = String.format("Failed to find data file %s", cacheEntryFileName); //NON-NLS
throw new IngestModuleException(msg);
}
// Load the entry to get its metadata, segments, etc.
CacheEntry cacheEntry = new CacheEntry(cacheAddress, cacheEntryFileOptional.get() );
List<CacheDataSegment> dataSegments = cacheEntry.getDataSegments();
// Get the cache entry and its data segments
CacheEntry cacheEntry = new CacheEntry(cacheEntryAddress, cacheEntryFile.get() );
List<CacheData> dataEntries = cacheEntry.getData();
// Only process the first payload data segment in each entry
// first data segement has the HTTP headers, 2nd is the payload
if (dataEntries.size() < 2) {
if (dataSegments.size() < 2) {
return derivedFiles;
}
CacheData dataSegment = dataEntries.get(1);
CacheDataSegment dataSegment = dataSegments.get(1);
// name of the file that was downloaded and cached (or data_X if it was saved into there)
String cachedFileName = dataSegment.getAddress().getFilename();
Optional<AbstractFile> cachedFileAbstractFile = this.findCacheFile(cachedFileName, cachePath);
if (!cachedFileAbstractFile.isPresent()) {
logger.log(Level.WARNING, "Error finding file: " + cachePath + "/" + cachedFileName); //NON-NLS
// Name where segment is located (could be diffrent from where entry was located)
String segmentFileName = dataSegment.getCacheAddress().getFilename();
Optional<AbstractFile> segmentFileAbstractFile = findAbstractFile(segmentFileName, cachePath);
if (!segmentFileAbstractFile.isPresent()) {
logger.log(Level.WARNING, "Error finding segment file: " + cachePath + "/" + segmentFileName); //NON-NLS
return derivedFiles;
}
@ -433,112 +458,90 @@ final class ChromeCacheExtractor {
isBrotliCompressed = true;
}
// setup some attributes for later use
BlackboardAttribute urlAttr = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
moduleName,
((cacheEntry.getKey() != null) ? cacheEntry.getKey() : ""));
BlackboardAttribute createTimeAttr = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
moduleName,
cacheEntry.getCreationTime());
BlackboardAttribute httpHeaderAttr = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HEADERS,
moduleName,
cacheEntry.getHTTPHeaders());
Collection<BlackboardAttribute> webCacheAttributes = new ArrayList<>();
webCacheAttributes.add(urlAttr);
webCacheAttributes.add(createTimeAttr);
webCacheAttributes.add(httpHeaderAttr);
// add artifacts to the f_XXX file
if (dataSegment.isInExternalFile() ) {
// Make artifacts around the cached item and extract data from data_X file
try {
BlackboardArtifact webCacheArtifact = cacheEntryFile.get().getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE);
if (webCacheArtifact != null) {
webCacheArtifact.addAttributes(webCacheAttributes);
// Add path of f_* file as attribute
webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH,
moduleName,
cachedFileAbstractFile.get().getUniquePath()));
webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID,
moduleName, cachedFileAbstractFile.get().getId()));
webCacheArtifacts.add(webCacheArtifact);
BlackboardArtifact associatedObjectArtifact = cachedFileAbstractFile.get().newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
if (associatedObjectArtifact != null) {
associatedObjectArtifact.addAttribute(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
moduleName, webCacheArtifact.getArtifactID()));
associatedObjectArtifacts.add(associatedObjectArtifact);
AbstractFile cachedItemFile; //
/* If the cached data is in a f_XXXX file, we only need to make artifacts. */
if (dataSegment.isInExternalFile() ) {
cachedItemFile = segmentFileAbstractFile.get();
}
}
if (isBrotliCompressed) {
cachedFileAbstractFile.get().setMIMEType(BROTLI_MIMETYPE);
cachedFileAbstractFile.get().save();
}
} catch (TskException ex) {
logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS
}
}
// extract the embedded data to a derived file and create artifacts
/* If the data is in a data_X file, we need to extract it out and then make the similar artifacts */
else {
// Data segments in "data_x" files are saved in individual files and added as derived files
String filename = dataSegment.save();
String relPathname = getRelOutputFolderName() + dataSegment.getAddress().getCachePath() + filename;
try {
String relPathname = getRelOutputFolderName() + dataSegment.getCacheAddress().getCachePath() + filename;
// @@@ We should batch these up and do them in one big insert / transaction
DerivedFile derivedFile = fileManager.addDerivedFile(filename, relPathname,
dataSegment.getDataLength(),
cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), cacheEntry.getCreationTime(), // TBD
true,
cachedFileAbstractFile.get(),
segmentFileAbstractFile.get(),
"",
moduleName,
VERSION_NUMBER,
"",
TskData.EncodingType.NONE);
derivedFiles.add(derivedFile);
cachedItemFile = derivedFile;
}
BlackboardArtifact webCacheArtifact = cacheEntryFile.get().getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE);
addArtifacts(cacheEntry, cacheEntryFileOptional.get().getAbstractFile(), cachedItemFile, artifactsAdded);
// Tika doesn't detect these types. So, make sure they have the correct MIME type */
if (isBrotliCompressed) {
cachedItemFile.setMIMEType(BROTLI_MIMETYPE);
cachedItemFile.save();
}
} catch (TskException ex) {
logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS
}
return derivedFiles;
}
/**
* Add artifacts for a given cached item
*
* @param cacheEntry Entry item came from
* @param cacheEntryFile File that stored the cache entry
* @param cachedItemFile File that stores the cached data (Either a derived file or f_XXXX file)
* @param artifactsAdded List of artifacts that were added by this call
* @throws TskCoreException
*/
private void addArtifacts(CacheEntry cacheEntry, AbstractFile cacheEntryFile, AbstractFile cachedItemFile, Collection<BlackboardArtifact> artifactsAdded) throws TskCoreException {
// Create a TSK_WEB_CACHE entry with the parent as data_X file that had the cache entry
BlackboardArtifact webCacheArtifact = cacheEntryFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_CACHE);
if (webCacheArtifact != null) {
webCacheArtifact.addAttributes(webCacheAttributes);
// Add path of derived file as attribute
webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH,
Collection<BlackboardAttribute> webAttr = new ArrayList<>();
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
moduleName,
derivedFile.getUniquePath()));
((cacheEntry.getKey() != null) ? cacheEntry.getKey() : "")));
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
moduleName, cacheEntry.getCreationTime()));
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HEADERS,
moduleName, cacheEntry.getHTTPHeaders()));
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH,
moduleName, cachedItemFile.getUniquePath()));
webAttr.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID,
moduleName, cachedItemFile.getId()));
webCacheArtifact.addAttributes(webAttr);
artifactsAdded.add(webCacheArtifact);
webCacheArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID,
moduleName, derivedFile.getId()));
webCacheArtifacts.add(webCacheArtifact);
BlackboardArtifact associatedObjectArtifact = derivedFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
// Create a TSK_ASSOCIATED_OBJECT on the f_XXX or derived file file back to the CACHE entry
BlackboardArtifact associatedObjectArtifact = cachedItemFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
if (associatedObjectArtifact != null) {
associatedObjectArtifact.addAttribute(
new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
moduleName, webCacheArtifact.getArtifactID()));
associatedObjectArtifacts.add(associatedObjectArtifact);
artifactsAdded.add(associatedObjectArtifact);
}
}
if (isBrotliCompressed) {
derivedFile.setMIMEType(BROTLI_MIMETYPE);
derivedFile.save();
}
derivedFiles.add(derivedFile);
} catch (TskException ex) {
logger.log(Level.SEVERE, "Error while trying to add an artifact", ex); //NON-NLS
}
}
return derivedFiles;
}
/**
@ -557,26 +560,27 @@ final class ChromeCacheExtractor {
}
}
/**
* Finds abstract file for cache file with a specified name.
* Finds a file with a given name in a given cache folder
* First checks in the file tables.
*
* @param cacheFileName
* @return Optional abstract file
* @throws TskCoreException
*/
private Optional<AbstractFile> findCacheFile(String cacheFileName, String cachePath) throws TskCoreException {
private Optional<AbstractFile> findAbstractFile(String cacheFileName, String cacheFolderName) throws TskCoreException {
// see if it is cached
String fileTableKey = cachePath + cacheFileName;
String fileTableKey = cacheFolderName + cacheFileName;
if (cacheFileName.startsWith("f_") && externalFilesTable.containsKey(fileTableKey)) {
return Optional.of(externalFilesTable.get(fileTableKey));
}
if (fileCopyCache.containsKey(fileTableKey)) {
return Optional.of(fileCopyCache.get(fileTableKey).getAbstractFile());
}
List<AbstractFile> cacheFiles = fileManager.findFiles(dataSource, cacheFileName, cachePath); //NON-NLS
List<AbstractFile> cacheFiles = fileManager.findFiles(dataSource, cacheFileName, cacheFolderName); //NON-NLS
if (!cacheFiles.isEmpty()) {
for (AbstractFile abstractFile: cacheFiles ) {
if (abstractFile.getUniquePath().trim().endsWith(DEFAULT_CACHE_PATH_STR)) {
@ -590,57 +594,51 @@ final class ChromeCacheExtractor {
}
/**
* Finds abstract file(s) for a cache file with the specified name.
* Finds the "index" file that exists in each user's cache. This is used to
* enumerate all of the caches on the system.
*
* @return list of abstract files matching the specified file name
* @return list of index files in Chrome cache folders
* @throws TskCoreException
*/
private List<AbstractFile> findCacheIndexFiles() throws TskCoreException {
private List<AbstractFile> findIndexFiles() throws TskCoreException {
return fileManager.findFiles(dataSource, "index", DEFAULT_CACHE_PATH_STR); //NON-NLS
}
/**
* Returns CacheFileCopy for the specified file from the file table.
* Find the file and creates a copy if it isn't already in the table.
* Finds the specified data or index cache file under the specified path.
* The FileWrapper is easier to parse than a raw AbstractFile.
* Will save the file to an internal cache. For the f_XXXX files, use
* findAbstractFile().
*
* @param cacheFileName Name of file
* @param cachePath Parent path of file
* @return CacheFileCopy
* @param cacheFileName Name file file
* @param cacheFolderName Name of user's cache folder
* @return Cache file copy
* @throws TskCoreException
*/
private Optional<CacheFileCopy> getCacheFileCopy(String cacheFileName, String cachePath) throws TskCoreException, IngestModuleException {
private Optional<FileWrapper> findDataOrIndexFile(String cacheFileName, String cacheFolderName) throws TskCoreException, IngestModuleException {
// Check if the file is already in the cache
String fileTableKey = cachePath + cacheFileName;
String fileTableKey = cacheFolderName + cacheFileName;
if (fileCopyCache.containsKey(fileTableKey)) {
return Optional.of(fileCopyCache.get(fileTableKey));
}
return findAndCopyCacheFile(cacheFileName, cachePath);
}
/**
* Finds the specified cache file under the specified path, and makes a temporary copy.
*
* @param cacheFileName
* @return Cache file copy
* @throws TskCoreException
*/
private Optional<CacheFileCopy> findAndCopyCacheFile(String cacheFileName, String cachePath) throws TskCoreException, IngestModuleException {
Optional<AbstractFile> cacheFileOptional = findCacheFile(cacheFileName, cachePath);
if (!cacheFileOptional.isPresent()) {
// Use Autopsy to get the AbstractFile
Optional<AbstractFile> abstractFileOptional = findAbstractFile(cacheFileName, cacheFolderName);
if (!abstractFileOptional.isPresent()) {
return Optional.empty();
}
// Wrap the file so that we can get the ByteBuffer later.
// @@@ BC: I think this should nearly all go into FileWrapper and be done lazily and perhaps based on size.
// Many of the files are small enough to keep in memory for the ByteBuffer
// write the file to disk so that we can have a memory-mapped ByteBuffer
// @@@ NOTE: I'm not sure this is needed. These files are small enough and we could probably just load them into
// a byte[] for ByteBuffer.
AbstractFile cacheFile = cacheFileOptional.get();
AbstractFile cacheFile = abstractFileOptional.get();
RandomAccessFile randomAccessFile = null;
String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cachePath + cacheFile.getName(); //NON-NLS
String tempFilePathname = RAImageIngestModule.getRATempPath(currentCase, moduleName) + cacheFolderName + cacheFile.getName(); //NON-NLS
try {
File newFile = new File(tempFilePathname);
ContentUtils.writeToFile(cacheFile, newFile, context::dataSourceIngestIsCancelled);
@ -651,13 +649,13 @@ final class ChromeCacheExtractor {
(int) roChannel.size());
cacheFileROBuf.order(ByteOrder.nativeOrder());
CacheFileCopy cacheFileCopy = new CacheFileCopy(cacheFile, randomAccessFile, cacheFileROBuf );
FileWrapper cacheFileWrapper = new FileWrapper(cacheFile, randomAccessFile, cacheFileROBuf );
if (!cacheFileName.startsWith("f_")) {
fileCopyCache.put(cachePath + cacheFileName, cacheFileCopy);
fileCopyCache.put(cacheFolderName + cacheFileName, cacheFileWrapper);
}
return Optional.of(cacheFileCopy);
return Optional.of(cacheFileWrapper);
}
catch (IOException ex) {
@ -699,7 +697,7 @@ final class ChromeCacheExtractor {
lastFile = indexFileROBuf.getInt();
indexFileROBuf.position(indexFileROBuf.position()+4); // this_id
indexFileROBuf.position(indexFileROBuf.position()+4); // stats cache address
indexFileROBuf.position(indexFileROBuf.position()+4); // stats cache cacheAddress
tableLen = indexFileROBuf.getInt();
}
@ -745,7 +743,7 @@ final class ChromeCacheExtractor {
}
/**
* Cache file type enum - as encoded the address
* Cache file type enum - as encoded the cacheAddress
*/
enum CacheFileTypeEnum {
EXTERNAL,
@ -761,18 +759,21 @@ final class ChromeCacheExtractor {
/**
* Encapsulates Cache address.
*
* CacheAddress is a unsigned 32 bit number
* Google defines the notion of a CacheAddress that spans the various
* files in the cache. The 32-bit number embeds which file and offset
* the address is in.
* The below defines what each bit means. A 1 means the bit is used
* for that value.
*
* Header:
* 1000 0000 0000 0000 0000 0000 0000 0000 : initialized bit
* 0111 0000 0000 0000 0000 0000 0000 0000 : file type
*
* If separate file:
* If external file: (i.e. f_XXXX)
* 0000 1111 1111 1111 1111 1111 1111 1111 : file# 0 - 268,435,456 (2^28)
*
* If block file:
* If block file: (i.e. data_X)
* 0000 1100 0000 0000 0000 0000 0000 0000 : reserved bits
* 0000 0011 0000 0000 0000 0000 0000 0000 : number of contiguous blocks 1-4
* 0000 0000 1111 1111 0000 0000 0000 0000 : file selector 0 - 255
@ -780,7 +781,7 @@ final class ChromeCacheExtractor {
*
*/
final class CacheAddress {
// sundry constants to parse the bit fields in address
// sundry constants to parse the bit fields
private static final long ADDR_INITIALIZED_MASK = 0x80000000l;
private static final long FILE_TYPE_MASK = 0x70000000;
private static final long FILE_TYPE_OFFSET = 28;
@ -801,11 +802,18 @@ final class ChromeCacheExtractor {
private final String cachePath;
/**
*
* @param uint32 Encoded address
* @param cachePath Folder that index file was located in
*/
CacheAddress(long uint32, String cachePath) {
uint32CacheAddr = uint32;
this.cachePath = cachePath;
// decode the bit fields packed into the 32-bit cache address
int fileTypeEnc = (int)(uint32CacheAddr & FILE_TYPE_MASK) >> FILE_TYPE_OFFSET;
fileType = CacheFileTypeEnum.values()[fileTypeEnc];
@ -930,33 +938,33 @@ final class ChromeCacheExtractor {
*
* A data segment may be compressed - GZIP and BRotli are the two commonly used methods.
*/
final class CacheData {
final class CacheDataSegment {
private int length;
private final CacheAddress address;
private final CacheAddress cacheAddress;
private CacheDataTypeEnum type;
private boolean isHTTPHeaderHint;
private CacheFileCopy cacheFileCopy = null;
private FileWrapper cacheFileCopy = null;
private byte[] data = null;
private String httpResponse;
private final Map<String, String> httpHeaders = new HashMap<>();
CacheData(CacheAddress cacheAdress, int len) {
this(cacheAdress, len, false);
CacheDataSegment(CacheAddress cacheAddress, int len) {
this(cacheAddress, len, false);
}
CacheData(CacheAddress cacheAdress, int len, boolean isHTTPHeader ) {
CacheDataSegment(CacheAddress cacheAddress, int len, boolean isHTTPHeader ) {
this.type = CacheDataTypeEnum.UNKNOWN;
this.length = len;
this.address = cacheAdress;
this.cacheAddress = cacheAddress;
this.isHTTPHeaderHint = isHTTPHeader;
}
boolean isInExternalFile() {
return address.isInExternalFile();
return cacheAddress.isInExternalFile();
}
boolean hasHTTPHeaders() {
@ -1006,13 +1014,13 @@ final class ChromeCacheExtractor {
}
// Don't extract data from external files.
if (!address.isInExternalFile() ) {
if (!cacheAddress.isInExternalFile() ) {
cacheFileCopy = getCacheFileCopy(address.getFilename(), address.getCachePath()).get();
cacheFileCopy = findDataOrIndexFile(cacheAddress.getFilename(), cacheAddress.getCachePath()).get();
this.data = new byte [length];
ByteBuffer buf = cacheFileCopy.getByteBuffer();
int dataOffset = DATAFILE_HDR_SIZE + address.getStartBlock() * address.getBlockSize();
int dataOffset = DATAFILE_HDR_SIZE + cacheAddress.getStartBlock() * cacheAddress.getBlockSize();
buf.position(dataOffset);
buf.get(data, 0, length);
@ -1095,8 +1103,8 @@ final class ChromeCacheExtractor {
return type;
}
CacheAddress getAddress() {
return address;
CacheAddress getCacheAddress() {
return cacheAddress;
}
@ -1111,13 +1119,13 @@ final class ChromeCacheExtractor {
String save() throws TskCoreException, IngestModuleException {
String fileName;
if (address.isInExternalFile()) {
fileName = address.getFilename();
if (cacheAddress.isInExternalFile()) {
fileName = cacheAddress.getFilename();
} else {
fileName = String.format("%s__%08x", address.getFilename(), address.getUint32CacheAddr());
fileName = String.format("%s__%08x", cacheAddress.getFilename(), cacheAddress.getUint32CacheAddr());
}
String filePathName = getAbsOutputFolderName() + address.getCachePath() + fileName;
String filePathName = getAbsOutputFolderName() + cacheAddress.getCachePath() + fileName;
save(filePathName);
return fileName;
@ -1199,7 +1207,7 @@ final class ChromeCacheExtractor {
// int32 state; // Current state.
// uint64 creation_time;
// int32 key_len;
// CacheAddr long_key; // Optional address of a long key.
// CacheAddr long_key; // Optional cacheAddress of a long key.
// int32 data_size[4]; // We can store up to 4 data streams for each
// CacheAddr data_addr[4]; // entry.
// uint32 flags; // Any combination of EntryFlags.
@ -1217,7 +1225,7 @@ final class ChromeCacheExtractor {
private static final int MAX_KEY_LEN = 256-24*4;
private final CacheAddress selfAddress;
private final CacheFileCopy cacheFileCopy;
private final FileWrapper cacheFileCopy;
private final long hash;
private final CacheAddress nextAddress;
@ -1230,17 +1238,17 @@ final class ChromeCacheExtractor {
private final long creationTime;
private final int keyLen;
private final CacheAddress longKeyAddresses; // address of the key, if the key is external to the entry
private final CacheAddress longKeyAddresses; // cacheAddress of the key, if the key is external to the entry
private final int dataSizes[];
private final CacheAddress dataAddresses[];
private List<CacheData> dataList;
private final int[] dataSegmentSizes;
private final CacheAddress[] dataSegmentIndexFileEntries;
private List<CacheDataSegment> dataSegments;
private final long flags;
private String key; // Key may be found within the entry or may be external
CacheEntry(CacheAddress cacheAdress, CacheFileCopy cacheFileCopy ) {
CacheEntry(CacheAddress cacheAdress, FileWrapper cacheFileCopy ) {
this.selfAddress = cacheAdress;
this.cacheFileCopy = cacheFileCopy;
@ -1270,14 +1278,14 @@ final class ChromeCacheExtractor {
uint32 = fileROBuf.getInt() & UINT32_MASK;
longKeyAddresses = (uint32 != 0) ? new CacheAddress(uint32, selfAddress.getCachePath()) : null;
dataList = null;
dataSizes= new int[4];
dataSegments = null;
dataSegmentSizes= new int[4];
for (int i = 0; i < 4; i++) {
dataSizes[i] = fileROBuf.getInt();
dataSegmentSizes[i] = fileROBuf.getInt();
}
dataAddresses = new CacheAddress[4];
dataSegmentIndexFileEntries = new CacheAddress[4];
for (int i = 0; i < 4; i++) {
dataAddresses[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK, selfAddress.getCachePath());
dataSegmentIndexFileEntries[i] = new CacheAddress(fileROBuf.getInt() & UINT32_MASK, selfAddress.getCachePath());
}
flags = fileROBuf.getInt() & UINT32_MASK;
@ -1293,7 +1301,7 @@ final class ChromeCacheExtractor {
if (longKeyAddresses != null) {
// Key is stored outside of the entry
try {
CacheData data = new CacheData(longKeyAddresses, this.keyLen, true);
CacheDataSegment data = new CacheDataSegment(longKeyAddresses, this.keyLen, true);
key = data.getDataString();
} catch (TskCoreException | IngestModuleException ex) {
logger.log(Level.WARNING, String.format("Failed to get external key from address %s", longKeyAddresses)); //NON-NLS
@ -1315,7 +1323,7 @@ final class ChromeCacheExtractor {
}
}
public CacheAddress getAddress() {
public CacheAddress getCacheAddress() {
return selfAddress;
}
@ -1323,7 +1331,7 @@ final class ChromeCacheExtractor {
return hash;
}
public CacheAddress getNextAddress() {
public CacheAddress getNextCacheAddress() {
return nextAddress;
}
@ -1359,20 +1367,20 @@ final class ChromeCacheExtractor {
* @throws TskCoreException
* @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
*/
public List<CacheData> getData() throws TskCoreException, IngestModuleException {
public List<CacheDataSegment> getDataSegments() throws TskCoreException, IngestModuleException {
if (dataList == null) {
dataList = new ArrayList<>();
if (dataSegments == null) {
dataSegments = new ArrayList<>();
for (int i = 0; i < 4; i++) {
if (dataSizes[i] > 0) {
CacheData cacheData = new CacheData(dataAddresses[i], dataSizes[i], true );
if (dataSegmentSizes[i] > 0) {
CacheDataSegment cacheData = new CacheDataSegment(dataSegmentIndexFileEntries[i], dataSegmentSizes[i], true );
cacheData.extract();
dataList.add(cacheData);
dataSegments.add(cacheData);
}
}
}
return dataList;
return dataSegments;
}
/**
@ -1383,10 +1391,10 @@ final class ChromeCacheExtractor {
* @return true if the entry has HTTP headers
*/
boolean hasHTTPHeaders() {
if ((dataList == null) || dataList.isEmpty()) {
if ((dataSegments == null) || dataSegments.isEmpty()) {
return false;
}
return dataList.get(0).hasHTTPHeaders();
return dataSegments.get(0).hasHTTPHeaders();
}
/**
@ -1396,11 +1404,11 @@ final class ChromeCacheExtractor {
* @return header value, null if not found
*/
String getHTTPHeader(String key) {
if ((dataList == null) || dataList.isEmpty()) {
if ((dataSegments == null) || dataSegments.isEmpty()) {
return null;
}
// First data segment has the HTTP headers, if any
return dataList.get(0).getHTTPHeader(key);
return dataSegments.get(0).getHTTPHeader(key);
}
/**
@ -1409,11 +1417,11 @@ final class ChromeCacheExtractor {
* @return header value, null if not found
*/
String getHTTPHeaders() {
if ((dataList == null) || dataList.isEmpty()) {
if ((dataSegments == null) || dataSegments.isEmpty()) {
return null;
}
// First data segment has the HTTP headers, if any
return dataList.get(0).getHTTPHeaders();
return dataSegments.get(0).getHTTPHeaders();
}
/**
@ -1449,11 +1457,11 @@ final class ChromeCacheExtractor {
(nextAddress != null) ? nextAddress.toString() : "None"));
for (int i = 0; i < 4; i++) {
if (dataSizes[i] > 0) {
if (dataSegmentSizes[i] > 0) {
sb.append(String.format("\n\tData %d: cache address = %s, Data = %s",
i, dataAddresses[i].toString(),
(dataList != null)
? dataList.get(i).toString()
i, dataSegmentIndexFileEntries[i].toString(),
(dataSegments != null)
? dataSegments.get(i).toString()
: "Data not retrived yet."));
}
}

View File

@ -23,10 +23,10 @@
package org.sleuthkit.autopsy.recentactivity;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.BufferUnderflowException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -72,6 +72,8 @@ final class ExtractRecycleBin extends Extract {
private static final String RECYCLE_BIN_ARTIFACT_NAME = "TSK_RECYCLE_BIN"; //NON-NLS
private static final String RECYCLE_BIN_DIR_NAME = "$RECYCLE.BIN"; //NON-NLS
private static final int V1_FILE_NAME_OFFSET = 24;
private static final int V2_FILE_NAME_OFFSET = 28;
@ -127,7 +129,7 @@ final class ExtractRecycleBin extends Extract {
// Get the $I files
List<AbstractFile> iFiles;
try {
iFiles = fileManager.findFiles(dataSource, "$I%"); //NON-NLS
iFiles = fileManager.findFiles(dataSource, "$I%", RECYCLE_BIN_DIR_NAME); //NON-NLS
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Unable to find recycle bin I files.", ex); //NON-NLS
return; // No need to continue
@ -149,7 +151,8 @@ final class ExtractRecycleBin extends Extract {
}
/**
* Process each individual iFile.
* Process each individual iFile. Each iFile ($I) contains metadata about files that have been deleted.
* Each $I file should have a corresponding $R file which is the actual deleted file.
*
* @param context
* @param recycleBinArtifactType Module created artifact type
@ -174,7 +177,7 @@ final class ExtractRecycleBin extends Extract {
try {
metaData = parseIFile(tempFilePath);
} catch (IOException ex) {
logger.log(Level.WARNING, String.format("Unable to parse iFile %s", iFile.getName()), ex); //NON-NLS
logger.log(Level.WARNING, String.format("Unable to parse iFile %s", iFile.getParentPath() + iFile.getName()), ex); //NON-NLS
// Unable to parse the $I file move onto the next file
return;
}
@ -266,38 +269,35 @@ final class ExtractRecycleBin extends Extract {
}
/**
* Parse the $I file.
* Parse the $I file. This file contains metadata information about deleted files
*
* File format prior to Windows 10:
* <table>
* <tr><th>Offset</th><th>Size</th><th>Description</th></tr>
* <tr><td>0</td><td>8</td><td>Header</td></tr>
* <tr><td>8</td><td>8</td><td>File Size</td></tr>
* <tr><td>16</td><td>8</td><td>Deleted Timestamp</td></tr>
* <tr><td>24</td><td>520</td><td>File Name</td></tr>
* </table>
* Offset Size Description
* 0 8 Header
* 8 8 File Size
* 16 8 Deleted Timestamp
* 24 520 File Name
*
* File format Windows 10+
* <table>
* <tr><th>Offset</th><th>Size</th><th>Description</th></tr>
* <tr><td>0</td><td>8</td><td>Header</td></tr>
* <tr><td>8</td><td>8</td><td>File Size</td></tr>
* <tr><td>16</td><td>8</td><td>Deleted TimeStamp</td></tr>
* <tr><td>24</td><td>4</td><td>File Name Length</td></tr>
* <tr><td>28</td><td>var</td><td>File Name</td></tr>
* </table>
* Offset Size Description
* 0 8 Header
* 8 8 File Size
* 16 8 Deleted TimeStamp
* 24 4 File Name Length
* 28 var File Name
*
* For versions of Windows prior to 10, header = 0x01. Windows 10+ header ==
* 0x02
*
* @param iFilePath Path to local copy of file in temp folder
*
* @throws FileNotFoundException
* @throws IOException
*/
private RecycledFileMetaData parseIFile(String iFilePath) throws FileNotFoundException, IOException {
private RecycledFileMetaData parseIFile(String iFilePath) throws IOException {
try {
byte[] allBytes = Files.readAllBytes(Paths.get(iFilePath));
ByteBuffer byteBuffer = ByteBuffer.wrap(allBytes);
byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
@ -320,6 +320,9 @@ final class ExtractRecycleBin extends Extract {
String fileName = new String(stringBytes, "UTF-16LE"); //NON-NLS
return new RecycledFileMetaData(fileSize, timestamp, fileName);
} catch (IOException | BufferUnderflowException | IllegalArgumentException | ArrayIndexOutOfBoundsException ex) {
throw new IOException("Error parsing $I File, file is corrupt or not a valid I$ file", ex);
}
}
/**

View File

@ -66,6 +66,7 @@ import java.util.Set;
import java.util.HashSet;
import static java.util.Locale.US;
import static java.util.TimeZone.getTimeZone;
import org.apache.commons.io.FilenameUtils;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
@ -1176,7 +1177,21 @@ class ExtractRegistry extends Extract {
line = line.trim();
if (line.matches("^adoberdr v.*")) {
parseAdobeMRUList(regFileName, regFile, reader);
parseAdobeMRUList(regFile, reader);
} else if (line.matches("^mpmru v.*")) {
parseMediaPlayerMRUList(regFile, reader);
} else if (line.matches("^trustrecords v.*")) {
parseTrustrecordsMRUList(regFile, reader);
} else if (line.matches("^ArcHistory:")) {
parseArchHistoryMRUList(regFile, reader);
} else if (line.matches("^applets v.*")) {
parseGenericMRUList(regFile, reader);
} else if (line.matches("^mmc v.*")) {
parseGenericMRUList(regFile, reader);
} else if (line.matches("^winrar v.*")) {
parseWinRARMRUList(regFile, reader);
} else if (line.matches("^officedocs2010 v.*")) {
parseOfficeDocs2010MRUList(regFile, reader);
}
line = reader.readLine();
}
@ -1194,7 +1209,7 @@ class ExtractRegistry extends Extract {
*
* @throws FileNotFound and IOException
*/
private void parseAdobeMRUList(String regFileName, AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
private void parseAdobeMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
List<BlackboardArtifact> bbartifacts = new ArrayList<>();
String line = reader.readLine();
SimpleDateFormat adobePluginDateFormat = new SimpleDateFormat("yyyyMMddHHmmssZ", US);
@ -1215,6 +1230,7 @@ class ExtractRegistry extends Extract {
if (fileName.charAt(0) == '/') {
fileName = fileName.substring(1,fileName.length() - 1);
fileName = fileName.replaceFirst("/", ":/");
fileName = FilenameUtils.normalize(fileName, true);
}
// Check to see if more then 2 tokens, Date may not be populated, will default to 0
if (tokens.length > 2) {
@ -1240,10 +1256,275 @@ class ExtractRegistry extends Extract {
}
line = line.trim();
}
if (bbartifacts != null) {
}
if (!bbartifacts.isEmpty()) {
postArtifacts(bbartifacts);
}
}
/**
* Create recently used artifacts to parse the mpmru records
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
private void parseMediaPlayerMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
List<BlackboardArtifact> bbartifacts = new ArrayList<>();
// Prime the loop with the first line of the mpmru plugin section.
// NOTE(review): BufferedReader.readLine() returns null at EOF, which would
// NPE in the loop conditions below -- this assumes RegRipper output always
// terminates the section with a SECTION_DIVIDER line; confirm.
String line = reader.readLine();
while (!line.contains(SECTION_DIVIDER)) {
line = reader.readLine();
line = line.trim();
// The "FileX -> <file>" records follow the "LastWrite" line.
if (line.contains("LastWrite")) {
line = reader.readLine();
// Columns are
// FileX -> <Media file>
while (!line.contains(SECTION_DIVIDER)) {
// Split line on "> " which is the record delimiter between position and file
String tokens[] = line.split("> ");
String fileName = tokens[1];
// Normalize separators to forward slashes for a consistent TSK_PATH value.
fileName = FilenameUtils.normalize(fileName, true);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
// createArtifactWithAttributes may return null; keep only real artifacts.
if(bba != null) {
bbartifacts.add(bba);
}
line = reader.readLine();
}
line = line.trim();
}
}
// Post everything collected from this section in a single batch.
if (!bbartifacts.isEmpty()) {
postArtifacts(bbartifacts);
}
}
/**
* Create recently used artifacts to parse the regripper output
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
/**
 * Creates TSK_RECENT_OBJECT artifacts from generic RegRipper MRU plugin
 * output (e.g. the applets and mmc plugins), whose records have the form
 * "FileX -> &lt;file&gt;".
 *
 * Fixes over the original: guards every readLine() result against null so a
 * truncated report cannot NPE at EOF, and skips (rather than crashes on)
 * records that lack the "> " delimiter.
 *
 * @param regFile registry file the artifacts are associated with
 * @param reader  buffered reader positioned at the start of the plugin output
 *
 * @throws FileNotFoundException
 * @throws IOException
 */
private void parseGenericMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
    List<BlackboardArtifact> bbartifacts = new ArrayList<>();
    String line = reader.readLine();
    // readLine() returns null at EOF; stop cleanly instead of NPE-ing if the
    // report is truncated before the SECTION_DIVIDER.
    while (line != null && !line.contains(SECTION_DIVIDER)) {
        line = reader.readLine();
        if (line == null) {
            break;
        }
        line = line.trim();
        // The records follow the "LastWrite" line.
        if (line.contains("LastWrite")) {
            line = reader.readLine();
            // Columns are
            // FileX -> <file>
            while (line != null && !line.contains(SECTION_DIVIDER) && !line.isEmpty() && !line.contains("Applets")) {
                // Split line on "> " which is the record delimiter between position and file
                String tokens[] = line.split("> ");
                if (tokens.length > 1) {
                    // Normalize separators to forward slashes for a consistent TSK_PATH value.
                    String fileName = FilenameUtils.normalize(tokens[1], true);
                    Collection<BlackboardAttribute> attributes = new ArrayList<>();
                    attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
                    BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
                    // createArtifactWithAttributes may return null; keep only real artifacts.
                    if (bba != null) {
                        bbartifacts.add(bba);
                    }
                }
                line = reader.readLine();
            }
            if (line != null) {
                line = line.trim();
            }
        }
    }
    // Post everything collected from this section in a single batch.
    if (!bbartifacts.isEmpty()) {
        postArtifacts(bbartifacts);
    }
}
/**
* Create recently used artifacts to parse the WinRAR output
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
private void parseWinRARMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
List<BlackboardArtifact> bbartifacts = new ArrayList<>();
// Prime the loop with the first line of the winrar plugin section.
// NOTE(review): readLine() returns null at EOF; the loop conditions below
// assume the section always ends with a SECTION_DIVIDER line -- confirm.
String line = reader.readLine();
while (!line.contains(SECTION_DIVIDER)) {
line = reader.readLine();
line = line.trim();
// The "FileX -> <file>" records follow the "LastWrite" line.
if (line.contains("LastWrite")) {
line = reader.readLine();
// Columns are
// FileX -> <Media file>
// An empty line here means the WinRAR MRU list had no entries.
if (!line.isEmpty()) {
while (!line.contains(SECTION_DIVIDER)) {
// Split line on "> " which is the record delimiter between position and file
String tokens[] = line.split("> ");
String fileName = tokens[1];
// Normalize separators to forward slashes for a consistent TSK_PATH value.
fileName = FilenameUtils.normalize(fileName, true);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
// createArtifactWithAttributes may return null; keep only real artifacts.
if(bba != null) {
bbartifacts.add(bba);
}
line = reader.readLine();
}
}
line = line.trim();
}
}
// Post everything collected from this section in a single batch.
if (!bbartifacts.isEmpty()) {
postArtifacts(bbartifacts);
}
}
/**
* Create recently used artifacts to parse the runmru ArcHistory records
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
private void parseArchHistoryMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
List<BlackboardArtifact> bbartifacts = new ArrayList<>();
// The ArcHistory section lists one archive path per line, terminated by the
// "PathHistory:" header of the next section or by an empty line.
// NOTE(review): readLine() returns null at EOF; this assumes the section is
// always followed by a "PathHistory:" line or a blank line -- confirm.
String line = reader.readLine();
line = line.trim();
if (!line.contains("PathHistory:")) {
while (!line.contains("PathHistory:") && !line.isEmpty()) {
// Columns are
// <fileName>
String fileName = line;
// Normalize separators to forward slashes for a consistent TSK_PATH value.
fileName = FilenameUtils.normalize(fileName, true);
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
// createArtifactWithAttributes may return null; keep only real artifacts.
if (bba != null) {
bbartifacts.add(bba);
}
line = reader.readLine();
line = line.trim();
}
}
// Post everything collected from this section in a single batch.
if (!bbartifacts.isEmpty()) {
postArtifacts(bbartifacts);
}
}
/**
* Create recently used artifacts to parse the Office Documents 2010 records
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
/**
 * Creates TSK_RECENT_OBJECT artifacts from the RegRipper officedocs2010
 * plugin output. Records in the second section are pipe-delimited:
 * 1294283922|REG|||OfficeDocs2010 - F:\Windows_time_Rules_xp.doc
 *
 * Fixes over the original: guards every readLine() result against null so a
 * truncated report cannot NPE at EOF, and logs-and-skips malformed records
 * instead of letting NumberFormatException / ArrayIndexOutOfBoundsException
 * abort the whole section.
 *
 * @param regFile registry file the artifacts are associated with
 * @param reader  buffered reader positioned at the start of the plugin output
 *
 * @throws FileNotFoundException
 * @throws IOException
 */
private void parseOfficeDocs2010MRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
    List<BlackboardArtifact> bbartifacts = new ArrayList<>();
    String line = reader.readLine();
    if (line != null) {
        line = line.trim();
    }
    // Reading to the SECTION DIVIDER to get next section of records to process. Dates appear to have
    // multiple spaces in them that makes it harder to parse so next section will be easier to parse.
    while (line != null && !line.contains(SECTION_DIVIDER)) {
        line = reader.readLine();
    }
    line = (line != null) ? reader.readLine() : null;
    while (line != null && !line.contains(SECTION_DIVIDER)) {
        // record has the following format
        // 1294283922|REG|||OfficeDocs2010 - F:\Windows_time_Rules_xp.doc
        try {
            String tokens[] = line.split("\\|");
            Long docDate = Long.valueOf(tokens[0]);
            String fileNameTokens[] = tokens[4].split(" - ");
            // Normalize separators to forward slashes for a consistent TSK_PATH value.
            String fileName = FilenameUtils.normalize(fileNameTokens[1], true);
            Collection<BlackboardAttribute> attributes = new ArrayList<>();
            attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
            attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, getName(), docDate));
            BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
            // createArtifactWithAttributes may return null; keep only real artifacts.
            if (bba != null) {
                bbartifacts.add(bba);
            }
        } catch (NumberFormatException | ArrayIndexOutOfBoundsException ex) {
            // Malformed record: log it and continue with the next one rather
            // than aborting the whole section.
            logger.log(Level.WARNING, String.format("Failed to parse officedocs2010 record %s", line), ex); //NON-NLS
        }
        line = reader.readLine();
        if (line != null) {
            line = line.trim();
        }
    }
    // Post everything collected from this section in a single batch.
    if (!bbartifacts.isEmpty()) {
        postArtifacts(bbartifacts);
    }
}
/**
* Create recently used artifacts to parse the trustrecords records
*
* @param regFileName name of the regripper output file
*
* @param regFile registry file the artifact is associated with
*
* @param reader buffered reader to parse adobemru records
*
* @throws FileNotFound and IOException
*/
private void parseTrustrecordsMRUList(AbstractFile regFile, BufferedReader reader) throws FileNotFoundException, IOException {
// Parent path of the registry hive, minus the trailing separator; used to
// expand %USERPROFILE% in the recorded paths.
String userProfile = regFile.getParentPath();
userProfile = userProfile.substring(0, userProfile.length() - 2);
List<BlackboardArtifact> bbartifacts = new ArrayList<>();
// Timestamps look like "Wed May 31 14:33:03 2017 Z".
SimpleDateFormat pluginDateFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy", US);
Long usedTime = Long.valueOf(0);
// NOTE(review): readLine() returns null at EOF; the loop conditions below
// assume the section always ends with a SECTION_DIVIDER line -- confirm.
String line = reader.readLine();
while (!line.contains(SECTION_DIVIDER)) {
line = reader.readLine();
line = line.trim();
// Reset the timestamp so a failed parse on this record defaults to 0.
usedTime = Long.valueOf(0);
// Skip decoration and header lines; only "<date> : <file or website>"
// records reach the parsing below.
if (!line.contains("**") && !line.contains("----------") && !line.contains("LastWrite")
&& !line.contains(SECTION_DIVIDER) && !line.isEmpty()) {
// Columns are
// Date : <File Name>/<Website>
// Split line on " : " which is the record delimiter between position and file
String fileName = null;
String tokens[] = line.split(" : ");
fileName = tokens[1];
fileName = fileName.replace("%USERPROFILE%", userProfile);
// Normalize separators to forward slashes for a consistent TSK_PATH value.
fileName = FilenameUtils.normalize(fileName, true);
// Time in the format of Wed May 31 14:33:03 2017 Z
try {
String fileUsedTime = tokens[0].replaceAll(" Z","");
Date usedDate = pluginDateFormat.parse(fileUsedTime);
usedTime = usedDate.getTime()/1000;
} catch (ParseException ex) {
// catching error and displaying date that could not be parsed
// we set the timestamp to 0 and continue on processing
logger.log(Level.WARNING, String.format("Failed to parse date/time %s for TrustRecords artifact.", tokens[0]), ex); //NON-NLS
}
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, getName(), usedTime));
BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes);
// createArtifactWithAttributes may return null; keep only real artifacts.
if(bba != null) {
bbartifacts.add(bba);
}
line = line.trim();
}
}
// Post everything collected from this section in a single batch.
if (!bbartifacts.isEmpty()) {
postArtifacts(bbartifacts);
}
}
/**

View File

@ -29,6 +29,7 @@ import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.Collection;
import org.apache.commons.io.FilenameUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.JLNK;
import org.sleuthkit.autopsy.coreutils.JLnkParser;
@ -107,7 +108,7 @@ class RecentDocumentsByLnk extends Extract {
}
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
String path = lnk.getBestPath();
String path = FilenameUtils.normalize(lnk.getBestPath(), true);
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH,
NbBundle.getMessage(this.getClass(),
"RecentDocumentsByLnk.parentModuleName.noSpace"),

79
developers/envvarsetup.py Executable file
View File

@ -0,0 +1,79 @@
import os
import sys
from os import path
from pathlib import PureWindowsPath
# taken from https://stackoverflow.com/questions/2946746/python-checking-if-a-user-has-administrator-privileges?rq=1
def isUserAdmin():
    """Return True if the current user appears to have admin privileges.

    Only Windows users with administrative privileges can list the
    %SystemRoot%\\temp directory, so a successful listing is used as the
    admin check. On non-Windows hosts (or without admin rights) the
    listing raises OSError and this returns False.
    """
    try:
        # only windows users with admin privileges can read the C:\windows\temp
        os.listdir(os.sep.join([os.environ.get('SystemRoot', 'C:\\windows'), 'temp']))
        return True
    except OSError:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit and
        # genuine programming errors are no longer silently swallowed.
        return False
# Refuse to run without admin rights: SETX /M (used below) writes
# machine-level environment variables.
if not isUserAdmin():
    print("This script must be run with administrative privileges")
    exit(1)

# Fixed typo: the script file is named envvarsetup.py (was "envarsetup.py").
usage_message = "Usage: envvarsetup.py [full path to parent directory of sleuthkit, autopsy, etc.]"

if len(sys.argv) < 2:
    print(usage_message)
    exit(1)

# Base directory that contains the sleuthkit/libewf/libvhdi/libvmdk checkouts.
source_base_path = sys.argv[1]
if (not path.exists(source_base_path)):
    print("path: \"{0}\" does not exist".format(source_base_path))
    print(usage_message)
    exit(1)
'''
The following 6 lines can be configured to the specified paths (if different) on your system.
open_jdk_64_home is the 64 bit jdk and is the assumed default
source_base_path is the directory containing all necessary repos (i.e. autopsy, sleuthkit, etc.)
open_jdk_32_home and postgres_32_home are only necessary if building binaries
'''
# Default tool locations; edit these to match the local installation.
open_jdk_64_home = "C:\\Program Files\\ojdkbuild\\java-1.8.0-openjdk-1.8.0.222-1"
postgres_home = "C:\\Program Files\\PostgreSQL\\9.5"
ant_home = "C:\\Program Files\\NetBeans 8.2\\extide\\ant"
# 32-bit variants are only needed when building 32-bit binaries.
open_jdk_32_home = "C:\\Program Files (x86)\\ojdkbuild\\java-1.8.0-openjdk-1.8.0.222-1"
postgres_32_home = "C:\\Program Files (x86)\\PostgreSQL\\9.5"
# The 64-bit JDK is what JAVA_HOME/JRE_HOME/JDK_HOME point to below.
open_jdk_home = open_jdk_64_home
def path_join(*args):
    """Join the given components as a Windows-style path and return it as a string."""
    windows_path = PureWindowsPath(*args)
    return str(windows_path)
def set_var(env_var, env_val):
    """Persist a machine-level environment variable via SETX /M (requires admin)."""
    message = "Setting {0} to {1}".format(env_var, env_val)
    print(message)
    command = "SETX {0} \"{1}\" /M".format(env_var, env_val)
    os.system(command)
def add_path(paths):
    """Append the given directories to the machine PATH via SETX /M.

    Mutates `paths` in place by prepending "%PATH%" so the existing PATH
    value is preserved at the front of the new one.
    """
    print("Adding to path: {0}".format(paths))
    # insert paths at the beginning
    paths.insert(0, "%PATH%")
    # Run through cmd.exe so the shell expands %PATH% before SETX stores it.
    joined = ";".join(paths)
    cmd = "cmd.exe /k SETX PATH \"{0}\" /M & exit".format(joined)
    os.system(cmd)
# Persist the Java, library, PostgreSQL, and Sleuth Kit locations as
# machine-level environment variables (each call shells out to SETX /M).
set_var("JAVA_HOME", open_jdk_home)
set_var("JRE_HOME", path_join(open_jdk_home, "jre"))
set_var("JDK_HOME", open_jdk_home)
# Native library checkouts expected directly under source_base_path.
set_var("LIBEWF_HOME", path_join(source_base_path, "libewf_64bit"))
set_var("LIBVHDI_HOME", path_join(source_base_path, "libvhdi_64bit"))
set_var("LIBVMDK_HOME", path_join(source_base_path, "libvmdk_64bit", "libvmdk"))
set_var("POSTGRESQL_HOME_64", postgres_home)
set_var("TSK_HOME", path_join(source_base_path, "sleuthkit"))
# 64/32-bit specific homes used when building both flavors of binaries.
set_var("JDK_HOME_64", open_jdk_64_home)
set_var("JRE_HOME_64", path_join(open_jdk_64_home, "jre"))
set_var("JDK_HOME_32", open_jdk_32_home)
set_var("JRE_HOME_32", path_join(open_jdk_32_home, "jre"))
set_var("POSTGRESQL_HOME_32", postgres_32_home)
# Make the PostgreSQL tools and ant available on the system PATH.
add_path([path_join(postgres_home, "bin"), ant_home])