mirror of https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-06 21:00:22 +00:00
commit 2fdfd83d6f
@@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
- * Copyright 2015-2019 Basis Technology Corp.
+ * Copyright 2015-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -56,7 +56,7 @@ public final class Blackboard implements Closeable {
@Deprecated
public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
try {
- Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
+ Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "", null);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
throw new BlackboardException(ex.getMessage(), ex);
}
@@ -117,6 +117,7 @@ public final class Blackboard implements Closeable {
 * @deprecated Do not use.
 */
@Deprecated
+ @Override
public void close() throws IOException {
/*
 * No-op maintained for backwards compatibility. Clients should not
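The pattern that recurs throughout this commit is the widening of Blackboard.postArtifact() from two arguments to three, the new trailing parameter being the ingest job ID (or null when there is no job context). The following sketch is not part of the commit; only the postArtifact(artifact, moduleName, ingestJobId) signature comes from the diff, and the wrapper class is hypothetical:

// Illustrative sketch of the post-commit calling convention. Requires the
// Sleuth Kit data model on the classpath.
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;

final class ArtifactPoster {

    private ArtifactPoster() {
    }

    /**
     * Posts an artifact, passing the ingest job ID through when one is
     * available and null otherwise, mirroring the callers updated in this
     * commit (the deprecated indexArtifact() shim passes null).
     */
    static void post(Blackboard blackboard, BlackboardArtifact artifact,
            String moduleName, Long ingestJobId) throws Blackboard.BlackboardException {
        blackboard.postArtifact(artifact, moduleName, ingestJobId);
    }
}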
@@ -761,8 +761,7 @@ public final class CaseEventListener implements PropertyChangeListener {
BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
try {
- // index the artifact for keyword search
- blackboard.postArtifact(newAnalysisResult, MODULE_NAME);
+ blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
break;
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS
@@ -87,7 +87,7 @@ public class IngestEventsListener {
private final PropertyChangeListener pcl1 = new IngestModuleEventListener();
private final PropertyChangeListener pcl2 = new IngestJobEventListener();
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();

static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10;
static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20;

@@ -195,7 +195,7 @@ public class IngestEventsListener {
public synchronized static void setFlagSeenDevices(boolean value) {
flagSeenDevices = value;
}

/**
 * Configure the listener to flag unique apps or not.
 *
@@ -204,7 +204,7 @@ public class IngestEventsListener {
public synchronized static void setFlagUniqueArtifacts(boolean value) {
flagUniqueArtifacts = value;
}

/**
 * Are unique apps being flagged?
 *
@@ -256,10 +256,12 @@ public class IngestEventsListener {
}

/**
- * Create a "previously seen" hit for a device which was previously seen
- * in the central repository. NOTE: Artifacts that are too common will be skipped.
+ * Create a "previously seen" hit for a device which was previously seen in
+ * the central repository. NOTE: Artifacts that are too common will be
+ * skipped.
 *
- * @param originalArtifact the artifact to create the "previously seen" item for
+ * @param originalArtifact the artifact to create the "previously seen" item
+ * for
 * @param caseDisplayNames the case names the artifact was previously seen
 * in
 * @param aType The correlation type.
@@ -271,19 +273,19 @@ public class IngestEventsListener {
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames,
CorrelationAttributeInstance.Type aType, String value) {

// calculate score
Score score;
int numCases = caseDisplayNames.size();
if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
score = Score.SCORE_LIKELY_NOTABLE;
} else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
score = Score.SCORE_NONE;
} else {
// don't make an Analysis Result, the artifact is too common.
return;
}

String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(","));
String justification = "Previously seen in cases " + prevCases;
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
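The hunk above derives a Score from how many previous cases a correlation attribute was seen in. Here is a standalone sketch of that rule, using the two thresholds defined in this file; the helper class and the null-means-skip convention are illustrative only:

// Sketch of the scoring rule from makeAndPostPreviousSeenArtifact().
import org.sleuthkit.datamodel.Score;

final class PreviousCasesScorer {

    static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10;
    static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20;

    private PreviousCasesScorer() {
    }

    /**
     * Few previous cases: likely notable. A moderate number: no score, but
     * still worth an analysis result. Too common: no result at all, which
     * this sketch signals with null.
     */
    static Score scoreFor(int numCases) {
        if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) {
            return Score.SCORE_LIKELY_NOTABLE;
        } else if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) {
            return Score.SCORE_NONE;
        }
        return null; // too common; the caller makes no analysis result
    }
}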
@@ -297,40 +299,42 @@ public class IngestEventsListener {
value),
new BlackboardAttribute(
TSK_OTHER_CASES, MODULE_NAME,
prevCases));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(),
score, justification);
}

/**
- * Create a "previously unseen" hit for an application which was never seen in
- * the central repository.
+ * Create a "previously unseen" hit for an application which was never seen
+ * in the central repository.
 *
- * @param originalArtifact the artifact to create the "previously unseen" item
- * for
+ * @param originalArtifact the artifact to create the "previously unseen"
+ * item for
 * @param aType The correlation type.
 * @param value The correlation value.
 */
static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(
new BlackboardAttribute(
TSK_CORRELATION_TYPE, MODULE_NAME,
aType.getDisplayName()),
new BlackboardAttribute(
TSK_CORRELATION_VALUE, MODULE_NAME,
value));
makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "",
Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before");
}

/**
 * Make an artifact to flag the passed in artifact.
 *
 * @param newArtifactType Type of artifact to create.
 * @param originalArtifact Artifact in current case we want to flag
 * @param attributesForNewArtifact Attributes to assign to the new artifact
- * @param configuration The configuration to be specified for the new artifact hit
- * @param score sleuthkit.datamodel.Score to be assigned to this artifact
+ * @param configuration The configuration to be specified for the
+ * new artifact hit
+ * @param score sleuthkit.datamodel.Score to be assigned
+ * to this artifact
 * @param justification Justification string
 */
private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
@@ -341,14 +345,13 @@ public class IngestEventsListener {
// Create artifact if it doesn't already exist.
BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID());
if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) {
BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult(
newArtifactType, score,
null, configuration, justification, attributesForNewArtifact)
.getAnalysisResult();

try {
- // index the artifact for keyword search
- blackboard.postArtifact(newArtifact, MODULE_NAME);
+ blackboard.postArtifact(newArtifact, MODULE_NAME, null);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS
}
@@ -549,14 +552,14 @@ public class IngestEventsListener {
for (BlackboardArtifact bbArtifact : bbArtifacts) {
// makeCorrAttrToSave will filter out artifacts which should not be sources of CR data.
List<CorrelationAttributeInstance> convertedArtifacts = new ArrayList<>();
- if (bbArtifact instanceof DataArtifact){
- convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact)bbArtifact));
- }
+ if (bbArtifact instanceof DataArtifact) {
+ convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact));
+ }
for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) {
try {
// Only do something with this artifact if it's unique within the job
if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) {

// Get a list of instances for a given value (hash, email, etc.)
List<CorrelationAttributeInstance> previousOccurrences = new ArrayList<>();
// check if we are flagging things
@@ -591,7 +594,7 @@ public class IngestEventsListener {
continue;
}
}

// flag previously seen devices and communication accounts (emails, phones, etc)
if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty()
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
@@ -605,12 +608,12 @@ public class IngestEventsListener {
List<String> caseDisplayNames = getCaseDisplayNames(previousOccurrences);
makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}

// flag previously unseen apps and domains
if (flagUniqueItemsEnabled
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) {

if (previousOccurrences.isEmpty()) {
makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
}
@@ -635,7 +638,7 @@ public class IngestEventsListener {
} // DATA_ADDED
}
}

/**
 * Gets case display names for a list of CorrelationAttributeInstance.
 *
@@ -666,5 +669,5 @@ public class IngestEventsListener {
}
}
return caseNames;
}
}
@@ -87,6 +87,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
private Blackboard blackboard;
private final boolean createCorrelationProperties;
private final boolean flagUniqueArtifacts;
+ private IngestJobContext context;

/**
 * Instantiate the Central Repository ingest module.
@@ -229,6 +230,8 @@ final class CentralRepoIngestModule implements FileIngestModule {
})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
+ this.context = context;
+
IngestEventsListener.incrementCorrelationEngineModuleCount();

/*
@@ -256,7 +259,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) {
IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts);
}
}

if (CentralRepository.isEnabled() == false) {
/*
@@ -360,12 +363,12 @@ final class CentralRepoIngestModule implements FileIngestModule {
// Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE,
null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes)
.getAnalysisResult();
try {
// index the artifact for keyword search
- blackboard.postArtifact(tifArtifact, MODULE_NAME);
+ blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
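Unlike the listeners above, which pass null, CentralRepoIngestModule now saves its IngestJobContext in startUp() so posts can carry the real job ID. A hedged sketch of that pattern follows; the skeleton class is illustrative, and only IngestJobContext.getJobId() and the three-argument postArtifact() are taken from the diff:

// Sketch of the context-aware posting pattern used by the ingest module.
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;

class JobAwarePoster {

    private IngestJobContext context;

    void startUp(IngestJobContext context) {
        this.context = context; // saved so later posts can be tagged with the job
    }

    void postWithJobId(Blackboard blackboard, BlackboardArtifact artifact, String moduleName)
            throws Blackboard.BlackboardException {
        // The job ID lets downstream consumers associate the artifact with
        // the ingest job that produced it.
        blackboard.postArtifact(artifact, moduleName, context.getJobId());
    }
}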
@@ -64,7 +64,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
return Version.getVersion();
}

@Override
public boolean isFileIngestModuleFactory() {
return true;
}
@@ -83,7 +83,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter {
}
throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings");
}

@Override
public boolean hasGlobalSettingsPanel() {
return true;
@@ -148,7 +148,7 @@ public class ContactArtifactViewer extends javax.swing.JPanel implements Artifac
@Override
public Component getComponent() {
// Slap a vertical scrollbar on the panel.
- return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
+ return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
}

@Override
@@ -149,7 +149,8 @@ class MessageArtifactWorker extends SwingWorker<MessageArtifactWorker.MesssageAr
static Optional<BlackboardArtifact> getAssociatedArtifact(final BlackboardArtifact artifact) throws TskCoreException {
BlackboardAttribute attribute = artifact.getAttribute(TSK_ASSOCIATED_TYPE);
if (attribute != null) {
- return Optional.of(artifact.getSleuthkitCase().getArtifactByArtifactId(attribute.getValueLong()));
+ //in the context of the Message content viewer the associated artifact will always be a data artifact
+ return Optional.of(artifact.getSleuthkitCase().getBlackboard().getDataArtifactById(attribute.getValueLong()));
}
return Optional.empty();
}
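The MessageArtifactWorker change narrows the lookup from a generic artifact fetch to a typed data-artifact fetch. A sketch of the same resolution step in isolation; the wrapper class and the attribute-type parameter are illustrative, while the Blackboard.getDataArtifactById() call is the one used in the new code:

// Sketch: resolving a TSK_ASSOCIATED_ARTIFACT attribute to a DataArtifact.
import java.util.Optional;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.TskCoreException;

final class AssociatedArtifactLookup {

    private AssociatedArtifactLookup() {
    }

    static Optional<DataArtifact> resolve(BlackboardArtifact artifact,
            BlackboardAttribute.Type associatedType) throws TskCoreException {
        BlackboardAttribute attribute = artifact.getAttribute(associatedType);
        if (attribute != null) {
            // In the message viewer context the associated artifact is always
            // a data artifact, so the typed lookup is safe here.
            return Optional.of(artifact.getSleuthkitCase().getBlackboard()
                    .getDataArtifactById(attribute.getValueLong()));
        }
        return Optional.empty();
    }
}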
@@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -188,10 +187,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
switch (xryKey) {
case TEL:
case NUMBER:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

// Apply namespace or direction
if (xryNamespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) {
callerId = pair.getValue();
@@ -206,30 +205,30 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
// Although confusing, as these are also 'name spaces', it appears
// later versions of XRY just made these standardized lines.
case TO:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

calleeList.add(pair.getValue());
break;
case FROM:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

callerId = pair.getValue();
break;
case TIME:
try {
//Tranform value to seconds since epoch
long dateTimeSinceEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue());
startTime = dateTimeSinceEpoch;
} catch (DateTimeParseException ex) {
logger.log(Level.WARNING, String.format("[XRY DSP] Assumption"
+ " about the date time formatting of call logs is "
+ "not right. Here is the value [ %s ]", pair.getValue()), ex);
}
break;
case DIRECTION:
String directionString = pair.getValue().toLowerCase();
if (directionString.equals("incoming")) {
@@ -263,7 +262,6 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {

// Make sure we have the required fields, otherwise the CommHelper will
// complain about illegal arguments.
-
// These are all the invalid combinations.
if (callerId == null && calleeList.isEmpty()
|| direction == CommunicationDirection.INCOMING && callerId == null
@@ -288,10 +286,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
// it would have been a valid combination.
if (callerId != null) {
try {
- currentCase.getCommunicationsManager().createAccountFileInstance(
- Account.Type.PHONE, callerId, PARSER_NAME, parent);
+ currentCase.getCommunicationsManager().createAccountFileInstance(
+ Account.Type.PHONE, callerId, PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex);
}

otherAttributes.add(new BlackboardAttribute(
@@ -301,12 +299,11 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {

for (String phone : calleeList) {
try {
- currentCase.getCommunicationsManager().createAccountFileInstance(
- Account.Type.PHONE, phone, PARSER_NAME, parent);
+ currentCase.getCommunicationsManager().createAccountFileInstance(
+ Account.Type.PHONE, phone, PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", phone), ex);
}

otherAttributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
@@ -315,17 +312,17 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {

if (!otherAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), otherAttributes);

- currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
+ currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
}
} else {

// Otherwise we can safely use the helper.
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
- currentCase, PARSER_NAME, parent, Account.Type.PHONE);
+ currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);

helper.addCalllog(direction, callerId, calleeList, startTime,
endTime, callType, otherAttributes);
}
}
}
}
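All three XRY parsers in this commit gain a trailing null argument when building a CommunicationArtifactsHelper; the new parameter is the ingest job ID, and null is used because XRY report parsing runs outside any ingest job. A small usage sketch; the factory class is illustrative:

// Sketch of the updated helper construction from the XRY parsers.
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper;

final class XryHelperFactory {

    private XryHelperFactory() {
    }

    static CommunicationArtifactsHelper phoneHelper(SleuthkitCase currentCase,
            String parserName, Content parent) throws TskCoreException {
        // Final argument is the ingest job ID; null matches the calls in
        // this commit, since no ingest job context exists here.
        return new CommunicationArtifactsHelper(currentCase, parserName, parent,
                Account.Type.PHONE, null);
    }
}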
@@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import static org.sleuthkit.autopsy.datasourceprocessors.xry.AbstractSingleEntityParser.PARSER_NAME;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -136,7 +135,7 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
// complain about illegal arguments.
if (phoneNumber != null || homePhoneNumber != null || mobilePhoneNumber != null || hasAnEmail) {
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
- currentCase, PARSER_NAME, parent, Account.Type.DEVICE);
+ currentCase, PARSER_NAME, parent, Account.Type.DEVICE, null);

helper.addContact(contactName, phoneNumber, homePhoneNumber,
mobilePhoneNumber, emailAddr, additionalAttributes);
@@ -144,8 +143,8 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
// Just create an artifact with the attributes that we do have.
if (!additionalAttributes.isEmpty()) {
BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), additionalAttributes);

- currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
+ currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
}
}
}
@@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -95,6 +95,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * Indicates if the display name of the XRY key is a recognized type.
 *
 * @param name
+ *
 * @return
 */
public static boolean contains(String name) {
@@ -114,6 +115,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * contains() before hand.
 *
 * @param name
+ *
 * @return
 */
public static XryKey fromDisplayName(String name) {
@@ -149,6 +151,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * type.
 *
 * @param xryNamespace
+ *
 * @return
 */
public static boolean contains(String xryNamespace) {
@@ -169,6 +172,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * contains() before hand.
 *
 * @param xryNamespace
+ *
 * @return
 */
public static XryNamespace fromDisplayName(String xryNamespace) {
@@ -206,6 +210,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * Indicates if the display name of the XRY key is a recognized type.
 *
 * @param name
+ *
 * @return
 */
public static boolean contains(String name) {
@@ -225,6 +230,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * contains() before hand.
 *
 * @param name
+ *
 * @return
 */
public static XryMetaKey fromDisplayName(String name) {
@@ -253,11 +259,13 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * assumption is correct, otherwise an error will appear in the logs.
 *
 * @param reader The XRYFileReader that reads XRY entities from the
 * Message-SMS report.
 * @param parent The parent Content to create artifacts from.
- * @throws IOException If an I/O error is encountered during report reading
+ *
+ * @throws IOException If an I/O error is encountered during report
+ * reading
 * @throws TskCoreException If an error during artifact creation is
 * encountered.
 */
@Override
public void parse(XRYFileReader reader, Content parent, SleuthkitCase currentCase) throws IOException, TskCoreException, BlackboardException {
@@ -270,10 +278,10 @@ final class XRYMessagesFileParser implements XRYFileParser {

while (reader.hasNextEntity()) {
String xryEntity = reader.nextEntity();

// This call will combine all segmented text into a single key value pair
List<XRYKeyValuePair> pairs = getXRYKeyValuePairs(xryEntity, reader, referenceNumbersSeen);

// Transform all the data from XRY land into the appropriate CommHelper
// data types.
final String messageType = PARSER_NAME;
@@ -286,8 +294,8 @@ final class XRYMessagesFileParser implements XRYFileParser {
String text = null;
final String threadId = null;
final Collection<BlackboardAttribute> otherAttributes = new ArrayList<>();

- for(XRYKeyValuePair pair : pairs) {
+ for (XRYKeyValuePair pair : pairs) {
XryNamespace namespace = XryNamespace.NONE;
if (XryNamespace.contains(pair.getNamespace())) {
namespace = XryNamespace.fromDisplayName(pair.getNamespace());
@@ -298,55 +306,55 @@ final class XRYMessagesFileParser implements XRYFileParser {
switch (key) {
case TEL:
case NUMBER:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

// Apply namespace or direction
- if(namespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) {
+ if (namespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) {
senderId = pair.getValue();
- } else if(namespace == XryNamespace.TO || direction == CommunicationDirection.OUTGOING) {
+ } else if (namespace == XryNamespace.TO || direction == CommunicationDirection.OUTGOING) {
recipientIdsList.add(pair.getValue());
} else {
try {
currentCase.getCommunicationsManager().createAccountFileInstance(
- Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent);
+ Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent, null);
} catch (InvalidAccountIDException ex) {
logger.log(Level.WARNING, String.format("Invalid account identifier %s", pair.getValue()), ex);
}

otherAttributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
PARSER_NAME, pair.getValue()));
}
break;
// Although confusing, as these are also 'name spaces', it appears
// later versions of XRY just made these standardized lines.
case FROM:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

senderId = pair.getValue();
break;
case TO:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

recipientIdsList.add(pair.getValue());
break;
case TIME:
try {
//Tranform value to seconds since epoch
long dateTimeSinceInEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue());
dateTime = dateTimeSinceInEpoch;
} catch (DateTimeParseException ex) {
logger.log(Level.WARNING, String.format("[%s] Assumption"
+ " about the date time formatting of messages is "
+ "not right. Here is the pair [ %s ]", PARSER_NAME, pair), ex);
}
break;
case TYPE:
switch (normalizedValue) {
case "incoming":
@@ -406,11 +414,11 @@ final class XRYMessagesFileParser implements XRYFileParser {
}
break;
case SERVICE_CENTER:
- if(!XRYUtils.isPhoneValid(pair.getValue())) {
+ if (!XRYUtils.isPhoneValid(pair.getValue())) {
continue;
}

otherAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
PARSER_NAME, pair.getValue()));
break;
default:
@@ -427,18 +435,18 @@ final class XRYMessagesFileParser implements XRYFileParser {
}
}
}

- CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
- currentCase, PARSER_NAME, parent, Account.Type.PHONE);
+ CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
+ currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);

helper.addMessage(messageType, direction, senderId, recipientIdsList,
dateTime, readStatus, subject, text, threadId, otherAttributes);
}
}

/**
- * Extracts all pairs from the XRY Entity. This function
- * will unify any segmented text, if need be.
+ * Extracts all pairs from the XRY Entity. This function will unify any
+ * segmented text, if need be.
 */
private List<XRYKeyValuePair> getXRYKeyValuePairs(String xryEntity,
XRYFileReader reader, Set<Integer> referenceValues) throws IOException {
@@ -508,10 +516,13 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * Builds up segmented message entities so that the text is unified for a
 * single artifact.
 *
- * @param reader File reader that is producing XRY entities.
- * @param referenceNumbersSeen All known references numbers up until this point.
- * @param xryEntity The source XRY entity.
+ * @param reader File reader that is producing XRY entities.
+ * @param referenceNumbersSeen All known references numbers up until this
+ * point.
+ * @param xryEntity The source XRY entity.
+ *
 * @return
+ *
 * @throws IOException
 */
private String getSegmentedText(String[] xryEntity, XRYFileReader reader,
@@ -604,7 +615,8 @@ final class XRYMessagesFileParser implements XRYFileParser {
 * Extracts the value of the XRY meta key, if any.
 *
 * @param xryLines XRY entity to extract from.
 * @param metaKey The key type to extract.
+ *
 * @return
 */
private Optional<Integer> getMetaKeyValue(String[] xryLines, XryMetaKey metaKey) {
@@ -629,10 +641,12 @@ final class XRYMessagesFileParser implements XRYFileParser {
/**
 * Extracts the ith XRY Key Value pair in the XRY Entity.
 *
- * The total number of pairs can be determined via getCountOfKeyValuePairs().
+ * The total number of pairs can be determined via
+ * getCountOfKeyValuePairs().
 *
 * @param xryLines XRY entity.
 * @param index The requested Key Value pair.
+ *
 * @return
 */
private Optional<XRYKeyValuePair> getKeyValuePairByIndex(String[] xryLines, int index) {
@@ -672,4 +686,4 @@ final class XRYMessagesFileParser implements XRYFileParser {

return Optional.empty();
}
}
@@ -388,10 +388,10 @@ public class ResultsSorter implements Comparator<Result> {
Bundle.FileSorter_SortingMethod_keywordlist_displayName()), // Sort alphabetically by list of keyword list names found
BY_FULL_PATH(new ArrayList<>(),
Bundle.FileSorter_SortingMethod_fullPath_displayName()), // Sort alphabetically by path
- BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
- BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
- BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
- BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());
+ BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
+ BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
+ BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
+ BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());

private final String displayName;
private final List<DiscoveryAttributes.AttributeType> requiredAttributes;
@@ -73,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule {

// Skip anything other than actual file system files.
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (file.isFile() == false)) {
return IngestModule.ProcessResult.OK;
}

@@ -111,10 +111,13 @@ class SampleFileIngestModule implements FileIngestModule {
addToBlackboardPostCount(context.getJobId(), 1L);

/*
- * post the artifact which will index the artifact for keyword
- * search, and fire an event to notify UI of this new artifact
+ * Post the artifact to the blackboard. Doing so will cause events
+ * to be published that will trigger additional analysis, if
+ * applicable. For example, the creation of timeline events,
+ * indexing of the artifact for keyword search, and analysis by the
+ * data artifact ingest modules if the artifact is a data artifact.
 */
- file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
+ file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName(), context.getJobId());

return IngestModule.ProcessResult.OK;
@@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
- * Copyright 2015-2017 Basis Technology Corp.
+ * Copyright 2015-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,66 +21,76 @@ package org.sleuthkit.autopsy.guiutils;
import java.awt.Component;
import java.time.Duration;
import javax.swing.JTable;
import static javax.swing.SwingConstants.CENTER;

/**
 * A JTable cell renderer that renders a duration represented as a long as a
 * string with days, hours, minutes, and seconds components. It center-aligns
 * cell content and grays out the cell if the table is disabled.
 */
- public class DurationCellRenderer extends GrayableCellRenderer {
+ public final class DurationCellRenderer extends GrayableCellRenderer {

private static final long serialVersionUID = 1L;
+ private static final char UNIT_SEPARATOR_CHAR = ':';

public DurationCellRenderer() {
- setHorizontalAlignment(CENTER);
+ setHorizontalAlignment(LEFT);
}

@Override
public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
if (value instanceof Long) {
- {
setText(DurationCellRenderer.longToDurationString((long) value));
- }
}
grayCellIfTableNotEnabled(table, isSelected);
return this;
}

+ public static char getUnitSeperator() {
+ return UNIT_SEPARATOR_CHAR;
+ }

/**
 * Convert a duration represented by a long to a human readable string with
 * with days, hours, minutes, and seconds components.
 *
- * @param duration - the representation of the duration in long form
+ * @param duration - The representation of the duration in long form.
 *
- * @return - the representation of the duration in String form.
+ * @return - The representation of the duration in String form.
 */
public static String longToDurationString(long duration) {
Duration d = Duration.ofMillis(duration);
if (d.isNegative()) {
- d = Duration.ofMillis(-duration);
+ d = Duration.ofMillis(0); //it being 0 for a few seconds seems preferable to it counting down to 0 then back up from 0
}

- String result;
long days = d.toDays();
long hours = d.minusDays(days).toHours();
long minutes = d.minusDays(days).minusHours(hours).toMinutes();
long seconds = d.minusDays(days).minusHours(hours).minusMinutes(minutes).getSeconds();

- if (minutes > 0) {
- if (hours > 0) {
- if (days > 0) {
- result = days + " d " + hours + " h " + minutes + " m " + seconds + " s";
- } else {
- result = hours + " h " + minutes + " m " + seconds + " s";
- }
- } else {
- result = minutes + " m " + seconds + " s";
- }
- } else {
- result = seconds + " s";
- }
- return result;
+ if (days < 0) {
+ days = 0;
+ }
+ if (hours < 0) {
+ hours = 0;
+ }
+ if (minutes < 0) {
+ minutes = 0;
+ }
+ if (seconds < 0) {
+ seconds = 0;
+ }
+ StringBuilder results = new StringBuilder(12);
+ if (days < 99) {
+ results.append(String.format("%02d", days));
+ } else {
+ results.append(days); //in the off chance something has been running for over 99 days lets allow it to stand out a bit by having as many characters as it needs
+ }
+ results.append(UNIT_SEPARATOR_CHAR);
+ results.append(String.format("%02d", hours));
+ results.append(UNIT_SEPARATOR_CHAR);
+ results.append(String.format("%02d", minutes));
+ results.append(UNIT_SEPARATOR_CHAR);
+ results.append(String.format("%02d", seconds));
+ return results.toString();
}

}
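The rewritten longToDurationString() replaces the old "d/h/m/s" phrasing with a zero-padded dd:hh:mm:ss string, clamping negative components to zero so an in-progress clock holds at zero rather than counting down and back up. A self-contained sketch of the same formatting, with a hypothetical main() for a quick check:

// Standalone sketch mirroring the new formatting logic above.
import java.time.Duration;

final class DurationFormat {

    private static final char UNIT_SEPARATOR_CHAR = ':';

    private DurationFormat() {
    }

    static String format(long millis) {
        // Clamp negatives to zero, as the commit does for negative durations.
        Duration d = Duration.ofMillis(Math.max(0, millis));
        long days = d.toDays();
        long hours = d.minusDays(days).toHours();
        long minutes = d.minusDays(days).minusHours(hours).toMinutes();
        long seconds = d.minusDays(days).minusHours(hours).minusMinutes(minutes).getSeconds();
        StringBuilder result = new StringBuilder(12);
        // Let the day field grow past two digits for very long durations,
        // matching the commit's special case for 99+ days.
        result.append(days < 99 ? String.format("%02d", days) : Long.toString(days));
        result.append(UNIT_SEPARATOR_CHAR).append(String.format("%02d", hours));
        result.append(UNIT_SEPARATOR_CHAR).append(String.format("%02d", minutes));
        result.append(UNIT_SEPARATOR_CHAR).append(String.format("%02d", seconds));
        return result.toString();
    }

    public static void main(String[] args) {
        // 1 day, 1 hour, 1 minute, 1 second -> "01:01:01:01"
        System.out.println(format(90_061_000L));
    }
}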
@@ -23,27 +23,28 @@ import java.util.Optional;
import org.sleuthkit.datamodel.DataArtifact;

/**
- * A pipeline of data artifact ingest modules used to execute data artifact
+ * A pipeline of data artifact ingest modules used to perform data artifact
 * ingest tasks for an ingest job.
 */
- final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIngestTask> {
+ final class DataArtifactIngestPipeline extends IngestPipeline<DataArtifactIngestTask> {

/**
- * Constructs a pipeline of data artifact ingest modules used to execute
+ * Constructs a pipeline of data artifact ingest modules used to perform
 * data artifact ingest tasks for an ingest job.
 *
- * @param ingestJobPipeline The ingest job pipeline that owns this ingest
- * task pipeline.
- * @param moduleTemplates The ingest module templates that define this
- * pipeline. May be an empty list.
+ * @param ingestJobExecutor The ingest job executor for this pipeline.
+ * @param moduleTemplates The ingest module templates to be used to
+ * construct the ingest modules for this pipeline.
+ * May be an empty list if this type of pipeline is
+ * not needed for the ingest job.
 */
- DataArtifactIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
- super(ingestJobPipeline, moduleTemplates);
+ DataArtifactIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
+ super(ingestJobExecutor, moduleTemplates);
}

@Override
Optional<PipelineModule<DataArtifactIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
- Optional<IngestTaskPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
+ Optional<IngestPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
if (template.isDataArtifactIngestModuleTemplate()) {
DataArtifactIngestModule ingestModule = template.createDataArtifactIngestModule();
module = Optional.of(new DataArtifactIngestPipelineModule(ingestModule, template.getModuleName()));
@@ -52,18 +53,18 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
}

@Override
- void prepareForTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
+ void prepareForTask(DataArtifactIngestTask task) throws IngestPipelineException {
}

@Override
- void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
+ void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestPipelineException {
}

/**
 * A decorator that adds ingest infrastructure operations to a data artifact
 * ingest module.
 */
- static final class DataArtifactIngestPipelineModule extends IngestTaskPipeline.PipelineModule<DataArtifactIngestTask> {
+ static final class DataArtifactIngestPipelineModule extends IngestPipeline.PipelineModule<DataArtifactIngestTask> {

private final DataArtifactIngestModule module;

@@ -80,7 +81,7 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
}

@Override
- void executeTask(IngestJobPipeline ingestJobPipeline, DataArtifactIngestTask task) throws IngestModuleException {
+ void process(IngestJobExecutor ingestJobExecutor, DataArtifactIngestTask task) throws IngestModuleException {
DataArtifact artifact = task.getDataArtifact();
module.process(artifact);
}
@@ -22,7 +22,7 @@ import org.sleuthkit.datamodel.DataArtifact;

/**
 * A data artifact ingest task that will be executed by an ingest thread using a
- * given ingest job pipeline.
+ * given ingest job executor.
 */
final class DataArtifactIngestTask extends IngestTask {

@@ -30,14 +30,14 @@ final class DataArtifactIngestTask extends IngestTask {

/**
 * Constructs a data artifact ingest task that will be executed by an ingest
- * thread using a given ingest job pipeline.
+ * thread using a given ingest job executor.
 *
- * @param ingestJobPipeline The ingest job pipeline to use to execute the
+ * @param ingestJobExecutor The ingest job executor to use to execute the
 * task.
 * @param artifact The data artifact to be processed.
 */
- DataArtifactIngestTask(IngestJobPipeline ingestJobPipeline, DataArtifact artifact) {
- super(ingestJobPipeline);
+ DataArtifactIngestTask(IngestJobExecutor ingestJobExecutor, DataArtifact artifact) {
+ super(ingestJobExecutor);
this.artifact = artifact;
}

@@ -53,7 +53,7 @@ final class DataArtifactIngestTask extends IngestTask {
@Override
void execute(long threadId) {
super.setThreadId(threadId);
- getIngestJobPipeline().execute(this);
+ getIngestJobExecutor().execute(this);
}

}
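The remaining hunks are a mechanical rename of IngestJobPipeline to IngestJobExecutor (and IngestTaskPipeline to IngestPipeline). The dispatch shape they preserve is: a task records the ingest thread ID, then hands itself back to its job executor, which routes it to the matching module pipeline. A simplified sketch with stand-in types; the real Autopsy classes are package-private:

// Stand-in types illustrating the task/executor dispatch after the rename.
interface JobExecutor {
    void execute(Task task);
}

abstract class Task {

    private final JobExecutor ingestJobExecutor;
    private long threadId;

    Task(JobExecutor ingestJobExecutor) {
        this.ingestJobExecutor = ingestJobExecutor;
    }

    final JobExecutor getIngestJobExecutor() {
        return ingestJobExecutor;
    }

    final void setThreadId(long threadId) {
        this.threadId = threadId;
    }

    // Concrete tasks (data artifact, data source, file) all implement
    // execute() the same way: record the thread, then route back to the
    // executor, as DataArtifactIngestTask and DataSourceIngestTask do here.
    void execute(long threadId) {
        setThreadId(threadId);
        getIngestJobExecutor().execute(this);
    }
}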
@@ -23,10 +23,10 @@ package org.sleuthkit.autopsy.ingest;
 */
public class DataSourceIngestModuleProgress {

- private final IngestJobPipeline ingestJobPipeline;
+ private final IngestJobExecutor ingestJobExecutor;

- DataSourceIngestModuleProgress(IngestJobPipeline pipeline) {
- this.ingestJobPipeline = pipeline;
+ DataSourceIngestModuleProgress(IngestJobExecutor ingestJobExecutor) {
+ this.ingestJobExecutor = ingestJobExecutor;
}

/**
@@ -38,7 +38,7 @@ public class DataSourceIngestModuleProgress {
 * data source.
 */
public void switchToDeterminate(int workUnits) {
- this.ingestJobPipeline.switchDataSourceIngestProgressBarToDeterminate(workUnits);
+ ingestJobExecutor.switchDataSourceIngestProgressBarToDeterminate(workUnits);
}

/**
@@ -46,7 +46,7 @@ public class DataSourceIngestModuleProgress {
 * the total work units to process the data source is unknown.
 */
public void switchToIndeterminate() {
- this.ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
+ ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
}

/**
@@ -56,7 +56,7 @@ public class DataSourceIngestModuleProgress {
 * @param workUnits Number of work units performed so far by the module.
 */
public void progress(int workUnits) {
- this.ingestJobPipeline.advanceDataSourceIngestProgressBar("", workUnits);
+ ingestJobExecutor.advanceDataSourceIngestProgressBar("", workUnits);
}

/**
@@ -65,7 +65,7 @@ public class DataSourceIngestModuleProgress {
 * @param message Message to display
 */
public void progress(String message) {
- this.ingestJobPipeline.advanceDataSourceIngestProgressBar(message);
+ ingestJobExecutor.advanceDataSourceIngestProgressBar(message);
}

/**
@@ -76,7 +76,7 @@ public class DataSourceIngestModuleProgress {
 * @param workUnits Number of work units performed so far by the module.
 */
public void progress(String currentTask, int workUnits) {
- this.ingestJobPipeline.advanceDataSourceIngestProgressBar(currentTask, workUnits);
+ ingestJobExecutor.advanceDataSourceIngestProgressBar(currentTask, workUnits);
}

}
@@ -29,7 +29,7 @@ import org.sleuthkit.datamodel.Content;
 * A pipeline of data source level ingest modules for executing data source
 * level ingest tasks for an ingest job.
 */
- final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngestTask> {
+ final class DataSourceIngestPipeline extends IngestPipeline<DataSourceIngestTask> {

private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
private static final IngestManager ingestManager = IngestManager.getInstance();
@@ -38,17 +38,19 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
 * Constructs a pipeline of data source level ingest modules for performing
 * data source level ingest tasks for an ingest job.
 *
- * @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
- * @param moduleTemplates The ingest module templates that define this
- * pipeline.
+ * @param ingestJobExecutor The ingest job executor for this pipeline.
+ * @param moduleTemplates The ingest module templates to be used to
+ * construct the ingest modules for this pipeline.
+ * May be an empty list if this type of pipeline is
+ * not needed for the ingest job.
 */
- DataSourceIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
- super(ingestJobPipeline, moduleTemplates);
+ DataSourceIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
+ super(ingestJobExecutor, moduleTemplates);
}

@Override
- Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
- Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
+ Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
+ Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
if (template.isDataSourceIngestModuleTemplate()) {
DataSourceIngestModule ingestModule = template.createDataSourceIngestModule();
module = Optional.of(new DataSourcePipelineModule(ingestModule, template.getModuleName()));
@@ -69,7 +71,7 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
 * A wrapper that adds ingest infrastructure operations to a data source
 * level ingest module.
 */
- static final class DataSourcePipelineModule extends IngestTaskPipeline.PipelineModule<DataSourceIngestTask> {
+ static final class DataSourcePipelineModule extends IngestPipeline.PipelineModule<DataSourceIngestTask> {

private final DataSourceIngestModule module;

@@ -83,18 +85,18 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
}

@Override
- void executeTask(IngestJobPipeline ingestJobPipeline, DataSourceIngestTask task) throws IngestModuleException {
+ void process(IngestJobExecutor ingestJobExecutor, DataSourceIngestTask task) throws IngestModuleException {
Content dataSource = task.getDataSource();
String progressBarDisplayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.displayName", getDisplayName(), dataSource.getName());
- ingestJobPipeline.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
- ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
+ ingestJobExecutor.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
+ ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
ingestManager.setIngestTaskProgress(task, getDisplayName());
logger.log(Level.INFO, "{0} analysis of {1} starting", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
- module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobPipeline));
+ module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobExecutor));
logger.log(Level.INFO, "{0} analysis of {1} finished", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
- if (!ingestJobPipeline.isCancelled() && ingestJobPipeline.currentDataSourceIngestModuleIsCancelled()) {
- ingestJobPipeline.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
- }
+ if (!ingestJobExecutor.isCancelled() && ingestJobExecutor.currentDataSourceIngestModuleIsCancelled()) {
+ ingestJobExecutor.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
+ }
}

}
@@ -20,25 +20,25 @@ package org.sleuthkit.autopsy.ingest;

/**
 * A data source level ingest task that will be executed by an ingest thread
- * using a given ingest job pipeline.
+ * using a given ingest job executor.
 */
final class DataSourceIngestTask extends IngestTask {

/**
 * Constructs a data source level ingest task that will be executed by an
- * ingest thread using a given ingest job pipeline.
+ * ingest thread using a given ingest job executor.
 *
- * @param ingestJobPipeline The ingest job pipeline to use to execute the
+ * @param ingestJobExecutor The ingest job executor to use to execute the
 * task.
 */
- DataSourceIngestTask(IngestJobPipeline ingestJobPipeline) {
- super(ingestJobPipeline);
+ DataSourceIngestTask(IngestJobExecutor ingestJobExecutor) {
+ super(ingestJobExecutor);
}

@Override
void execute(long threadId) {
super.setThreadId(threadId);
- getIngestJobPipeline().execute(this);
+ getIngestJobExecutor().execute(this);
}

}
@@ -39,32 +39,34 @@ import org.sleuthkit.datamodel.TskCoreException;
@NbBundle.Messages({
"FileIngestPipeline_SaveResults_Activity=Saving Results"
})
final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
final class FileIngestPipeline extends IngestPipeline<FileIngestTask> {

private static final int FILE_BATCH_SIZE = 500;
private static final String SAVE_RESULTS_ACTIVITY = Bundle.FileIngestPipeline_SaveResults_Activity();
private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName());
private static final IngestManager ingestManager = IngestManager.getInstance();
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private final List<AbstractFile> fileBatch;

/**
* Constructs a pipeline of file ingest modules for executing file ingest
* tasks for an ingest job.
*
* @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
* @param moduleTemplates The ingest module templates that define this
* pipeline.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
FileIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobPipeline, moduleTemplates);
this.ingestJobPipeline = ingestJobPipeline;
FileIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
super(ingestJobExecutor, moduleTemplates);
this.ingestJobExecutor = ingestJobExecutor;
fileBatch = new ArrayList<>();
}

@Override
Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
Optional<IngestPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
Optional<IngestPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
if (template.isFileIngestModuleTemplate()) {
FileIngestModule ingestModule = template.createFileIngestModule();
module = Optional.of(new FileIngestPipelineModule(ingestModule, template.getModuleName()));
@@ -73,18 +75,18 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
}

@Override
void prepareForTask(FileIngestTask task) throws IngestTaskPipelineException {
void prepareForTask(FileIngestTask task) throws IngestPipelineException {
}

@Override
void cleanUpAfterTask(FileIngestTask task) throws IngestTaskPipelineException {
void cleanUpAfterTask(FileIngestTask task) throws IngestPipelineException {
try {
ingestManager.setIngestTaskProgress(task, SAVE_RESULTS_ACTIVITY);
AbstractFile file = task.getFile();
file.close();
cacheFileForBatchUpdate(file);
} catch (TskCoreException ex) {
throw new IngestTaskPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
throw new IngestPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
} finally {
ingestManager.setIngestTaskProgressCompleted(task);
}
@@ -96,7 +98,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
Date start = new Date();
try {
updateBatchedFiles();
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError(SAVE_RESULTS_ACTIVITY, ex));
}
Date finish = new Date();
@@ -113,9 +115,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
*
* @param file The file.
*
* @throws IngestTaskPipelineException if the case database update fails.
* @throws IngestPipelineException if the case database update fails.
*/
private void cacheFileForBatchUpdate(AbstractFile file) throws IngestTaskPipelineException {
private void cacheFileForBatchUpdate(AbstractFile file) throws IngestPipelineException {
/*
* Only one file ingest thread at a time will try to access the file
* cache. The synchronization here is to ensure visibility of the files
@@ -134,9 +136,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
* Updates the case database with new properties added to the files in the
* cache by the ingest modules that processed them.
*
* @throws IngestTaskPipelineException if the case database update fails.
* @throws IngestPipelineException if the case database update fails.
*/
private void updateBatchedFiles() throws IngestTaskPipelineException {
private void updateBatchedFiles() throws IngestPipelineException {
/*
* Only one file ingest thread at a time will try to access the file
* cache. The synchronization here is to ensure visibility of the files
@@ -146,7 +148,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
synchronized (fileBatch) {
CaseDbTransaction transaction = null;
try {
if (!ingestJobPipeline.isCancelled()) {
if (!ingestJobExecutor.isCancelled()) {
Case currentCase = Case.getCurrentCaseThrows();
SleuthkitCase caseDb = currentCase.getSleuthkitCase();
transaction = caseDb.beginTransaction();
@@ -166,7 +168,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
logger.log(Level.SEVERE, "Error rolling back transaction after failure to save updated properties for cached files from tasks", ex1);
}
}
throw new IngestTaskPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
throw new IngestPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
} finally {
fileBatch.clear();
}
@@ -177,7 +179,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
* A wrapper that adds ingest infrastructure operations to a file ingest
* module.
*/
static final class FileIngestPipelineModule extends IngestTaskPipeline.PipelineModule<FileIngestTask> {
static final class FileIngestPipelineModule extends IngestPipeline.PipelineModule<FileIngestTask> {

private final FileIngestModule module;

@@ -195,7 +197,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
}

@Override
void executeTask(IngestJobPipeline ingestJobPipeline, FileIngestTask task) throws IngestModuleException {
void process(IngestJobExecutor ingestJobExecutor, FileIngestTask task) throws IngestModuleException {
AbstractFile file = null;
try {
file = task.getFile();
@@ -25,7 +25,7 @@ import org.sleuthkit.datamodel.TskCoreException;

/**
* A file ingest task that will be executed by an ingest thread using a given
* ingest job pipeline.
* ingest job executor.
*/
final class FileIngestTask extends IngestTask {

@@ -34,13 +34,13 @@ final class FileIngestTask extends IngestTask {

/**
* Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline.
* using a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobPipeline The ingest job executor to use to execute the
* task.
* @param file The file to be processed.
*/
FileIngestTask(IngestJobPipeline ingestJobPipeline, AbstractFile file) {
FileIngestTask(IngestJobExecutor ingestJobPipeline, AbstractFile file) {
super(ingestJobPipeline);
this.file = file;
fileId = file.getId();
@@ -48,15 +48,15 @@ final class FileIngestTask extends IngestTask {

/**
* Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline. This constructor supports streaming
* using a given ingest job executor. This constructor supports streaming
* ingest by deferring the construction of the AbstractFile object for this
* task to conserve heap memory.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobPipeline The ingest job executor to use to execute the
* task.
* @param fileId The object ID of the file to be processed.
*/
FileIngestTask(IngestJobPipeline ingestJobPipeline, long fileId) {
FileIngestTask(IngestJobExecutor ingestJobPipeline, long fileId) {
super(ingestJobPipeline);
this.fileId = fileId;
}
@@ -88,7 +88,7 @@ final class FileIngestTask extends IngestTask {
@Override
void execute(long threadId) {
super.setThreadId(threadId);
getIngestJobPipeline().execute(this);
getIngestJobExecutor().execute(this);
}

@Override
@@ -100,19 +100,19 @@ final class FileIngestTask extends IngestTask {
return false;
}
FileIngestTask other = (FileIngestTask) obj;
IngestJobPipeline thisPipeline = getIngestJobPipeline();
IngestJobPipeline otherPipeline = other.getIngestJobPipeline();
IngestJobExecutor thisPipeline = getIngestJobExecutor();
IngestJobExecutor otherPipeline = other.getIngestJobExecutor();
if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) {
return false;
}
return (this.fileId == other.fileId);
return (getFileId() == other.getFileId());
}

@Override
public int hashCode() {
int hash = 5;
hash = 47 * hash + Objects.hashCode(getIngestJobPipeline());
hash = 47 * hash + Objects.hashCode(this.fileId);
hash = 47 * hash + Objects.hashCode(getIngestJobExecutor());
hash = 47 * hash + Objects.hashCode(getFileId());
return hash;
}

@@ -28,10 +28,11 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;

/**
* Analyzes one or more data sources using a set of ingest modules specified via
* ingest job settings.
* Analyzes a data source using a set of ingest modules specified via ingest
* job settings.
*/
public final class IngestJob {

@@ -73,17 +74,17 @@ public final class IngestJob {
private final List<AbstractFile> files = new ArrayList<>();
private final Mode ingestMode;
private final IngestJobSettings settings;
private volatile IngestJobPipeline ingestJobPipeline;
private volatile IngestJobExecutor ingestModuleExecutor;
private volatile CancellationReason cancellationReason;

/**
* Constructs a batch mode ingest job that analyzes a data source using a
* set of ingest modules specified via ingest job settings. Either all of
* the files in the data source or a given subset of the files will be
* analyzed.
* set of ingest modules specified via ingest job settings.
*
* @param dataSource The data source to be analyzed.
* @param files A subset of the files from the data source.
* @param files A subset of the files from the data source to be
* analyzed, may be empty if all of the files should be
* analyzed.
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
@@ -91,13 +92,6 @@ public final class IngestJob {
this.files.addAll(files);
}

/**
* Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an
* ingest stream.
*
* @param settings The ingest job settings.
*/
/**
* Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an
@@ -108,7 +102,7 @@ public final class IngestJob {
* @param settings The ingest job settings.
*/
IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) {
this.id = IngestJob.nextId.getAndIncrement();
id = IngestJob.nextId.getAndIncrement();
this.dataSource = dataSource;
this.settings = settings;
this.ingestMode = ingestMode;
@@ -125,6 +119,15 @@ public final class IngestJob {
return this.id;
}

/**
* Gets the data source to be analyzed by this job.
*
* @return The data source.
*/
Content getDataSource() {
return dataSource;
}

/**
* Checks to see if this ingest job has at least one non-empty ingest module
* pipeline.
@@ -136,31 +139,41 @@ public final class IngestJob {
}

/**
* Adds a set of files to this ingest job if it is running in streaming
* Adds a set of files to this ingest job, if it is running in streaming
* ingest mode.
*
* @param fileObjIds The object IDs of the files.
*/
void addStreamingIngestFiles(List<Long> fileObjIds) {
void addStreamedFiles(List<Long> fileObjIds) {
if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) {
ingestJobPipeline.addStreamedFiles(fileObjIds);
if (ingestModuleExecutor != null) {
ingestModuleExecutor.addStreamedFiles(fileObjIds);
} else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files with no ingest pipeline");
logger.log(Level.SEVERE, "Attempted to add streamed files with no ingest pipeline");
}
} else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files to batch ingest job");
logger.log(Level.SEVERE, "Attempted to add streamed files to batch ingest job");
}
}

/**
* Adds one or more data artifacts to this ingest job for processing by its
* data artifact ingest modules.
*
* @param dataArtifacts The data artifacts.
*/
void addDataArtifacts(List<DataArtifact> dataArtifacts) {
ingestModuleExecutor.addDataArtifacts(dataArtifacts);
}

/**
* Starts data source level analysis for this job if it is running in
* streaming ingest mode.
*/
void processStreamingIngestDataSource() {
if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) {
ingestJobPipeline.addStreamedDataSource();
if (ingestModuleExecutor != null) {
ingestModuleExecutor.startStreamingModeDataSourceAnalysis();
} else {
logger.log(Level.SEVERE, "Attempted to start data source analysis with no ingest pipeline");
}
@@ -176,16 +189,16 @@ public final class IngestJob {
* @return A collection of ingest module start up errors, empty on success.
*/
synchronized List<IngestModuleError> start() throws InterruptedException {
if (ingestJobPipeline != null) {
if (ingestModuleExecutor != null) {
logger.log(Level.SEVERE, "Attempt to start ingest job that has already been started");
return Collections.emptyList();
}

ingestJobPipeline = new IngestJobPipeline(this, dataSource, files, settings);
ingestModuleExecutor = new IngestJobExecutor(this, dataSource, files, settings);
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(ingestJobPipeline.startUp());
errors.addAll(ingestModuleExecutor.startUp());
if (errors.isEmpty()) {
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestJobPipeline.getDataSource());
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestModuleExecutor.getDataSource());
} else {
cancel(CancellationReason.INGEST_MODULES_STARTUP_FAILED);
}
@@ -220,7 +233,7 @@ public final class IngestJob {
*/
public ProgressSnapshot getSnapshot(boolean includeIngestTasksSnapshot) {
ProgressSnapshot snapshot = null;
if (ingestJobPipeline != null) {
if (ingestModuleExecutor != null) {
return new ProgressSnapshot(includeIngestTasksSnapshot);
}
return snapshot;
@@ -233,8 +246,8 @@ public final class IngestJob {
*/
Snapshot getDiagnosticStatsSnapshot() {
Snapshot snapshot = null;
if (ingestJobPipeline != null) {
snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(true);
if (ingestModuleExecutor != null) {
snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(true);
}
return snapshot;
}
@@ -272,8 +285,8 @@ public final class IngestJob {
* ingest manager's ingest jobs list lock.
*/
new Thread(() -> {
if (ingestJobPipeline != null) {
ingestJobPipeline.cancel(reason);
if (ingestModuleExecutor != null) {
ingestModuleExecutor.cancel(reason);
}
}).start();
}
@@ -284,7 +297,7 @@ public final class IngestJob {
* @return The cancellation reason, may be not cancelled.
*/
public CancellationReason getCancellationReason() {
return this.cancellationReason;
return cancellationReason;
}

/**
@@ -294,18 +307,16 @@ public final class IngestJob {
* @return True or false.
*/
public boolean isCancelled() {
return (CancellationReason.NOT_CANCELLED != this.cancellationReason);
return (CancellationReason.NOT_CANCELLED != cancellationReason);
}

/**
* Provides a callback for the ingest modules pipeline, allowing this ingest
* Provides a callback for the ingest module executor, allowing this ingest
* job to notify the ingest manager when it is complete.
*
* @param ingestJobPipeline A completed ingestJobPipeline.
*/
void notifyIngestPipelineShutDown() {
void notifyIngestPipelinesShutDown() {
IngestManager ingestManager = IngestManager.getInstance();
if (!ingestJobPipeline.isCancelled()) {
if (!ingestModuleExecutor.isCancelled()) {
ingestManager.fireDataSourceAnalysisCompleted(id, dataSource);
} else {
IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, dataSource);
@@ -423,11 +434,7 @@ public final class IngestJob {
* stats part of the snapshot.
*/
private ProgressSnapshot(boolean includeIngestTasksSnapshot) {
/*
* Note that the getSnapshot() will not construct a ProgressSnapshot
* if ingestJobPipeline is null.
*/
Snapshot snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
Snapshot snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot);
jobCancellationRequested = IngestJob.this.isCancelled();
jobCancellationReason = IngestJob.this.getCancellationReason();
@@ -444,7 +451,7 @@ public final class IngestJob {
DataSourceIngestModuleHandle moduleHandle = null;
DataSourceIngestPipeline.DataSourcePipelineModule module = dataSourceProcessingSnapshot.getDataSourceLevelIngestModule();
if (module != null) {
moduleHandle = new DataSourceIngestModuleHandle(ingestJobPipeline, module);
moduleHandle = new DataSourceIngestModuleHandle(ingestModuleExecutor, module);
}
return moduleHandle;
}
@@ -507,7 +514,7 @@ public final class IngestJob {
*/
public static class DataSourceIngestModuleHandle {

private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private final DataSourceIngestPipeline.DataSourcePipelineModule module;
private final boolean cancelled;

@@ -516,14 +523,14 @@ public final class IngestJob {
* used to get basic information about the module and to request
* cancellation of the module.
*
* @param ingestJobPipeline The ingestJobPipeline that owns the data
* @param ingestJobExecutor The ingest job executor that owns the data
* source level ingest module.
* @param module The data source level ingest module.
*/
private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) {
this.ingestJobPipeline = ingestJobPipeline;
private DataSourceIngestModuleHandle(IngestJobExecutor ingestJobExecutor, DataSourceIngestPipeline.DataSourcePipelineModule module) {
this.ingestJobExecutor = ingestJobExecutor;
this.module = module;
this.cancelled = ingestJobPipeline.currentDataSourceIngestModuleIsCancelled();
this.cancelled = ingestJobExecutor.currentDataSourceIngestModuleIsCancelled();
}

/**
@@ -533,7 +540,7 @@ public final class IngestJob {
* @return The display name.
*/
public String displayName() {
return this.module.getDisplayName();
return module.getDisplayName();
}

/**
@@ -543,7 +550,7 @@ public final class IngestJob {
* @return The module processing start time.
*/
public Date startTime() {
return this.module.getProcessingStartTime();
return module.getProcessingStartTime();
}

/**
@@ -553,7 +560,7 @@ public final class IngestJob {
* @return True or false.
*/
public boolean isCancelled() {
return this.cancelled;
return cancelled;
}

/**
@@ -567,8 +574,8 @@ public final class IngestJob {
* could perhaps be solved by adding a cancel() API to the
* IngestModule interface.
*/
if (this.ingestJobPipeline.getCurrentDataSourceIngestModule() == this.module) {
this.ingestJobPipeline.cancelCurrentDataSourceIngestModule();
if (ingestJobExecutor.getCurrentDataSourceIngestModule() == module) {
ingestJobExecutor.cancelCurrentDataSourceIngestModule();
}
}
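For orientation, the sketch below exercises the IngestJob surface shown in this hunk from inside the org.sleuthkit.autopsy.ingest package. It is illustrative only and not part of the commit: the startJob helper is invented, the job argument is assumed to come from the ingest manager, and error handling is elided.

List<IngestModuleError> startJob(IngestJob job) throws InterruptedException {
    // start() constructs the IngestJobExecutor (formerly IngestJobPipeline) and starts it.
    List<IngestModuleError> startUpErrors = job.start();
    if (!startUpErrors.isEmpty()) {
        // On start up errors, start() has already cancelled the job with
        // CancellationReason.INGEST_MODULES_STARTUP_FAILED.
        IngestJob.CancellationReason reason = job.getCancellationReason();
    }
    // getSnapshot() returns null until the executor has been created.
    IngestJob.ProgressSnapshot progress = job.getSnapshot(true);
    return startUpErrors;
}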
@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.ingest;
import java.util.List;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;

/**
* Provides an ingest module with services specific to the ingest job of which
@@ -29,16 +28,16 @@ import org.sleuthkit.datamodel.DataArtifact;
*/
public final class IngestJobContext {

private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;

/**
* Constructs an ingest job context object that provides an ingest module
* with services specific to the ingest job of which the module is a part.
*
* @param ingestJobPipeline The ingest pipeline for the job.
* @param ingestJobExecutor The ingest executor for the job.
*/
IngestJobContext(IngestJobPipeline ingestJobPipeline) {
this.ingestJobPipeline = ingestJobPipeline;
IngestJobContext(IngestJobExecutor ingestJobExecutor) {
this.ingestJobExecutor = ingestJobExecutor;
}

/**
@@ -47,7 +46,7 @@ public final class IngestJobContext {
* @return The context string.
*/
public String getExecutionContext() {
return ingestJobPipeline.getExecutionContext();
return ingestJobExecutor.getExecutionContext();
}

/**
@@ -56,7 +55,7 @@ public final class IngestJobContext {
* @return The data source.
*/
public Content getDataSource() {
return ingestJobPipeline.getDataSource();
return ingestJobExecutor.getDataSource();
}

/**
@@ -65,7 +64,7 @@ public final class IngestJobContext {
* @return The ID.
*/
public long getJobId() {
return ingestJobPipeline.getIngestJobId();
return ingestJobExecutor.getIngestJobId();
}

/**
@@ -79,7 +78,7 @@ public final class IngestJobContext {
*/
@Deprecated
public boolean isJobCancelled() {
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}

/**
@@ -91,7 +90,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean dataSourceIngestIsCancelled() {
return ingestJobPipeline.currentDataSourceIngestModuleIsCancelled() || ingestJobPipeline.isCancelled();
return ingestJobExecutor.currentDataSourceIngestModuleIsCancelled() || ingestJobExecutor.isCancelled();
}

/**
@@ -106,7 +105,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual file ingest
* modules.
*/
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}

/**
@@ -122,7 +121,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual data artifact
* ingest modules.
*/
return ingestJobPipeline.isCancelled();
return ingestJobExecutor.isCancelled();
}

/**
@@ -132,7 +131,7 @@ public final class IngestJobContext {
* @return True or false.
*/
public boolean processingUnallocatedSpace() {
return ingestJobPipeline.shouldProcessUnallocatedSpace();
return ingestJobExecutor.shouldProcessUnallocatedSpace();
}

/**
@@ -146,8 +145,8 @@ public final class IngestJobContext {
@Deprecated
public void scheduleFiles(List<AbstractFile> files) {
addFilesToJob(files);
}

}

/**
* Adds one or more files, e.g., extracted or carved files, to the ingest
* job for processing by its file ingest modules.
@@ -155,17 +154,7 @@ public final class IngestJobContext {
* @param files The files.
*/
public void addFilesToJob(List<AbstractFile> files) {
ingestJobPipeline.addFiles(files);
}

/**
* Adds one or more data artifacts to the ingest job for processing by its
* data artifact ingest modules.
*
* @param artifacts The artifacts.
*/
public void addDataArtifactsToJob(List<DataArtifact> artifacts) {
ingestJobPipeline.addDataArtifacts(artifacts);
ingestJobExecutor.addFiles(files);
}

}
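The module-facing side of this renaming is invisible to clients of IngestJobContext. As a reading aid, here is a hedged sketch of a file ingest module that uses the context; the class itself is hypothetical, but the IngestJobContext and FileIngestModule methods used are either visible in this hunk or part of the published ingest module API (fileIngestIsCancelled() is the cancellation check file ingest modules are expected to poll).

class ExampleFileIngestModule implements FileIngestModule {

    private IngestJobContext context;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context; // the context now wraps an IngestJobExecutor internally
    }

    @Override
    public ProcessResult process(AbstractFile file) {
        if (context.fileIngestIsCancelled()) {
            return ProcessResult.OK; // job cancelled; bail out quickly
        }
        long jobId = context.getJobId(); // delegates to IngestJobExecutor.getIngestJobId()
        if (context.processingUnallocatedSpace()) {
            // Unallocated space chunks are in scope for this job.
        }
        // Derived files (e.g., extracted or carved files) can be fed back into
        // the same job for analysis:
        // context.addFilesToJob(derivedFiles);
        return ProcessResult.OK;
    }

    @Override
    public void shutDown() {
    }
}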
File diff suppressed because it is too large
@@ -56,7 +56,7 @@ class IngestJobInputStream implements IngestStream {
if (closed) {
throw new IngestStreamClosedException("Can not add files - ingest stream is closed");
}
ingestJob.addStreamingIngestFiles(fileObjectIds);
ingestJob.addStreamedFiles(fileObjectIds);
}

@Override
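For context, this one-line change sits on the streaming ingest hand-off: a data source processor pushes file object IDs into the stream as files are committed to the case database, and the stream forwards them to the job. A hedged fragment, where ingestStream is an assumed IngestJobInputStream held as an IngestStream:

// Illustrative: submit newly added files to a streaming ingest job.
ingestStream.addFiles(fileObjectIds); // throws IngestStreamClosedException if the stream is closed
// ...which, per the hunk above, now forwards to ingestJob.addStreamedFiles(fileObjectIds).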
@@ -34,6 +34,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
@@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskCoreException;

@@ -288,13 +290,103 @@ public class IngestManager implements IngestProgressSnapshotProvider {

/**
* Handles artifacts posted events published by the Sleuth Kit layer
* blackboard via the event bus for the case database.
* blackboard via the Sleuth Kit event bus.
*
* @param tskEvent A Sleuth Kit data model ArtifactsPostedEvent from the
* case database event bus.
* @param tskEvent The event.
*/
@Subscribe
void handleArtifactsPosted(Blackboard.ArtifactsPostedEvent tskEvent) {
/*
* Add any new data artifacts included in the event to the source ingest
* job for possible analysis.
*/
List<DataArtifact> newDataArtifacts = new ArrayList<>();
Collection<BlackboardArtifact> newArtifacts = tskEvent.getArtifacts();
for (BlackboardArtifact artifact : newArtifacts) {
if (artifact instanceof DataArtifact) {
newDataArtifacts.add((DataArtifact) artifact);
}
}
if (!newDataArtifacts.isEmpty()) {
IngestJob ingestJob = null;
Optional<Long> ingestJobId = tskEvent.getIngestJobId();
if (ingestJobId.isPresent()) {
synchronized (ingestJobsById) {
ingestJob = ingestJobsById.get(ingestJobId.get());
}
} else {
/*
* There are four use cases where the ingest job ID returned by
* the event is expected to be null:
*
* 1. The artifacts are being posted by a data source processor
* (DSP) module that runs before the ingest job is created,
* i.e., a DSP that does not support streaming ingest and has no
* notion of an ingest job ID. In this use case, the event is
* handled synchronously. The DSP calls
* Blackboard.postArtifacts(), which puts the event on the event
* bus to which this method subscribes, so the event will be
* handled here before the DSP completes and calls
* DataSourceProcessorCallback.done(). This means the code below
* will execute before the ingest job is created, so it will not
* find an ingest job to which to add the artifacts. However,
* the artifacts WILL be analyzed after the ingest job is
* started, when the ingest job executor, working in batch mode,
* schedules ingest tasks for all of the data artifacts in the
* case database. There is a slight risk that the wrong ingest
* job will be selected if multiple ingests of the same data
* source are in progress.
*
* 2. The artifacts were posted by an ingest module that either
* has not been updated to use the current
* Blackboard.postArtifacts() API, or is using it incorrectly.
* In this use case, the code below should be able to find the
* ingest job to which to add the artifacts via their data
* source. There is a slight risk that the wrong ingest job will
* be selected if multiple ingests of the same data source are
* in progress.
*
* 3. The portable case generator uses a
* CommunicationArtifactsHelper constructed with a null ingest
* job ID, and the CommunicationArtifactsHelper posts artifacts.
* Ingest of that data source might be running, in which case
* the data artifact will be analyzed. It also might be analyzed
* by a subsequent ingest job for the data source. This is an
* acceptable edge case.
*
* 4. The user can manually create timeline events with the
* timeline tool, which posts the TSK_TL_EVENT data artifacts.
* The user selects the data source for these artifacts. Ingest
* of that data source might be running, in which case the data
* artifact will be analyzed. It also might be analyzed by a
* subsequent ingest job for the data source. This is an
* acceptable edge case.
*/
DataArtifact dataArtifact = newDataArtifacts.get(0);
try {
Content artifactDataSource = dataArtifact.getDataSource();
synchronized (ingestJobsById) {
for (IngestJob job : ingestJobsById.values()) {
Content dataSource = job.getDataSource();
if (artifactDataSource.getId() == dataSource.getId()) {
ingestJob = job;
break;
}
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source for data artifact (object ID = %d)", dataArtifact.getId()), ex); //NON-NLS
}
}
if (ingestJob != null) {
ingestJob.addDataArtifacts(newDataArtifacts);
}
}

/*
* Publish Autopsy events for the new artifacts, one event per artifact
* type.
*/
for (BlackboardArtifact.Type artifactType : tskEvent.getArtifactTypes()) {
ModuleDataEvent legacyEvent = new ModuleDataEvent(tskEvent.getModuleName(), artifactType, tskEvent.getArtifacts(artifactType));
AutopsyEvent autopsyEvent = new BlackboardPostEvent(legacyEvent);
@@ -825,7 +917,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/
void setIngestTaskProgress(DataSourceIngestTask task, String currentModuleName) {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource());
IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);

/*
@@ -847,10 +939,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap;
try {
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile());
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting file from file ingest task", ex);
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource());
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
}
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);
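The long numbered comment above is aimed at module authors: when an artifact is posted without an ingest job ID, the manager can only guess the owning job from the data source. A minimal sketch of the preferred posting path, using the three-argument postArtifacts() overload that also appears later in this diff; MODULE_NAME and newArtifacts are illustrative names, and context is an IngestJobContext:

// Inside an ingest module's process() method (illustrative):
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
// Supplying the ingest job ID lets handleArtifactsPosted() route any new data
// artifacts to the correct IngestJob without the data-source matching fallback.
blackboard.postArtifacts(newArtifacts, MODULE_NAME, context.getJobId());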
@@ -33,21 +33,24 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;

/**
* An abstract superclass for pipelines of ingest modules that execute ingest
* tasks for an ingest job. Subclasses need to extend this class and to
* implement a specialization of the inner PipelineModule abstract superclass.
* An abstract superclass for pipelines of ingest modules that perform the
* ingest tasks that make up an ingest job. A pipeline performs a task by
* passing it sequentially to the process() method of each module in the
* pipeline.
*
* NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use
* by one thread at a time. There are a few status fields that are volatile to
* ensure visibility to threads making ingest progress snapshots, but methods
* such as startUp(), executeTask() and shutDown() are not synchronized.
*
* @param <T> The ingest task type.
* @param <T> The type of ingest tasks the pipeline performs.
*/
abstract class IngestTaskPipeline<T extends IngestTask> {
abstract class IngestPipeline<T extends IngestTask> {

private static final Logger logger = Logger.getLogger(IngestTaskPipeline.class.getName());
private final IngestJobPipeline ingestJobPipeline;
/*
* NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use
* by one thread at a time. There are a few status fields that are volatile
* to ensure visibility to threads making ingest progress snapshots, but
* methods such as startUp(), performTask() and shutDown() are not
* synchronized.
*/
private static final Logger logger = Logger.getLogger(IngestPipeline.class.getName());
private final IngestJobExecutor ingestJobExecutor;
private final List<IngestModuleTemplate> moduleTemplates;
private final List<PipelineModule<T>> modules;
private volatile Date startTime;
@@ -56,38 +59,34 @@ abstract class IngestTaskPipeline<T extends IngestTask> {

/**
* Constructs the superclass part of a pipeline of ingest modules that
* executes ingest tasks for an ingest job.
* performs ingest tasks for an ingest job.
*
* @param ingestPipeline The parent ingest job pipeline for this ingest
* task pipeline.
* @param moduleTemplates The ingest module templates that define this
* ingest task pipeline. May be an empty list.
* @param ingestJobExecutor The ingest job executor for this pipeline.
* @param moduleTemplates The ingest module templates to be used to
* construct the ingest modules for this pipeline.
* May be an empty list if this type of pipeline is
* not needed for the ingest job.
*/
IngestTaskPipeline(IngestJobPipeline ingestPipeline, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobPipeline = ingestPipeline;
/*
* The creation of ingest modules from the ingest module templates has
* been deliberately deferred to the startUp() method so that any and
* all errors in module construction or start up can be reported to the
* client code.
*/
IngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobExecutor = ingestJobExecutor;
this.moduleTemplates = moduleTemplates;
modules = new ArrayList<>();
}

/**
* Indicates whether or not there are any ingest modules in this ingest task
* Indicates whether or not there are any ingest modules in this ingest
* pipeline.
*
* @return True or false.
* @return True or false; always true before startUp() is called.
*/
boolean isEmpty() {
return modules.isEmpty();
}

/**
* Queries whether or not this ingest task pipeline is running, i.e., the
* startUp() method has been called and the shutDown() has not been called.
* Queries whether or not this ingest pipeline is running, i.e., the
* startUp() method has been called and the shutDown() method has not been
* called yet.
*
* @return True or false.
*/
@@ -96,8 +95,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* Starts up this ingest task pipeline by calling the startUp() methods of
* the ingest modules in the pipeline.
* Starts up this ingest pipeline by calling the startUp() methods of the
* ingest modules in the pipeline.
*
* @return A list of ingest module start up errors, possibly empty.
*/
@@ -110,21 +109,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
* any and all errors in module construction or start up can be
* reported to the client code.
*/
createIngestModules(moduleTemplates);
createIngestModules();
errors.addAll(startUpIngestModules());
} else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline already started"))); //NON-NLS
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline already started"))); //NON-NLS
}
return errors;
}

/**
* Creates the ingest modules for this ingest task pipeline from the given
* ingest module templates.
*
* @param moduleTemplates The ingest module templates.
* Creates the ingest modules for this ingest pipeline using its ingest
* module templates.
*/
private void createIngestModules(List<IngestModuleTemplate> moduleTemplates) {
private void createIngestModules() {
if (modules.isEmpty()) {
for (IngestModuleTemplate template : moduleTemplates) {
Optional<PipelineModule<T>> module = acceptModuleTemplate(template);
@@ -137,8 +134,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {

/**
* Determines if one of the types of ingest modules that can be created from
* a given ingest module template should be added to this ingest task
* pipeline. If so, the ingest module is created and returned.
* a given ingest module template should be added to this ingest pipeline.
* If so, the ingest module is created and returned.
*
* @param template The ingest module template to be used or ignored, as
* appropriate to the pipeline type.
@@ -149,7 +146,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
abstract Optional<PipelineModule<T>> acceptModuleTemplate(IngestModuleTemplate template);

/**
* Starts up the ingest modules in this ingest task pipeline.
* Starts up the ingest modules in this ingest pipeline.
*
* @return A list of ingest module start up errors, possibly empty.
*/
@@ -159,7 +156,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
running = true;
for (PipelineModule<T> module : modules) {
try {
module.startUp(new IngestJobContext(ingestJobPipeline));
module.startUp(new IngestJobContext(ingestJobExecutor));
} catch (Throwable ex) {
/*
* A catch-all exception firewall. Start up errors for all of
@@ -174,10 +171,10 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* Returns the start up time of this ingest task pipeline.
* Returns the start up time of this ingest pipeline.
*
* @return The file processing start time, may be null if this pipeline has
* not been started yet.
* @return The start up time, may be null if this pipeline has not been
* started yet.
*/
Date getStartTime() {
Date reportedStartTime = null;
@@ -188,65 +185,66 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* Executes an ingest task by calling the process() methods of the ingest
* modules in this ingest task pipeline.
* Performs an ingest task by sequentially calling the process() methods of
* the ingest modules in this ingest pipeline.
*
* @param task The task.
*
* @return A list of ingest module task processing errors, possibly empty.
* @return A list of ingest module processing errors, possibly empty.
*/
List<IngestModuleError> executeTask(T task) {
List<IngestModuleError> performTask(T task) {
List<IngestModuleError> errors = new ArrayList<>();
if (running) {
if (!ingestJobPipeline.isCancelled()) {
if (!ingestJobExecutor.isCancelled()) {
pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
return errors;
}
try {
prepareForTask(task);
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
return errors;
}
for (PipelineModule<T> module : modules) {
pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
break;
}
try {
currentModule = module;
currentModule.setProcessingStartTime();
module.executeTask(ingestJobPipeline, task);
} catch (Throwable ex) {
module.process(ingestJobExecutor, task);
} catch (Throwable ex) { // Catch-all exception firewall
/*
* A catch-all exception firewall. Note that a runtime
* exception from a single module does not stop
* Note that an exception from a module does not stop
* processing of the task by the other modules in the
* pipeline.
*/
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
if (ingestJobPipeline.isCancelled()) {
if (ingestJobExecutor.isCancelled()) {
break;
}
}
}
try {
cleanUpAfterTask(task);
} catch (IngestTaskPipelineException ex) {
} catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
}
} else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline not started or shut down"))); //NON-NLS
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline not started or shut down"))); //NON-NLS
}
currentModule = null;
return errors;
}

/**
* Pauses task execution if ingest has been configured to be paused weekly
* at a specified time for a specified duration.
* Pauses this pipeline if ingest has been configured to be paused weekly at
* a specified time, for a specified duration. A pipeline can only be paused
* between calls to module process() methods, i.e., the individual modules
* themselves cannot be paused in the middle of processing a task.
*/
private void pauseIfScheduled() {
if (ScheduledIngestPauseSettings.getPauseEnabled() == true) {
@@ -278,7 +276,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
*/
LocalDateTime timeNow = LocalDateTime.now();
if ((timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart)) && timeNow.isBefore(pauseEnd)) {
ingestJobPipeline.registerPausedIngestThread(Thread.currentThread());
ingestJobExecutor.registerPausedIngestThread(Thread.currentThread());
try {
long timeRemainingMillis = ChronoUnit.MILLIS.between(timeNow, pauseEnd);
logger.log(Level.INFO, String.format("%s pausing at %s for ~%d minutes", Thread.currentThread().getName(), LocalDateTime.now(), TimeUnit.MILLISECONDS.toMinutes(timeRemainingMillis)));
@@ -287,27 +285,27 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} catch (InterruptedException notLogged) {
logger.log(Level.INFO, String.format("%s resuming at %s due to sleep interrupt (ingest job canceled)", Thread.currentThread().getName(), LocalDateTime.now()));
} finally {
ingestJobPipeline.unregisterPausedIngestThread(Thread.currentThread());
ingestJobExecutor.unregisterPausedIngestThread(Thread.currentThread());
}
}
}
}

/**
* Does any task type specific preparation required before executing an
* Does any task-type-specific preparation required before performing an
* ingest task.
*
* @param task The task.
*
* @throws IngestTaskPipelineException Thrown if there is an error preparing
* to execute the task.
* @throws IngestPipelineException Thrown if there is an error preparing to
* perform the task.
*/
abstract void prepareForTask(T task) throws IngestTaskPipelineException;
abstract void prepareForTask(T task) throws IngestPipelineException;

/**
* Gets the currently running ingest module.
*
* @return The module, possibly null if no module is currently running.
* @return The module, possibly null, if no module is currently running.
*/
PipelineModule<T> getCurrentlyRunningModule() {
return currentModule;
@@ -345,22 +343,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* Does any task type specific clean up required after executing an ingest
* Does any task-type-specific clean up required after performing an ingest
* task.
*
* @param task The task.
*
* @throws IngestTaskPipelineException Thrown if there is an error cleaning
* up after performing the task.
* @throws IngestPipelineException Thrown if there is an error cleaning up
* after performing the task.
*/
abstract void cleanUpAfterTask(T task) throws IngestTaskPipelineException;
abstract void cleanUpAfterTask(T task) throws IngestPipelineException;

/**
* An abstract superclass for a decorator that adds ingest infrastructure
* operations to an ingest module.
*
* IMPORTANT: Subclasses of IngestTaskPipeline need to implement a
* specialization this class
* An abstract superclass for an ingest module decorator that adds ingest
* infrastructure operations to an ingest module.
*/
static abstract class PipelineModule<T extends IngestTask> implements IngestModule {

@@ -369,16 +364,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
private volatile Date processingStartTime;

/**
* Constructs an instance of an abstract superclass for a decorator that
* adds ingest infrastructure operations to an ingest module.
* Constructs an instance of an abstract superclass for an ingest module
* decorator that adds ingest infrastructure operations to an ingest
* module.
*
* @param module The ingest module to be wrapped.
* @param module The ingest module to be decorated.
* @param displayName The display name for the module.
*/
PipelineModule(IngestModule module, String displayName) {
this.module = module;
this.displayName = displayName;
this.processingStartTime = new Date();
processingStartTime = new Date();
}

/**
@@ -410,8 +406,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/**
* Gets the processing start time for the decorated module.
*
* @return The start time, will be null if the module has not started
* processing the data source yet.
* @return The start time, not valid if setProcessingStartTime() has not
* been called first.
*/
Date getProcessingStartTime() {
return new Date(processingStartTime.getTime());
@@ -423,17 +419,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* Executes an ingest task using the process() method of the decorated
* Performs an ingest task using the process() method of the decorated
* module.
*
* @param ingestJobPipeline The ingest job pipeline that owns the ingest
* task pipeline this module belongs to.
* @param task The task to execute.
* @param ingestJobExecutor The ingest job executor that owns the ingest
* pipeline to which this module belongs.
* @param task The task to perform.
*
* @throws IngestModuleException Exception thrown if there is an error
* performing the task.
*/
abstract void executeTask(IngestJobPipeline ingestJobPipeline, T task) throws IngestModuleException;
abstract void process(IngestJobExecutor ingestJobExecutor, T task) throws IngestModuleException;

@Override
public void shutDown() {
@@ -443,28 +439,28 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
}

/**
* An exception thrown by an ingest task pipeline.
* An exception thrown by an ingest pipeline.
*/
public static class IngestTaskPipelineException extends Exception {
static class IngestPipelineException extends Exception {

private static final long serialVersionUID = 1L;

/**
* Constructs an exception to be thrown by an ingest task pipeline.
* Constructs an exception to be thrown by an ingest pipeline.
*
* @param message The exception message.
*/
public IngestTaskPipelineException(String message) {
IngestPipelineException(String message) {
super(message);
}

/**
* Constructs an exception to be thrown by an ingest task pipeline.
* Constructs an exception to be thrown by an ingest pipeline.
*
* @param message The exception message.
* @param cause The exception cause.
*/
public IngestTaskPipelineException(String message, Throwable cause) {
IngestPipelineException(String message, Throwable cause) {
super(message, cause);
}
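As a reading aid for the template-method contract above, here is a minimal hypothetical specialization; each override corresponds to an abstract method visible in this diff, while the class name and body comments are invented for illustration.

final class ExamplePipeline extends IngestPipeline<DataArtifactIngestTask> {

    ExamplePipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
        super(ingestJobExecutor, moduleTemplates);
    }

    @Override
    Optional<PipelineModule<DataArtifactIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
        // Accept only templates that can create the module type this pipeline
        // runs; returning Optional.empty() skips the template.
        return Optional.empty();
    }

    @Override
    void prepareForTask(DataArtifactIngestTask task) throws IngestPipelineException {
        // Task-type-specific set up, done before any module sees the task.
    }

    @Override
    void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestPipelineException {
        // Task-type-specific clean up; performTask() runs this even after module errors.
    }
}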
@@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.SleuthkitCase;
*/
public final class IngestServices {

private static Logger logger = Logger.getLogger(IngestServices.class.getName());
private final static Logger logger = Logger.getLogger(IngestServices.class.getName());
private static IngestServices instance = null;

/**
@@ -115,7 +115,7 @@ public final class IngestServices {
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
try {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName());
blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName(), null);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Failed to post artifacts", ex);
}
@@ -22,37 +22,35 @@ import org.sleuthkit.datamodel.Content;

/**
* An ingest task that will be executed by an ingest thread using a given ingest
* job pipeline. Three examples of concrete types of ingest tasks are tasks to
* job executor. Three examples of concrete types of ingest tasks are tasks to
* analyze a data source, tasks to analyze the files in a data source, and tasks
* that analyze data artifacts.
* to analyze data artifacts.
*/
abstract class IngestTask {

private final static long NOT_SET = Long.MIN_VALUE;
private final IngestJobPipeline ingestJobPipeline;
private final IngestJobExecutor ingestJobExecutor;
private long threadId;

/**
* Constructs an ingest task that will be executed by an ingest thread using
* a given ingest job pipeline. Three examples of concrete types of ingest
* tasks are tasks to analyze a data source, tasks to analyze the files in a
* data source, and tasks that analyze data artifacts.
* a given ingest job executor.
*
* @param ingestJobPipeline The ingest job pipeline to use to execute the
* @param ingestJobExecutor The ingest job executor to use to execute the
* task.
*/
IngestTask(IngestJobPipeline ingestJobPipeline) {
this.ingestJobPipeline = ingestJobPipeline;
IngestTask(IngestJobExecutor ingestJobExecutor) {
this.ingestJobExecutor = ingestJobExecutor;
threadId = NOT_SET;
}

/**
* Gets the ingest job pipeline used to complete this task.
* Gets the ingest job executor to use to execute this task.
*
* @return The ingest job pipeline.
* @return The ingest job executor.
*/
IngestJobPipeline getIngestJobPipeline() {
return ingestJobPipeline;
IngestJobExecutor getIngestJobExecutor() {
return ingestJobExecutor;
}

/**
@@ -61,7 +59,7 @@ abstract class IngestTask {
* @return The data source.
*/
Content getDataSource() {
return getIngestJobPipeline().getDataSource();
return getIngestJobExecutor().getDataSource();
}

/**
@@ -84,8 +82,8 @@ abstract class IngestTask {

/**
* Records the ingest thread ID of the calling thread and executes this task
* using the ingest job pipeline specified when the task was created. The
* implementation of the method should simple call
* using the ingest job executor specified when the task was created. The
* implementation of the method should simply call
* super.setThreadId(threadId) and getIngestJobPipeline().process(this).
*
* @param threadId The numeric ID of the ingest thread executing this task.
@ -138,7 +138,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the
* pipeline's ingest modules.
*/
synchronized void scheduleIngestTasks(IngestJobPipeline ingestPipeline) {
synchronized void scheduleIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
if (ingestPipeline.hasDataSourceIngestModules()) {
scheduleDataSourceIngestTask(ingestPipeline);
@ -163,7 +163,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the
* pipeline's ingest modules.
*/
synchronized void scheduleDataSourceIngestTask(IngestJobPipeline ingestPipeline) {
synchronized void scheduleDataSourceIngestTask(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
DataSourceIngestTask task = new DataSourceIngestTask(ingestPipeline);
try {
@ -190,7 +190,7 @@ final class IngestTasksScheduler {
* empty, then all if the files from the data source
* are candidates for scheduling.
*/
synchronized void scheduleFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) {
synchronized void scheduleFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) {
Collection<AbstractFile> candidateFiles;
if (files.isEmpty()) {
@ -220,7 +220,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules.
* @param files A list of file object IDs for the streamed files.
*/
synchronized void scheduleStreamedFileIngestTasks(IngestJobPipeline ingestPipeline, List<Long> fileIds) {
synchronized void scheduleStreamedFileIngestTasks(IngestJobExecutor ingestPipeline, List<Long> fileIds) {
if (!ingestPipeline.isCancelled()) {
for (long id : fileIds) {
/*
@ -252,7 +252,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules.
* @param files The files.
*/
synchronized void fastTrackFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) {
synchronized void fastTrackFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) {
/*
* Put the files directly into the queue for the file ingest
@ -290,7 +290,7 @@ final class IngestTasksScheduler {
* target Content of the task to the pipeline for
* processing by the pipeline's ingest modules.
*/
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline) {
synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) {
Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
try {
@ -318,7 +318,7 @@ final class IngestTasksScheduler {
* source; if empty, then all of the data artifacts
* from the data source will be scheduled.
*/
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline, List<DataArtifact> artifacts) {
synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline, List<DataArtifact> artifacts) {
if (!ingestPipeline.isCancelled()) {
for (DataArtifact artifact : artifacts) {
DataArtifactIngestTask task = new DataArtifactIngestTask(ingestPipeline, artifact);
@ -373,7 +373,7 @@ final class IngestTasksScheduler {
*
* @return True or false.
*/
synchronized boolean currentTasksAreCompleted(IngestJobPipeline ingestPipeline) {
synchronized boolean currentTasksAreCompleted(IngestJobExecutor ingestPipeline) {
long pipelineId = ingestPipeline.getIngestJobId();
return !(dataSourceIngestTasksQueue.hasTasksForJob(pipelineId)
|| hasTasksForJob(topLevelFileIngestTasksQueue, pipelineId)
@ -402,7 +402,7 @@ final class IngestTasksScheduler {
*
* @param ingestJobPipeline The ingest pipeline for the job.
*/
synchronized void cancelPendingFileTasksForIngestJob(IngestJobPipeline ingestJobPipeline) {
synchronized void cancelPendingFileTasksForIngestJob(IngestJobExecutor ingestJobPipeline) {
long jobId = ingestJobPipeline.getIngestJobId();
removeTasksForJob(topLevelFileIngestTasksQueue, jobId);
removeTasksForJob(batchedFileIngestTasksQueue, jobId);
@ -549,7 +549,7 @@ final class IngestTasksScheduler {
for (Content child : file.getChildren()) {
if (child instanceof AbstractFile) {
AbstractFile childFile = (AbstractFile) child;
FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobPipeline(), childFile);
FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobExecutor(), childFile);
if (childFile.hasChildren()) {
batchedFileIngestTasksQueue.add(childTask);
} else if (shouldEnqueueFileTask(childTask)) {
@ -668,7 +668,7 @@ final class IngestTasksScheduler {
private static boolean shouldBeCarved(final FileIngestTask task) {
try {
AbstractFile file = task.getFile();
return task.getIngestJobPipeline().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
return task.getIngestJobExecutor().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
} catch (TskCoreException ex) {
return false;
}
@ -685,7 +685,7 @@ final class IngestTasksScheduler {
private static boolean fileAcceptedByFilter(final FileIngestTask task) {
try {
AbstractFile file = task.getFile();
return !(task.getIngestJobPipeline().getFileIngestFilter().fileIsMemberOf(file) == null);
return !(task.getIngestJobExecutor().getFileIngestFilter().fileIsMemberOf(file) == null);
} catch (TskCoreException ex) {
return false;
}
@ -702,7 +702,7 @@ final class IngestTasksScheduler {
*/
synchronized private static boolean hasTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
return true;
}
}
@ -720,7 +720,7 @@ final class IngestTasksScheduler {
Iterator<? extends IngestTask> iterator = tasks.iterator();
while (iterator.hasNext()) {
IngestTask task = iterator.next();
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
iterator.remove();
}
}
@ -738,7 +738,7 @@ final class IngestTasksScheduler {
private static int countTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
int count = 0;
for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) {
if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
count++;
}
}
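Taken together, these signature changes mean callers now hand the scheduler an IngestJobExecutor instead of an IngestJobPipeline. A rough usage sketch (the getInstance() accessor and variable names are assumptions for illustration):

IngestTasksScheduler scheduler = IngestTasksScheduler.getInstance();  // assumed singleton accessor
scheduler.scheduleIngestTasks(ingestJobExecutor);                     // queue data source, file, and artifact tasks
boolean done = scheduler.currentTasksAreCompleted(ingestJobExecutor); // poll for job completion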
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -35,8 +35,9 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException;

/**
* Event published when new data is posted to the blackboard of a case. The
* "old" value is a legacy ModuleDataEvent object. The "new" value is null.
* An event published when a new artifact (data artifact or analysis result) is
* posted to the blackboard. The "old" value is a legacy ModuleDataEvent object.
* The "new" value is null.
*/
public final class BlackboardPostEvent extends AutopsyEvent implements Serializable {

@ -45,14 +46,15 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
private transient ModuleDataEvent eventData;

/**
* Constructs an event to be published when new content is added to a case
* or there is a change a recorded attribute of existing content.
* Constructs an event published when a new artifact (data artifact or
* analysis result) is posted to the blackboard. The "old" value is a legacy
* ModuleDataEvent object. The "new" value is null.
*
* @param eventData A ModuleDataEvent object containing the data associated
* with the blackboard post.
*/
public BlackboardPostEvent(ModuleDataEvent eventData) {
/**
/*
* Putting a serializable data holding object into oldValue to allow for
* lazy loading of the ModuleDataEvent object for remote events. This
* bypasses the issues related to the serialization and de-serialization
@ -63,9 +65,9 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
IngestManager.IngestModuleEvent.DATA_ADDED.toString(),
new SerializableEventData(eventData.getModuleName(), eventData.getBlackboardArtifactType(), eventData.getArtifacts() != null
? eventData.getArtifacts()
.stream()
.map(BlackboardArtifact::getArtifactID)
.collect(Collectors.toList()) : Collections.emptyList()),
.stream()
.map(BlackboardArtifact::getArtifactID)
.collect(Collectors.toList()) : Collections.emptyList()),
null
);
this.eventData = eventData;
@ -78,13 +80,13 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
*/
@Override
public Object getOldValue() {
/**
* The eventData field is set in the constructor, but it is transient so
* it will become null when the event is serialized for publication over
* a network. Doing a lazy load of the ModuleDataEvent object bypasses
* the issues related to the serialization and de-serialization of
* BlackboardArtifact objects and may also save database round trips
* from other nodes since subscribers to this event are often not
/*
* The eventData field is set in the constructor, but it is transient,
* so it will become null when the event is serialized for publication
* over a network. Doing a lazy load of the ModuleDataEvent object
* bypasses the issues related to the serialization and de-serialization
* of BlackboardArtifact objects and may also save database round trips
* from other hosts since subscribers to this event are often not
* interested in the event data.
*/
if (null != eventData) {
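A sketch of a subscriber that benefits from this design; the listener registration follows the usual IngestManager pattern (an assumption here), and the body deliberately avoids getOldValue() so that no lazy database load is triggered on a remote host:

IngestManager.getInstance().addIngestModuleEventListener(evt -> {
    if (IngestManager.IngestModuleEvent.DATA_ADDED.toString().equals(evt.getPropertyName())) {
        // React to the post without calling evt.getOldValue(); on a remote node
        // that call would lazily rebuild the ModuleDataEvent from the database.
    }
});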
@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -513,8 +513,7 @@ final class AddLogicalImageTask implements Runnable {

private void postArtifacts(List<BlackboardArtifact> artifacts) {
try {
// index the artifact for keyword search
blackboard.postArtifacts(artifacts, MODULE_NAME);
blackboard.postArtifacts(artifacts, MODULE_NAME, null);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
}
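The third argument added throughout this commit is the ingest job ID (a Long). Modules running inside an ingest job pass context.getJobId(); code with no associated job, like this task, passes null. Both call shapes in condensed form (variable names taken from the hunks in this commit):

blackboard.postArtifacts(artifacts, MODULE_NAME, null);                // not tied to an ingest job
blackboard.postArtifacts(artifacts, MODULE_NAME, context.getJobId()); // inside an ingest module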
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2019 Basis Technology Corp.
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -303,7 +303,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
.getAnalysisResult();

Case.getCurrentCase().getServices().getArtifactsBlackboard()
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName(), context.getJobId());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
} catch (Blackboard.BlackboardException ex) {
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -114,7 +114,7 @@ final class DATExtractor extends DroneExtractor {
GeoTrackPoints trackPoints = processCSVFile(context, DATFile, csvFilePath);

if (trackPoints != null && !trackPoints.isEmpty()) {
(new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile)).addTrack(DATFile.getName(), trackPoints, null);
(new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile, context.getJobId())).addTrack(DATFile.getName(), trackPoints, null);
} else {
logger.log(Level.INFO, String.format("No trackpoints with valid longitude or latitude found in %s", DATFile.getName())); //NON-NLS
}
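GeoArtifactsHelper gains the same optional ingest job ID as a trailing constructor argument. Restated from the new line above with the call split out (a readability sketch, not new behavior):

GeoArtifactsHelper helper = new GeoArtifactsHelper(
        getSleuthkitCase(), getName(), "DatCon", DATFile, context.getJobId());
helper.addTrack(DATFile.getName(), trackPoints, null); // null: no extra artifact attributes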
@ -341,7 +341,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());

String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
@ -870,7 +870,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -172,7 +172,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2017-2018 Basis Technology Corp.
* Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -212,7 +212,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -155,7 +155,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -206,7 +206,7 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this
* new artifact
*/
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
}
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
@ -43,13 +42,11 @@ import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -552,7 +549,7 @@ public class HashDbIngestModule implements FileIngestModule {
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(badFile, moduleName);
blackboard.postArtifact(badFile, moduleName, jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014-2018 Basis Technology Corp.
* Copyright 2014-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -151,7 +151,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
try {

// Post thet artifact to the blackboard.
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule {
}

try {
aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName());
aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
}
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
}

try {
iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName());
iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
}
@ -333,7 +333,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
* added to reports
*/
private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) {
List<String> allIndexFiles = new ArrayList<>();
List<String> allIndexFiles;

try (Stream<Path> walk = Files.walk(iLeappOutputDir)) {

@ -402,7 +402,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
String fileName = FilenameUtils.getName(ffp);
String filePath = FilenameUtils.getPath(ffp);

List<AbstractFile> iLeappFiles = new ArrayList<>();
List<AbstractFile> iLeappFiles;
try {
if (filePath.isEmpty()) {
iLeappFiles = fileManager.findFiles(dataSource, fileName); //NON-NLS
@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2020 Basis Technology Corp.
* Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -61,6 +61,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.datamodel.AbstractFile;
@ -108,10 +109,10 @@ public final class LeappFileProcessor {
* Main constructor.
*
* @param attributeType The BlackboardAttribute type or null if not
* used. used.
* @param columnName The name of the column in the tsv file.
* @param required Whether or not this attribute is required to be
* present.
* used. used.
* @param columnName The name of the column in the tsv file.
* @param required Whether or not this attribute is required to be
* present.
*/
TsvColumn(BlackboardAttribute.Type attributeType, String columnName, boolean required) {
this.attributeType = attributeType;
@ -144,6 +145,7 @@ public final class LeappFileProcessor {
private static final Logger logger = Logger.getLogger(LeappFileProcessor.class.getName());
private final String xmlFile; //NON-NLS
private final String moduleName;
private final IngestJobContext context;

private final Map<String, String> tsvFiles;
private final Map<String, BlackboardArtifact.Type> tsvFileArtifacts;
@ -192,15 +194,16 @@ public final class LeappFileProcessor {
.put("call history.tsv", "calllog")
.build();

Blackboard blkBoard;
private final Blackboard blkBoard;

public LeappFileProcessor(String xmlFile, String moduleName) throws IOException, IngestModuleException, NoCurrentCaseException {
public LeappFileProcessor(String xmlFile, String moduleName, IngestJobContext context) throws IOException, IngestModuleException, NoCurrentCaseException {
this.tsvFiles = new HashMap<>();
this.tsvFileArtifacts = new HashMap<>();
this.tsvFileArtifactComments = new HashMap<>();
this.tsvFileAttributes = new HashMap<>();
this.xmlFile = xmlFile;
this.moduleName = moduleName;
this.context = context;

blkBoard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
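With the new IngestJobContext parameter, the two Leapp ingest modules construct the processor as their hunks above show; in condensed sketch form (exception handling elided):

LeappFileProcessor processor = new LeappFileProcessor(
        XMLFILE, ALeappAnalyzerModuleFactory.getModuleName(), context); // context: the module's IngestJobContext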
@ -218,7 +221,8 @@ public final class LeappFileProcessor {
"LeappFileProcessor.has.run=Leapp",
"LeappFileProcessor.Leapp.cancelled=Leapp run was canceled",
"LeappFileProcessor.completed=Leapp Processing Completed",
"LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"})
"LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"
})
public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile LeappFile) {
try {
List<String> LeappTsvOutputFiles = findTsvFiles(moduleOutputPath);
@ -249,7 +253,7 @@ public final class LeappFileProcessor {
* we know we want to process and return the list to process those files.
*/
private List<String> findTsvFiles(Path LeappOutputDir) throws IngestModuleException {
List<String> allTsvFiles = new ArrayList<>();
List<String> allTsvFiles;
List<String> foundTsvFiles = new ArrayList<>();

try (Stream<Path> walk = Files.walk(LeappOutputDir)) {
@ -275,7 +279,7 @@ public final class LeappFileProcessor {
* Process the Leapp files that were found that match the xml mapping file
*
* @param LeappFilesToProcess List of files to process
* @param LeappImageFile Abstract file to create artifact for
* @param LeappImageFile Abstract file to create artifact for
*
* @throws FileNotFoundException
* @throws IOException
@ -308,7 +312,7 @@ public final class LeappFileProcessor {
* Process the Leapp files that were found that match the xml mapping file
*
* @param LeappFilesToProcess List of files to process
* @param dataSource The data source.
* @param dataSource The data source.
*
* @throws FileNotFoundException
* @throws IOException
@ -318,7 +322,7 @@ public final class LeappFileProcessor {

for (String LeappFileName : LeappFilesToProcess) {
String fileName = FilenameUtils.getName(LeappFileName);
File LeappFile = new File(LeappFileName);
File LeappFile = new File(LeappFileName);
if (tsvFileAttributes.containsKey(fileName)) {
List<TsvColumn> attrList = tsvFileAttributes.get(fileName);
BlackboardArtifact.Type artifactType = tsvFileArtifacts.get(fileName);
@ -345,12 +349,12 @@ public final class LeappFileProcessor {
String trackpointSegmentName = null;
GeoTrackPoints pointList = new GeoTrackPoints();
AbstractFile geoAbstractFile = null;

if (LeappFile == null || !LeappFile.exists() || fileName == null) {
logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile == null ? LeappFile.toString() : "<null>"));
logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile != null ? LeappFile.toString() : "<null>"));
return;
} else if (attrList == null || artifactType == null || dataSource == null) {
logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile == null ? LeappFile.toString() : "<null>"));
logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile.toString()));
return;
}

@ -405,11 +409,10 @@ public final class LeappFileProcessor {
}
}
}

try {
if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase() == "trackpoint") {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile)).addTrack(trackpointSegmentName, pointList, new ArrayList<>());

if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase().equals("trackpoint")) {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile, context.getJobId())).addTrack(trackpointSegmentName, pointList, new ArrayList<>());
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -418,10 +421,9 @@ public final class LeappFileProcessor {
}
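The switch from == to .equals(...) in this hunk fixes a genuine bug: == compares String references, not contents, so the trackpoint branch could silently never execute. In isolation (the file name here is illustrative):

String relationship = ACCOUNT_RELATIONSHIPS.getOrDefault("gpx.tsv", "norelationship");
boolean broken = (relationship.toLowerCase() == "trackpoint");   // reference comparison; unreliable
boolean fixed = relationship.toLowerCase().equals("trackpoint"); // content comparison; correct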
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact.",
"LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact."
})

private void createRoute (Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
private void createRoute(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {

Double startLatitude = Double.valueOf(0);
Double startLongitude = Double.valueOf(0);
@ -433,9 +435,9 @@ public final class LeappFileProcessor {
Long dateTime = Long.valueOf(0);
Collection<BlackboardAttribute> otherAttributes = new ArrayList<>();
String sourceFile = null;
AbstractFile absFile = null;
AbstractFile absFile;
String comment = "";

try {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
@ -478,19 +480,17 @@ public final class LeappFileProcessor {
GeoWaypoints waypointList = new GeoWaypoints();
waypointList.addPoint(new Waypoint(startLatitude, startLongitude, zeroValue, ""));
waypointList.addPoint(new Waypoint(endLatitude, endLongitude, zeroValue, locationName));
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addRoute(destinationName, dateTime, waypointList, new ArrayList<>());

(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addRoute(destinationName, dateTime, waypointList, new ArrayList<>());

} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_waypoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}

}

@NbBundle.Messages({
"LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact.",
})

}

@NbBundle.Messages({
"LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact."
})
private AbstractFile createTrackpoint(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName, String trackpointSegmentName, GeoTrackPoints pointList) throws IngestModuleException {

Double latitude = Double.valueOf(0);
@ -503,7 +503,7 @@ public final class LeappFileProcessor {
String sourceFile = null;
String comment = null;
AbstractFile absFile = null;

try {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
@ -538,29 +538,24 @@ public final class LeappFileProcessor {
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
if ((trackpointSegmentName == null) || (trackpointSegmentName == segmentName)) {
trackpointSegmentName = segmentName;
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
if ((trackpointSegmentName == null) || (trackpointSegmentName.equals(segmentName))) {
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
} else {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addTrack(segmentName, pointList, new ArrayList<>());
trackpointSegmentName = segmentName;
pointList = new GeoTrackPoints();
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));

(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addTrack(segmentName, pointList, new ArrayList<>());
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));

}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_trackpoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}

return absFile;

return absFile;

}
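The interleaved old/new lines make that hunk hard to follow; reassembled from the new lines only, the post-change segment handling reads (indentation restored, surrounding context elided):

if ((trackpointSegmentName == null) || (trackpointSegmentName.equals(segmentName))) {
    pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName,
            zeroValue, zeroValue, zeroValue, dateTime));
} else {
    (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName,
            comment, absFile, context.getJobId())).addTrack(segmentName, pointList, new ArrayList<>());
    pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName,
            zeroValue, zeroValue, zeroValue, dateTime));
}

Note the same == to .equals(...) correction as earlier, plus the added ingest job ID on the helper.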
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship.",
"LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship."
})

private void createMessageRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {

String messageType = null;
@ -577,7 +572,7 @@ public final class LeappFileProcessor {
List<BlackboardAttribute> otherAttributes = new ArrayList<>();
List<FileAttachment> fileAttachments = new ArrayList<>();
String sourceFile = null;
MessageAttachments messageAttachments = null;
MessageAttachments messageAttachments;

try {
for (BlackboardAttribute bba : bbattributes) {
@ -614,7 +609,7 @@ public final class LeappFileProcessor {
sourceFile = bba.getValueString();
break;
case "TSK_READ_STATUS":
if (bba.getValueInt() == 1 ) {
if (bba.getValueInt() == 1) {
messageStatus = MessageReadStatus.READ;
} else {
messageStatus = MessageReadStatus.UNREAD;
@ -638,22 +633,22 @@ public final class LeappFileProcessor {
AbstractFile absFile = findAbstractFile(dataSource, sourceFile);
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
CommunicationArtifactsHelper accountArtifact;
Account.Type accountType = getAccountType(fileName);
if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
}
BlackboardArtifact messageArtifact = accountArtifact.addMessage(messageType, communicationDirection, senderId,
receipentId, dateTime, messageStatus, subject,
messageText, threadId, otherAttributes);
CommunicationArtifactsHelper accountHelper;
Account.Type accountType = getAccountType(fileName);
if (alternateId == null) {
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact messageArtifact = accountHelper.addMessage(messageType, communicationDirection, senderId,
receipentId, dateTime, messageStatus, subject,
messageText, threadId, otherAttributes);
if (!fileAttachments.isEmpty()) {
messageAttachments = new MessageAttachments(fileAttachments, new ArrayList<>());
accountArtifact.addAttachments(messageArtifact, messageAttachments);
accountHelper.addAttachments(messageArtifact, messageAttachments);
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -662,7 +657,7 @@ public final class LeappFileProcessor {
}
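Reassembled from the new lines of the hunk above, the renamed helper and the extra job ID argument yield this construction pattern (indentation restored):

CommunicationArtifactsHelper accountHelper;
if (alternateId == null) {
    accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
            moduleName, absFile, accountType, context.getJobId());
} else {
    accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
            moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact messageArtifact = accountHelper.addMessage(messageType, communicationDirection,
        senderId, receipentId, dateTime, messageStatus, subject, messageText, threadId, otherAttributes);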
@NbBundle.Messages({
"LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship.",
"LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship."
})
private void createContactRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {

@ -715,16 +710,16 @@ public final class LeappFileProcessor {
}
Account.Type accountType = getAccountType(fileName);
if (accountType != null) {

CommunicationArtifactsHelper accountArtifact;

CommunicationArtifactsHelper accountHelper;
if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact messageArtifact = accountArtifact.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes);
BlackboardArtifact messageArtifact = accountHelper.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes);
}
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_contact_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -732,14 +727,13 @@ public final class LeappFileProcessor {
}

@NbBundle.Messages({
"LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship.",
"LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship."
})

private void createCalllogRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {

String callerId = null;
String alternateId = null;
List<String> calleeId = Arrays.asList();
List<String> calleeId = Arrays.asList();
CommunicationDirection communicationDirection = CommunicationDirection.UNKNOWN;
Long startDateTime = Long.valueOf(0);
Long endDateTime = Long.valueOf(0);
@ -751,14 +745,14 @@ public final class LeappFileProcessor {
for (BlackboardAttribute bba : bbattributes) {
switch (bba.getAttributeType().getTypeName()) {
case "TSK_TEXT_FILE":
sourceFile = bba.getValueString();
break;
sourceFile = bba.getValueString();
break;
case "TSK_DATETIME_START":
startDateTime = bba.getValueLong();
break;
startDateTime = bba.getValueLong();
break;
case "TSK_DATETIME_END":
startDateTime = bba.getValueLong();
break;
startDateTime = bba.getValueLong();
break;
case "TSK_DIRECTION":
if (bba.getValueString().toLowerCase().equals("outgoing")) {
communicationDirection = CommunicationDirection.OUTGOING;
@ -773,8 +767,8 @@ public final class LeappFileProcessor {
break;
case "TSK_PHONE_NUMBER_TO":
if (!bba.getValueString().isEmpty()) {
String [] calleeTempList = bba.getValueString().split(",", 0);
calleeId = Arrays.asList(calleeTempList);
String[] calleeTempList = bba.getValueString().split(",", 0);
calleeId = Arrays.asList(calleeTempList);
}
break;
case "TSK_ID":
@ -786,32 +780,32 @@ public final class LeappFileProcessor {
break;
}
}

if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING) {
String [] calleeTempList = callerId.split(",", 0);
calleeId = Arrays.asList(calleeTempList);
callerId = null;
}

if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING && callerId != null) {
String[] calleeTempList = callerId.split(",", 0);
calleeId = Arrays.asList(calleeTempList);
callerId = null;
}
AbstractFile absFile = findAbstractFile(dataSource, sourceFile);
if (absFile == null) {
absFile = (AbstractFile) dataSource;
}
Account.Type accountType = getAccountType(fileName);
CommunicationArtifactsHelper accountArtifact;
CommunicationArtifactsHelper accountHelper;
if (accountType != null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, context.getJobId());
} else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId);
accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
}
BlackboardArtifact callLogArtifact = accountArtifact.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes);
accountHelper.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes);
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_calllog_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
}

}

private Account.Type getAccountType(String AccountTypeName) {
switch (AccountTypeName.toLowerCase()) {
case "zapya.tsv":
@ -849,7 +843,7 @@ public final class LeappFileProcessor {
case "whatsapp - contacts.tsv":
return Account.Type.WHATSAPP;
case "tangomessages messages.tsv":
return Account.Type.TANGO;
return Account.Type.TANGO;
case "shareit file transfer.tsv":
return Account.Type.SHAREIT;
case "line - calllogs.tsv":
@ -880,20 +874,22 @@ public final class LeappFileProcessor {
return Account.Type.PHONE;
}
}

/**
* Process the line read and create the necessary attributes for it.
*
* @param lineValues List of column values.
* @param lineValues List of column values.
* @param columnIndexes Mapping of column headers (trimmed; to lower case)
* to column index. All header columns and only all header columns should be
* present.
* @param attrList The list of attributes as specified for the schema of
* this file.
* @param fileName The name of the file being processed.
* @param lineNum The line number in the file.
* to column index. All header columns and only all
* header columns should be present.
* @param attrList The list of attributes as specified for the schema
* of this file.
* @param fileName The name of the file being processed.
* @param lineNum The line number in the file.
*
* @return The collection of blackboard attributes for the artifact created
* from this line.
* from this line.
*
* @throws IngestModuleException
*/
private Collection<BlackboardAttribute> processReadLine(List<String> lineValues, Map<String, Integer> columnIndexes,
@ -930,7 +926,7 @@ public final class LeappFileProcessor {

String formattedValue = formatValueBasedOnAttrType(colAttr, value);

BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
BlackboardAttribute attr = getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
if (attr == null) {
logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName));
return Collections.emptyList();
@ -949,9 +945,10 @@ public final class LeappFileProcessor {
* Check type of attribute and possibly format string based on it.
*
* @param colAttr Column Attribute information
* @param value string to be formatted
* @param value string to be formatted
*
* @return formatted string based on attribute type if no attribute type
* found then return original string
* found then return original string
*/
private String formatValueBasedOnAttrType(TsvColumn colAttr, String value) {
if (colAttr.getAttributeType().getTypeName().equals("TSK_DOMAIN")) {
@ -971,9 +968,10 @@ public final class LeappFileProcessor {
* value.
*
* @param attrType The attribute type.
* @param value The string value to be converted to the appropriate data
* type for the attribute type.
* @param value The string value to be converted to the appropriate data
* type for the attribute type.
* @param fileName The file name that the value comes from.
*
* @return The generated blackboard attribute.
*/
private BlackboardAttribute getAttribute(BlackboardAttribute.Type attrType, String value, String fileName) {
@ -998,7 +996,7 @@ public final class LeappFileProcessor {
(v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v).longValue()));
case DOUBLE:
return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, (double) Double.valueOf(v)));
(v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v)));
case BYTE:
return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, new byte[]{Byte.valueOf(v)}));
@ -1022,7 +1020,9 @@ public final class LeappFileProcessor {
* Handles converting a string value to a blackboard attribute.
*
* @param orig The original string value.
*
* @return The generated blackboard attribute.
*
* @throws ParseException
* @throws NumberFormatException
*/
@ -1033,36 +1033,41 @@ public final class LeappFileProcessor {
* Runs parsing function on string value to convert to right data type and
* generates a blackboard attribute for that converted data type.
*
* @param value The string value.
* @param attrType The blackboard attribute type.
* @param fileName The name of the file from which the value comes.
* @param blankIsNull If string is blank return null attribute.
* @param zeroIsNull If string is some version of 0, return null attribute.
* @param value The string value.
* @param attrType The blackboard attribute type.
* @param fileName The name of the file from which the value comes.
* @param blankIsNull If string is blank return null attribute.
* @param zeroIsNull If string is some version of 0, return null
* attribute.
* @param valueConverter The means of converting the string value to an
* appropriate blackboard attribute.
* appropriate blackboard attribute.
*
* @return The generated blackboard attribute or null if not determined.
*/
private BlackboardAttribute parseAttrValue(String value, BlackboardAttribute.Type attrType, String fileName, boolean blankIsNull, boolean zeroIsNull, ParseExceptionFunction valueConverter) {
// remove non-printable characters from tsv input
// https://stackoverflow.com/a/6199346
value = value.replaceAll("\\p{C}", "");
String sanitizedValue = value.replaceAll("\\p{C}", "");

if (blankIsNull && StringUtils.isBlank(value)) {
if (blankIsNull && StringUtils.isBlank(sanitizedValue)) {
return null;
}

if (zeroIsNull && value.matches("^\\s*[0\\.]*\\s*$")) {
if (zeroIsNull && sanitizedValue.matches("^\\s*[0\\.]*\\s*$")) {
return null;
}

try {
return valueConverter.apply(value);
return valueConverter.apply(sanitizedValue);
} catch (NumberFormatException | ParseException ex) {
logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", value, attrType.getValueType().getLabel(), fileName), ex);
logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", sanitizedValue, attrType.getValueType().getLabel(), fileName), ex);
return null;
}
}
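The sanitizedValue rework above amounts to a small sanitize-then-parse pipeline. Standalone, under the same rules (plain Java; the sample input is hypothetical, and StringUtils is the same Apache Commons helper used above):

String raw = "\u00071.5";                        // TSV cell polluted with a control character
String sanitized = raw.replaceAll("\\p{C}", ""); // same regex as above: strip category-C (non-printable) chars
if (!StringUtils.isBlank(sanitized)                   // blankIsNull rule
        && !sanitized.matches("^\\s*[0\\.]*\\s*$")) { // zeroIsNull rule
    double parsed = Double.parseDouble(sanitized);    // converter step, analogous to valueConverter.apply
}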
||||
/**
|
||||
* Read the XML config file and load the mappings into maps
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.",
|
||||
"LeappFileProcessor.cannotBuildXmlParser=Cannot buld an XML parser.",
|
||||
@ -1070,10 +1075,6 @@ public final class LeappFileProcessor {
|
||||
"LeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact",
|
||||
"LeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts."
|
||||
})
|
||||
|
||||
/**
|
||||
* Read the XML config file and load the mappings into maps
|
||||
*/
|
||||
private void loadConfigFile() throws IngestModuleException {
|
||||
Document xmlinput;
|
||||
try {
|
||||
@ -1120,7 +1121,7 @@ public final class LeappFileProcessor {
|
||||
|
||||
BlackboardArtifact.Type foundArtifactType = null;
|
||||
try {
|
||||
foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getArtifactType(artifactName);
|
||||
foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getArtifactType(artifactName);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch artifact type for %s.", artifactName), ex);
|
||||
}
|
||||
@ -1157,7 +1158,7 @@ public final class LeappFileProcessor {
|
||||
for (int k = 0; k < attributeNlist.getLength(); k++) {
|
||||
NamedNodeMap nnm = attributeNlist.item(k).getAttributes();
|
||||
String attributeName = nnm.getNamedItem("attributename").getNodeValue();
|
||||
|
||||
|
||||
if (!attributeName.toLowerCase().matches("null")) {
|
||||
String columnName = nnm.getNamedItem("columnName").getNodeValue();
|
||||
String required = nnm.getNamedItem("required").getNodeValue();
|
||||
@ -1165,7 +1166,7 @@ public final class LeappFileProcessor {
|
||||
|
||||
BlackboardAttribute.Type foundAttrType = null;
|
||||
try {
|
||||
foundAttrType = Case.getCurrentCase().getSleuthkitCase().getAttributeType(attributeName.toUpperCase());
|
||||
foundAttrType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getAttributeType(attributeName.toUpperCase());
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch attribute type for %s.", attributeName), ex);
|
||||
}
|
||||
@ -1181,10 +1182,13 @@ public final class LeappFileProcessor {
|
||||
|
||||
if (columnName == null) {
|
||||
logger.log(Level.SEVERE, String.format("No column name provided for [%s]", getXmlAttrIdentifier(parentName, attributeName)));
|
||||
continue;
|
||||
} else if (columnName.trim().length() != columnName.length()) {
|
||||
logger.log(Level.SEVERE, String.format("Column name '%s' starts or ends with whitespace for [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
|
||||
continue;
|
||||
} else if (columnName.matches("[^ \\S]")) {
|
||||
logger.log(Level.SEVERE, String.format("Column name '%s' contains invalid characters [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
|
||||
continue;
|
||||
}
|
||||
|
||||
TsvColumn thisCol = new TsvColumn(
|
||||
@@ -1209,11 +1213,12 @@ public final class LeappFileProcessor {
 /**
 * Generic method for creating a blackboard artifact with attributes
 *
-* @param artType The artifact type.
-* @param dataSource is the Content object that needs to have the artifact
-* added for it
+* @param artType The artifact type.
+* @param dataSource is the Content object that needs to have the artifact
+* added for it
 * @param bbattributes is the collection of blackboard attributes that need
-* to be added to the artifact after the artifact has been created
+* to be added to the artifact after the artifact has
+* been created
 *
 * @return The newly-created artifact, or null on error
 */

@@ -1225,7 +1230,7 @@ public final class LeappFileProcessor {
 case ANALYSIS_RESULT:
 return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult();
 default:
-logger.log(Level.SEVERE, "Unknown category type: " + artType.getCategory().getDisplayName());
+logger.log(Level.SEVERE, String.format("Unknown category type: %s", artType.getCategory().getDisplayName()));
 return null;
 }
 } catch (TskException ex) {

@@ -1238,7 +1243,7 @@ public final class LeappFileProcessor {
 * Method to post a list of BlackboardArtifacts to the blackboard.
 *
 * @param artifacts A list of artifacts. IF list is empty or null, the
-* function will return.
+* function will return.
 */
 void postArtifacts(Collection<BlackboardArtifact> artifacts) {
 if (artifacts == null || artifacts.isEmpty()) {

@@ -1246,7 +1251,7 @@ public final class LeappFileProcessor {
 }

 try {
-Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName);
+Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName, context.getJobId());
 } catch (Blackboard.BlackboardException ex) {
 logger.log(Level.SEVERE, Bundle.LeappFileProcessor_postartifacts_error(), ex); //NON-NLS
 }

@@ -1259,7 +1264,7 @@ public final class LeappFileProcessor {
 */
 private void configExtractor() throws IOException {
 PlatformUtil.extractResourceToUserConfigDir(LeappFileProcessor.class,
-xmlFile, true);
+xmlFile, true);
 }

 private static final Set<String> ALLOWED_EXTENSIONS = new HashSet<>(Arrays.asList("zip", "tar", "tgz"));

@@ -1316,14 +1321,14 @@ public final class LeappFileProcessor {
 }
 }

 private AbstractFile findAbstractFile(Content dataSource, String fileNamePath) {
 if (fileNamePath == null) {
 return null;
 }

 List<AbstractFile> files;

 String fileName = FilenameUtils.getName(fileNamePath);
 String filePath = FilenameUtils.normalize(FilenameUtils.getPath(fileNamePath), true);

@@ -1347,4 +1352,4 @@ public final class LeappFileProcessor {
 return null;

 }
 }
 }
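Taken together, the LeappFileProcessor hunks above route type lookups through the case Blackboard and create either a data artifact or an analysis result depending on the artifact type's category. A hedged sketch of that branch follows; the ANALYSIS_RESULT call is taken from the hunk above, while the DATA_ARTIFACT branch and its trailing null OS-account argument are assumed from the newDataArtifact usage in AddManualEvent later in this diff:

    import java.util.ArrayList;
    import java.util.Collection;
    import java.util.logging.Level;
    import java.util.logging.Logger;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.Score;
    import org.sleuthkit.datamodel.TskCoreException;

    final class ArtifactFactory {

        private static final Logger logger = Logger.getLogger(ArtifactFactory.class.getName());

        // Create an artifact of the kind its type's category requires.
        static BlackboardArtifact create(Content dataSource, BlackboardArtifact.Type artType,
                Collection<BlackboardAttribute> bbattributes) throws TskCoreException {
            switch (artType.getCategory()) {
                case DATA_ARTIFACT:
                    // trailing null is the OS account id, as used elsewhere in this diff
                    return dataSource.newDataArtifact(artType, new ArrayList<>(bbattributes), null);
                case ANALYSIS_RESULT:
                    return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN,
                            null, null, null, bbattributes).getAnalysisResult();
                default:
                    logger.log(Level.SEVERE, String.format("Unknown category type: %s",
                            artType.getCategory().getDisplayName()));
                    return null;
            }
        }
    }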
@@ -172,7 +172,7 @@ public class EXIFProcessor implements PictureProcessor {
 artifacts.add(userSuspectedArtifact);

 try {
-blackboard.postArtifacts(artifacts, MODULE_NAME);
+blackboard.postArtifacts(artifacts, MODULE_NAME, context.getJobId());
 } catch (Blackboard.BlackboardException ex) {
 logger.log(Level.SEVERE, String.format("Error posting TSK_METADATA_EXIF and TSK_USER_CONTENT_SUSPECTED artifacts for %s (object ID = %d)", file.getName(), file.getId()), ex); //NON-NLS
 }
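This EXIFProcessor hunk, and the Plaso and YARA hunks that follow, make the same mechanical change: posting now carries the ingest job id, so blackboard events can be scoped to the job that produced the artifacts. A minimal sketch of the pattern, assuming a module that saved its IngestJobContext in startUp() (the class and field names here are illustrative):

    import java.util.Collection;
    import java.util.logging.Level;
    import java.util.logging.Logger;
    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;

    class JobScopedPoster {

        private static final Logger logger = Logger.getLogger(JobScopedPoster.class.getName());
        private final IngestJobContext context; // kept from the module's startUp()

        JobScopedPoster(IngestJobContext context) {
            this.context = context;
        }

        void post(Collection<BlackboardArtifact> artifacts, String moduleName) {
            try {
                Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
                // The job id argument is what this commit adds everywhere.
                blackboard.postArtifacts(artifacts, moduleName, context.getJobId());
            } catch (Blackboard.BlackboardException ex) {
                logger.log(Level.SEVERE, "Error posting artifacts", ex);
            }
        }
    }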
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2018-2019 Basis Technology Corp.
+* Copyright 2018-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -352,7 +352,7 @@ public class PlasoIngestModule implements DataSourceIngestModule {
 * keyword search, and fire an event to notify UI of
 * this new artifact
 */
-blackboard.postArtifact(bbart, MODULE_NAME);
+blackboard.postArtifact(bbart, MODULE_NAME, context.getJobId());
 } catch (BlackboardException ex) {
 logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
 }
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2020 Basis Technology Corp.
+* Copyright 2020-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -159,7 +159,7 @@ public class YaraIngestModule extends FileIngestModuleAdapter {

 if(!artifacts.isEmpty()) {
 Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
-blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName());
+blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName(), context.getJobId());
 }

 } catch (BlackboardException | NoCurrentCaseException | IngestModuleException | TskCoreException | YaraWrapperException ex) {
@@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.casemodule.services.TagsManager;
 import org.sleuthkit.autopsy.coreutils.ImageUtils;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.report.ReportProgressPanel;
-import static org.sleuthkit.autopsy.casemodule.services.TagsManager.getNotableTagLabel;
 import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;

@@ -358,7 +357,7 @@ class TableReportGenerator {
 // Give the modules the rows for the content tags.
 for (ContentTag tag : tags) {
 try {
-if(shouldFilterFromReport(tag.getContent())) {
+if (shouldFilterFromReport(tag.getContent())) {
 continue;
 }
 } catch (TskCoreException ex) {

@@ -366,7 +365,7 @@ class TableReportGenerator {
 logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS
 return;
 }

 // skip tags that we are not reporting on
 String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? TagsManager.getNotableTagLabel() : "";
 if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) {

@@ -451,15 +450,15 @@ class TableReportGenerator {
 // Give the modules the rows for the content tags.
 for (BlackboardArtifactTag tag : tags) {
 try {
-if(shouldFilterFromReport(tag.getContent())) {
+if (shouldFilterFromReport(tag.getContent())) {
 continue;
 }
-} catch (TskCoreException ex) {
+} catch (TskCoreException ex) {
 errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBArtifactTags"));
 logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS
 return;
 }

 String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? TagsManager.getNotableTagLabel() : "";
 if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) {
 continue;

@@ -813,7 +812,7 @@ class TableReportGenerator {
 AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId);
 if (f != null) {
 uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath();
-if(shouldFilterFromReport(f)) {
+if (shouldFilterFromReport(f)) {
 continue;
 }
 }

@@ -973,7 +972,7 @@ class TableReportGenerator {
 AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId);
 if (f != null) {
 uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath();
-if(shouldFilterFromReport(f)) {
+if (shouldFilterFromReport(f)) {
 continue;
 }
 }

@@ -1217,11 +1216,11 @@ class TableReportGenerator {
 private List<ArtifactData> getFilteredArtifacts(BlackboardArtifact.Type type, HashSet<String> tagNamesFilter) {
 List<ArtifactData> artifacts = new ArrayList<>();
 try {
-for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifacts(type.getTypeID())) {
-if(shouldFilterFromReport(artifact)) {
+for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getArtifacts(Collections.singletonList(type), settings.getSelectedDataSources())) {
+if (shouldFilterFromReport(artifact)) {
 continue;
 }

 List<BlackboardArtifactTag> tags = Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact);
 HashSet<String> uniqueTagNames = new HashSet<>();
 for (BlackboardArtifactTag tag : tags) {

@@ -1232,7 +1231,7 @@ class TableReportGenerator {
 continue;
 }
 try {
-artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardAttributes(artifact), uniqueTagNames));
+artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getBlackboardAttributes(artifact), uniqueTagNames));
 } catch (TskCoreException ex) {
 errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBAttribs"));
 logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); //NON-NLS

@@ -1339,7 +1338,7 @@ class TableReportGenerator {
 new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH)));

 columns.add(new AttributeColumn(NbBundle.getMessage(this.getClass(), "ReportGenerator.artTableColHdr.dateTime"),
-new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED )));
+new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED)));

 attributeTypeSet.remove(new Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
 } else if (BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG.getTypeID() == artifactTypeId) {

@@ -1817,19 +1816,19 @@ class TableReportGenerator {
 return "";
 }

 /**
 * Indicates if the content should be filtered from the report.
 */
 private boolean shouldFilterFromReport(Content content) throws TskCoreException {
-if(this.settings.getSelectedDataSources() == null) {
+if (this.settings.getSelectedDataSources() == null) {
 return false;
 }

 if (content.getDataSource() == null) {
 return false;
 }

 long dataSourceId = content.getDataSource().getId();
 return !this.settings.getSelectedDataSources().contains(dataSourceId);
 }
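The shouldFilterFromReport helper above encodes the report's data-source scoping rule: with no data-source selection nothing is filtered; otherwise content is kept only if its data source is among the selected ids. The same rule in a self-contained form (plain Long ids stand in for the Content and report-settings types):

    import java.util.Set;

    final class DataSourceFilter {

        private final Set<Long> selectedDataSourceIds; // null means "report on everything"

        DataSourceFilter(Set<Long> selectedDataSourceIds) {
            this.selectedDataSourceIds = selectedDataSourceIds;
        }

        // Mirrors shouldFilterFromReport: true means "leave this content out".
        boolean shouldFilter(Long contentDataSourceId) {
            if (selectedDataSourceIds == null || contentDataSourceId == null) {
                return false;
            }
            return !selectedDataSourceIds.contains(contentDataSourceId);
        }
    }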
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2019-2020 Basis Technology Corp.
+* Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -354,7 +354,7 @@ public class PortableCaseReportModule implements ReportModule {
 }
 for (BlackboardAttribute.ATTRIBUTE_TYPE type : BlackboardAttribute.ATTRIBUTE_TYPE.values()) {
 try {
-oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel()));
+oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getBlackboard().getAttributeType(type.getLabel()));
 } catch (TskCoreException ex) {
 handleError("Error looking up attribute name " + type.getLabel(),
 Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()),

@@ -1084,7 +1084,7 @@ public class PortableCaseReportModule implements ReportModule {
 return oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID());
 }

-BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName());
+BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getBlackboard().getArtifactType(oldArtifact.getArtifactTypeName());
 try {
 BlackboardArtifact.Type newCustomType = portableSkCase.getBlackboard().getOrAddArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName());
 oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID());

@@ -1424,7 +1424,7 @@ public class PortableCaseReportModule implements ReportModule {

 // Add the attachment. The account type specified in the constructor will not be used.
 CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(),
-newSourceStr, newFile, Account.Type.EMAIL);
+newSourceStr, newFile, Account.Type.EMAIL, null);
 communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments()));
 }
 catch (BlackboardJsonAttrUtil.InvalidJsonException ex) {
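The CommunicationArtifactsHelper constructor above gains a trailing argument. Consistent with the rest of this commit it is presumably the ingest job id; a report module runs outside ingest, so it passes null (AddManualEvent later in this diff passes null for the same reason). Inside an ingest module the call would look like this sketch, where the meaning of the trailing parameter is inferred rather than shown in this hunk:

    // Inside an ingest module that kept its IngestJobContext from startUp():
    CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
            Case.getCurrentCaseThrows().getSleuthkitCase(),
            moduleName,           // source module name
            srcFile,              // content the messages came from
            Account.Type.EMAIL,   // account type
            context.getJobId());  // ingest job id; null when not in an ingest job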
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2017-2020 Basis Technology Corp.
+* Copyright 2017-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -23,7 +23,6 @@ import java.util.List;
 import javax.xml.bind.DatatypeConverter;
 import org.joda.time.DateTime;
 import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;

@@ -87,6 +86,7 @@ final class CustomArtifactType {
 * blackboard.
 *
 * @param source The artifact source content.
+* @param ingestJobId The ingest job ID.
 *
 * @return A BlackboardArtifact object.
 *

@@ -95,7 +95,7 @@ final class CustomArtifactType {
 * @throws Blackboard.BlackboardException If there is an error posting the
 * artifact to the blackboard.
 */
-static BlackboardArtifact createAndPostInstance(Content source) throws TskCoreException, Blackboard.BlackboardException {
+static BlackboardArtifact createAndPostInstance(Content source, long ingestJobId) throws TskCoreException, Blackboard.BlackboardException {
 List<BlackboardAttribute> attributes = new ArrayList<>();
 attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0));
 attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0));

@@ -131,7 +131,7 @@ final class CustomArtifactType {
 }

 Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard();
-blackboard.postArtifact(artifact, MODULE_NAME);
+blackboard.postArtifact(artifact, MODULE_NAME, ingestJobId);

 return artifact;
 }
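Callers of the new signature supply the job id themselves; the single-artifact postArtifact overload mirrors the collection overload seen earlier. A brief usage sketch, assuming the surrounding module kept its IngestJobContext as the two CustomArtifactsCreator modules below do:

    // From a file ingest module's process() method:
    BlackboardArtifact artifact =
            CustomArtifactType.createAndPostInstance(file, context.getJobId());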
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2017-2020 Basis Technology Corp.
+* Copyright 2017-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -38,7 +38,8 @@ import org.sleuthkit.datamodel.TskCoreException;
 public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceIngestModuleAdapter {

 private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorDataSourceIngestModule.class.getName());
+private IngestJobContext context;

 /**
 * Adds the custom artifact type this module uses to the case database of
 * the current case.

@@ -51,6 +52,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
 */
 @Override
 public void startUp(IngestJobContext context) throws IngestModuleException {
+this.context = context;
 try {
 CustomArtifactType.addToCaseDatabase();
 } catch (Blackboard.BlackboardException ex) {

@@ -70,7 +72,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
 @Override
 public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
 try {
-CustomArtifactType.createAndPostInstance(dataSource);
+CustomArtifactType.createAndPostInstance(dataSource, context.getJobId());
 } catch (TskCoreException | Blackboard.BlackboardException ex) {
 logger.log(Level.SEVERE, String.format("Failed to process data source (obj_id = %d)", dataSource.getId()), ex);
 return ProcessResult.ERROR;
@@ -1,7 +1,7 @@
 /*
 * Autopsy Forensic Browser
 *
-* Copyright 2017-2020 Basis Technology Corp.
+* Copyright 2017-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@@ -37,6 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException;
 final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapter {

 private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorFileIngestModule.class.getName());
+private IngestJobContext context;

 /**
 * Adds the custom artifact type this module uses to the case database of

@@ -50,6 +51,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
 */
 @Override
 public void startUp(IngestJobContext context) throws IngestModuleException {
+this.context = context;
 try {
 CustomArtifactType.addToCaseDatabase();
 } catch (Blackboard.BlackboardException ex) {

@@ -71,7 +73,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
 return ProcessResult.OK;
 }
 try {
-CustomArtifactType.createAndPostInstance(file);
+CustomArtifactType.createAndPostInstance(file, context.getJobId());
 } catch (TskCoreException | Blackboard.BlackboardException ex) {
 logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex);
 return ProcessResult.ERROR;
@@ -25,6 +25,7 @@ import java.util.Collections;
 import java.util.EnumSet;
 import java.util.List;
 import java.util.Set;
+import javax.annotation.concurrent.GuardedBy;
 import org.sleuthkit.autopsy.events.AutopsyEvent;
 import org.sleuthkit.autopsy.ingest.IngestJobSettings;
 import org.sleuthkit.autopsy.ingest.IngestJobStartResult;

@@ -53,7 +54,7 @@ public final class IngestJobRunner {
 */
 public static List<IngestModuleError> runIngestJob(Collection<Content> dataSources, IngestJobSettings settings) throws InterruptedException {
 Object ingestMonitor = new Object();
-IngestJobCompletiontListener completiontListener = new IngestJobCompletiontListener(ingestMonitor);
+IngestJobCompletionListener completiontListener = new IngestJobCompletionListener(ingestMonitor, dataSources.size());
 IngestManager ingestManager = IngestManager.getInstance();
 ingestManager.addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, completiontListener);
 try {

@@ -81,9 +82,12 @@ public final class IngestJobRunner {
 * An ingest job event listener that allows IngestRunner.runIngestJob to
 * block until the specified ingest job is completed.
 */
-private static final class IngestJobCompletiontListener implements PropertyChangeListener {
+private static final class IngestJobCompletionListener implements PropertyChangeListener {

 private final Object ingestMonitor;

+@GuardedBy("ingestMonitor")
+private int remainingJobsCount;

 /**
 * Constructs an ingest job event listener that allows

@@ -92,9 +96,11 @@ public final class IngestJobRunner {
 *
 * @param ingestMonitor A Java object to notify when the ingest job is
 * omcpleted.
+* @param jobsCount The number of jobs to listen for before notifying monitor.
 */
-IngestJobCompletiontListener(Object ingestMonitor) {
+IngestJobCompletionListener(Object ingestMonitor, int jobsCount) {
 this.ingestMonitor = ingestMonitor;
+this.remainingJobsCount = jobsCount;
 }

 /**

@@ -109,7 +115,10 @@ public final class IngestJobRunner {
 String eventType = event.getPropertyName();
 if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
 synchronized (ingestMonitor) {
-ingestMonitor.notify();
+this.remainingJobsCount--;
+if (this.remainingJobsCount <= 0) {
+ingestMonitor.notify();
+}
 }
 }
 }
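The listener rewrite above fixes waiting on multi-data-source runs: each data source produces its own COMPLETED or CANCELLED event, so the listener counts down and only notifies the monitor on the last one. The same pattern as a self-contained, runnable class (the while-loop around wait() is the standard guard against spurious wake-ups):

    final class CountdownMonitor {

        private final Object monitor = new Object();
        private int remaining;

        CountdownMonitor(int jobs) {
            this.remaining = jobs;
        }

        // Called once per completed (or cancelled) job.
        void jobFinished() {
            synchronized (monitor) {
                if (--remaining <= 0) {
                    monitor.notify();
                }
            }
        }

        // Blocks until every job has finished.
        void awaitAll() throws InterruptedException {
            synchronized (monitor) {
                while (remaining > 0) {
                    monitor.wait();
                }
            }
        }
    }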
@@ -157,7 +157,7 @@ public class AddManualEvent extends Action {
 BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null);

 try {
-sleuthkitCase.getBlackboard().postArtifact(artifact, source);
+sleuthkitCase.getBlackboard().postArtifact(artifact, source, null);
 } catch (Blackboard.BlackboardException ex) {
 logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
 new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();
@@ -20,6 +20,7 @@
 package org.sleuthkit.autopsy.commonpropertiessearch;

 import java.sql.SQLException;
+import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

@@ -88,6 +89,26 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
 this.utils.tearDown();
 }

+/**
+* Assert that the given file appears a precise number times in the given
+* data source.
+*
+* @param searchDomain search domain
+* @param objectIdToDataSourceMap mapping of file ids to data source names
+* @param fileName name of file to search for
+* @param dataSource name of data source where file should
+* appear
+* @param instanceCount number of appearances of the given file
+*
+* @return true if a file with the given name exists the specified number of
+* times in the given data source
+*/
+static void assertInstanceExistenceAndCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource, int instanceCount) {
+int foundCount = IntraCaseTestUtils.getInstanceCount(searchDomain, objectIdToDataSourceMap, fileName, dataSource);
+String errorMessage = MessageFormat.format("Expected to find {0} matches for {1} in {2} but found {3}.", instanceCount, fileName, dataSource, foundCount);
+assertEquals(errorMessage, instanceCount, foundCount);
+}

 /**
 * Find all matches & all file types. Confirm file.jpg is found on all three
 * and file.docx is found on two.

@@ -103,25 +124,25 @@

 List<AbstractFile> files = IntraCaseTestUtils.getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -144,25 +165,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -185,25 +206,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -227,25 +248,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -269,25 +290,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -311,25 +332,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -353,25 +374,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -394,25 +415,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);

@@ -435,25 +456,25 @@

 List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);

-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0));
-assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0));
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
+assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);

 } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
 Exceptions.printStackTrace(ex);
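Usage note for the new helper above: because it asserts directly instead of returning a boolean, a failing check now reports the expected and found counts. For example (IMG and SET1 are the test class's constants for the file name and data source; the exact values are not shown in this diff):

    assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
    // On a mismatch JUnit fails with, e.g.:
    // "Expected to find 2 matches for file.jpg in set1 but found 1."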
@@ -179,6 +179,37 @@ class IntraCaseTestUtils {
 return tally == instanceCount;
 }

+/**
+* Verify that the given file appears a precise number times in the given
+* data source.
+*
+* @param searchDomain search domain
+* @param objectIdToDataSourceMap mapping of file ids to data source names
+* @param fileName name of file to search for
+* @param dataSource name of data source where file should appear
+* @param instanceCount number of appearances of the given file
+* @return The count of items found.
+*/
+static int getInstanceCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource) {
+
+int tally = 0;
+
+for (AbstractFile file : searchDomain) {
+
+Long objectId = file.getId();
+
+String name = file.getName();
+
+String dataSourceName = objectIdToDataSourceMap.get(objectId);
+
+if (name.equalsIgnoreCase(fileName) && dataSourceName.equalsIgnoreCase(dataSource)) {
+tally++;
+}
+}
+
+return tally;
+}

 /**
 * Convenience method which verifies that a file exists within a given data
 * source exactly once.
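A self-contained illustration of the tally logic in getInstanceCount: count the files whose name and owning data source both match, case-insensitively. A small stub class stands in for AbstractFile, and the null guard on the map lookup is added here for safety; it is not in the original:

    import java.util.List;
    import java.util.Map;

    final class TallyDemo {

        static final class FileStub {
            final long id;
            final String name;
            FileStub(long id, String name) { this.id = id; this.name = name; }
        }

        static int getInstanceCount(List<FileStub> searchDomain,
                Map<Long, String> objectIdToDataSourceMap,
                String fileName, String dataSource) {
            int tally = 0;
            for (FileStub file : searchDomain) {
                String dataSourceName = objectIdToDataSourceMap.get(file.id);
                if (file.name.equalsIgnoreCase(fileName)
                        && dataSourceName != null
                        && dataSourceName.equalsIgnoreCase(dataSource)) {
                    tally++;
                }
            }
            return tally;
        }

        public static void main(String[] args) {
            List<FileStub> files = List.of(new FileStub(1, "file.jpg"), new FileStub(2, "file.jpg"));
            Map<Long, String> ds = Map.of(1L, "set1", 2L, "set2");
            System.out.println(getInstanceCount(files, ds, "file.jpg", "set1")); // prints 1
        }
    }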
@ -143,7 +143,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
|
||||
private static final int COMPLETED_TIME_COL_PREFERRED_WIDTH = 280;
|
||||
private static final String UPDATE_TASKS_THREAD_NAME = "AID-update-tasks-%d";
|
||||
private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName();
|
||||
private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice";
|
||||
private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice";
|
||||
private static final Logger sysLogger = AutoIngestSystemLogger.getLogger();
|
||||
private static AutoIngestControlPanel instance;
|
||||
private final DefaultTableModel pendingTableModel;
|
||||
@ -160,7 +160,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
|
||||
* Maintain a mapping of each service to it's last status update.
|
||||
*/
|
||||
private final ConcurrentHashMap<String, String> statusByService;

/*
* The enum is used in conjunction with the DefaultTableModel class to
* provide table models for the JTables used to display a view of the
@ -177,7 +177,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage",
"# {0} - unitSeparator",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?",
@ -193,7 +194,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")),
COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")),
STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")),
STAGE_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime")),
STAGE_TIME(Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(DurationCellRenderer.getUnitSeperator())),
STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")),
CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")),
IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")),
@ -250,7 +251,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* controlling automated ingest for a single node within the cluster.
*/
private AutoIngestControlPanel() {

this.statusByService = new ConcurrentHashMap<>();

//Disable the main window so they can only use the dashboard (if we used setVisible the taskBar icon would go away)
@ -290,10 +291,10 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
* Update status of the services on the dashboard
*/
private void displayServicesStatus() {
tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message",
statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message",
statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()),
statusByService.get(ServicesMonitor.Service.MESSAGING.toString())));
String upStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up");
if (statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()).compareTo(upStatus) != 0
@ -304,7 +305,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
tbServicesStatusMessage.setForeground(Color.BLACK);
}
}

/**
* Queries the services monitor and sets the text for the services status
* text box.
@ -411,7 +412,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
column.setMaxWidth(PRIORITY_COLUMN_MAX_WIDTH);
column.setPreferredWidth(PRIORITY_COLUMN_PREFERRED_WIDTH);
column.setWidth(PRIORITY_COLUMN_PREFERRED_WIDTH);

column = pendingTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader());
column.setCellRenderer(new OcrIconCellRenderer());
column.setMaxWidth(OCR_COLUMN_MAX_WIDTH);
@ -469,7 +470,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader()));
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader()));
runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader()));

/*
* Set up a column to display the cases associated with the jobs.
*/
@ -566,7 +567,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader()));
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader()));
completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader()));

/*
* Set up a column to display the cases associated with the jobs.
*/
@ -617,7 +618,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
column.setMaxWidth(STATUS_COL_MAX_WIDTH);
column.setPreferredWidth(STATUS_COL_PREFERRED_WIDTH);
column.setWidth(STATUS_COL_PREFERRED_WIDTH);

/*
* Set up a column to display OCR enabled/disabled flag.
*/
@ -732,30 +733,30 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
}

PropertyChangeListener propChangeListener = (PropertyChangeEvent evt) -> {

String serviceDisplayName = ServicesMonitor.Service.valueOf(evt.getPropertyName()).toString();
String status = evt.getNewValue().toString();

if (status.equals(ServicesMonitor.ServiceStatus.UP.toString())) {
status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up");
} else if (status.equals(ServicesMonitor.ServiceStatus.DOWN.toString())) {
status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down");
sysLogger.log(Level.SEVERE, "Connection to {0} is down", serviceDisplayName); //NON-NLS
}

// if the status update is for an existing service whose status hasn't changed - do nothing.
if (statusByService.containsKey(serviceDisplayName) && status.equals(statusByService.get(serviceDisplayName))) {
return;
}

statusByService.put(serviceDisplayName, status);
displayServicesStatus();
};

// Subscribe to all multi-user services in order to display their status
Set<String> servicesList = new HashSet<>();
servicesList.add(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString());
servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString());
servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString());
servicesList.add(ServicesMonitor.Service.MESSAGING.toString());
ServicesMonitor.getInstance().addSubscriber(servicesList, propChangeListener);
@ -879,7 +880,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
case JOB_COMPLETED:
case CASE_DELETED:
case REPROCESS_JOB:
case OCR_STATE_CHANGE:
case OCR_STATE_CHANGE:
updateExecutor.submit(new UpdateAllJobsTablesTask());
break;
case PAUSED_BY_USER_REQUEST:

@ -53,7 +53,8 @@ final class AutoIngestJobsNode extends AbstractNode {
"AutoIngestJobsNode.dataSource.text=Data Source",
"AutoIngestJobsNode.hostName.text=Host Name",
"AutoIngestJobsNode.stage.text=Stage",
"AutoIngestJobsNode.stageTime.text=Time in Stage",
"# {0} - unitSeparator",
"AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestJobsNode.jobCreated.text=Job Created",
"AutoIngestJobsNode.jobCompleted.text=Job Completed",
"AutoIngestJobsNode.priority.text=Prioritized",
@ -345,8 +346,10 @@ final class AutoIngestJobsNode extends AbstractNode {
jobWrapper.getProcessingHostName()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
status.getDescription()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(),
DurationCellRenderer.longToDurationString((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime()))));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
DurationCellRenderer.longToDurationString(Date.from(Instant.now()).getTime() - status.getStartDate().getTime())));
break;
case COMPLETED_JOB:
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(),
@ -356,7 +359,7 @@ final class AutoIngestJobsNode extends AbstractNode {
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(),
jobWrapper.getErrorsOccurred() ? StatusIconCellRenderer.Status.WARNING : StatusIconCellRenderer.Status.OK));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(),
jobWrapper.getOcrEnabled()));
jobWrapper.getOcrEnabled()));
break;
default:
}
@ -377,7 +380,7 @@ final class AutoIngestJobsNode extends AbstractNode {
PrioritizationAction.DeprioritizeCaseAction deprioritizeCaseAction = new PrioritizationAction.DeprioritizeCaseAction(jobWrapper.getJob());
deprioritizeCaseAction.setEnabled(jobWrapper.getPriority() > 0);
actions.add(deprioritizeCaseAction);

actions.add(new AutoIngestAdminActions.EnableOCR(jobWrapper.getJob()));
AutoIngestAdminActions.DisableOCR disableOCRAction = new AutoIngestAdminActions.DisableOCR(jobWrapper.getJob());
disableOCRAction.setEnabled(jobWrapper.getOcrEnabled() == true);

@ -31,6 +31,7 @@ import org.sleuthkit.autopsy.datamodel.EmptyNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.AutoIngestJobStatus;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.JobNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestNodeRefreshEvents.AutoIngestRefreshEvent;
import org.sleuthkit.autopsy.guiutils.DurationCellRenderer;
import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer;

/**
@ -64,6 +65,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
customize();
}

/**
* Set up the AutoIngestJobsPanel's so that its outlineView is displaying
* the correct columns for the specified AutoIngestJobStatus
@ -99,7 +102,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
outlineView.setPropertyColumns(Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_dataSource_text(),
Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(),
Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text());
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()));
indexOfColumn = getColumnIndexByName(Bundle.AutoIngestJobsNode_caseName_text());
if (indexOfColumn != INVALID_INDEX) {
outline.setColumnSorted(indexOfColumn, true, 1);
@ -124,7 +128,7 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
if (indexOfColumn != INVALID_INDEX) {
outline.getColumnModel().getColumn(indexOfColumn).setPreferredWidth(INITIAL_OCR_WIDTH);
outline.getColumnModel().getColumn(indexOfColumn).setCellRenderer(new OcrIconCellRenderer());
}
}
break;
default:
}
@ -177,8 +181,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
* Update the contents of this AutoIngestJobsPanel while retaining currently
* selected node.
*
* @param refreshEvent - the AutoIngestRefreshEvent which will provide the new
* contents
* @param refreshEvent - the AutoIngestRefreshEvent which will provide the
* new contents
*/
void refresh(AutoIngestRefreshEvent refreshEvent) {
synchronized (this) {
@ -191,7 +195,6 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
}
outline.setRowSelectionAllowed(true);
outline.setFocusable(true);

}
}

@ -78,7 +78,8 @@ AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath=\ Manifest F
AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage
# {0} - unitSeparator
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status
AutoIngestControlPanel.OK=OK
@ -140,7 +141,8 @@ AutoIngestJobsNode.prioritized.false=No
AutoIngestJobsNode.prioritized.true=Yes
AutoIngestJobsNode.priority.text=Prioritized
AutoIngestJobsNode.stage.text=Stage
AutoIngestJobsNode.stageTime.text=Time in Stage
# {0} - unitSeparator
AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestJobsNode.status.text=Status
AutoIngestJobsPanel.waitNode.text=Please Wait...
AutoIngestMetricsDialog.initReportText=Select a date above and click the 'Generate Metrics Report' button to generate\na metrics report.
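The "# {0} - unitSeparator" annotations above make these bundle entries parameterized messages: the NetBeans @Messages processor generates a one-argument accessor on the Bundle class instead of a plain constant. A minimal sketch of how the generated accessor is used, with a literal ":" standing in for the real separator from DurationCellRenderer.getUnitSeperator():

    // Builds a column header such as "Time in Stage (dd:hh:mm:ss)".
    String header = Bundle.AutoIngestJobsNode_stageTime_text(":");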

@ -1084,13 +1084,13 @@ final class FileExportRuleSet implements Serializable, Comparable<FileExportRule
SleuthkitCase caseDb = currentCase.getSleuthkitCase();
BlackboardArtifact.Type artifactType;
try {
artifactType = caseDb.getArtifactType(artifactTypeName);
artifactType = caseDb.getBlackboard().getArtifactType(artifactTypeName);
} catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s artifact type does not exist in case database for %s", artifactTypeName, currentCase.getCaseDirectory()), ex);
}
BlackboardAttribute.Type attributeType;
try {
attributeType = caseDb.getAttributeType(attributeTypeName);
attributeType = caseDb.getBlackboard().getAttributeType(attributeTypeName);
} catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s attribute type does not exist in case database for %s", attributeTypeName, currentCase.getCaseDirectory()), ex);
}
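The hunk above moves artifact and attribute type lookups from SleuthkitCase onto the Blackboard class. A minimal sketch of the new lookup pattern, with illustrative type names:

    Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
    try {
        // Both lookups now live on Blackboard rather than SleuthkitCase.
        BlackboardArtifact.Type artifactType = blackboard.getArtifactType("TSK_WEB_DOWNLOAD");
        BlackboardAttribute.Type attributeType = blackboard.getAttributeType("TSK_URL");
    } catch (TskCoreException ex) {
        // Thrown if the named type cannot be fetched from the case database.
    }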

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -177,7 +177,7 @@ public class ObjectDetectectionFileIngestModule extends FileIngestModuleAdapter
/*
* Index the artifact for keyword search.
*/
blackboard.postArtifact(artifact, MODULE_NAME);
blackboard.postArtifact(artifact, MODULE_NAME, jobId);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
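Throughout this commit, postArtifact gains a third argument that ties the posted artifact to the ingest job that produced it. A minimal sketch of the pattern as the file ingest modules use it; the jobId variable is assumed to hold the value of IngestJobContext.getJobId(), and may be null when no ingest job applies:

    try {
        Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        // The ingest job id lets listeners associate the artifact with its job.
        blackboard.postArtifact(artifact, MODULE_NAME, jobId);
    } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
        logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex);
    }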

@ -1,7 +1,7 @@
/*
* Autopsy
*
* Copyright 2018 Basis Technology Corp.
* Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -390,7 +390,7 @@ class VolatilityProcessor {

try {
// index the artifact for keyword search
blackboard.postArtifact(volArtifact, VOLATILITY);
blackboard.postArtifact(volArtifact, VOLATILITY, null);
} catch (Blackboard.BlackboardException ex) {
errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName));
/*

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule):

# Create a GeoArtifactsHelper for this file.
geoArtifactHelper = GeoArtifactsHelper(
self.skCase, self.moduleName, None, file)
self.skCase, self.moduleName, None, file, context.getJobId())

if self.writeDebugMsgs:
self.log(Level.INFO, "Processing " + file.getUniquePath() +
@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule):

art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)

self.blackboard.postArtifact(art, self.moduleName)
self.blackboard.postArtifact(art, self.moduleName, context.getJobId())

except Blackboard.BlackboardException as e:
self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " +

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -104,9 +104,8 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
# NOTE: originally commented out

try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME)
blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc())

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -102,9 +102,8 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))
try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME)
blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc())

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -83,12 +83,12 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
callLogDb.getDBFile(),
Account.Type.PHONE, Account.Type.PHONE, selfAccountId )
Account.Type.PHONE, Account.Type.PHONE, selfAccountId, context.getJobId())
else:
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
callLogDb.getDBFile(),
Account.Type.PHONE )
Account.Type.PHONE, context.getJobId())

for tableName in CallLogAnalyzer._tableNames:
try:

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
return
for contactDb in contactsDbs:
try:
self.__findContactsInDB(contactDb, dataSource)
self.__findContactsInDB(contactDb, dataSource, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -86,7 +86,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
"""
Queries the given contact database and adds Contacts to the case.
"""
def __findContactsInDB(self, contactDb, dataSource):
def __findContactsInDB(self, contactDb, dataSource, context):
if not contactDb:
return

@ -97,7 +97,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
contactDb.getDBFile(),
Account.Type.PHONE )
Account.Type.PHONE, context.getJobId())

# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
# sorted by name, so phonenumber/email would be consecutive for a person if they exist.
@ -158,7 +158,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
phoneNumber, # phoneNumber,
None, # homePhoneNumber,
None, # mobilePhoneNumber,
emailAddr) # emailAddr
emailAddr, context.getJobId()) # emailAddr

except SQLException as ex:
self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -148,11 +148,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK)
Account.Type.FACEBOOK, context.getJobId())

## get the other contacts/friends
contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0")
@ -492,11 +492,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId )
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK)
Account.Type.FACEBOOK, context.getJobId())

self.analyzeMessages(threadsDb, threadsDBHelper)
self.analyzeCallLogs(threadsDb, threadsDBHelper)

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -76,7 +76,7 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
try:
jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
self.__findGeoLocationsInDB(jFile.toString(), abstractFile)
self.__findGeoLocationsInDB(jFile.toString(), abstractFile, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -84,13 +84,13 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
# Error finding Google map locations.
pass

def __findGeoLocationsInDB(self, databasePath, abstractFile):
def __findGeoLocationsInDB(self, databasePath, abstractFile, context):
if not databasePath:
return

try:
artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(),
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile)
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile, context.getJobId())
Class.forName("org.sqlite.JDBC") # load JDBC driver
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
statement = connection.createStatement()
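As the call sites above suggest, GeoArtifactsHelper now takes the ingest job id as a trailing constructor argument. A minimal Java sketch of the same construction; the module name and source file are illustrative, and the program name may be passed as null when it is not known:

    GeoArtifactsHelper geoHelper = new GeoArtifactsHelper(
            Case.getCurrentCaseThrows().getSleuthkitCase(),
            "GPX Parser",         // module name (illustrative)
            null,                 // program name, if known
            sourceFile,           // file the coordinates were parsed from
            context.getJobId());  // ingest job id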

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -109,12 +109,12 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
friendsDb.getDBFile(),
Account.Type.IMO, Account.Type.IMO, selfAccountId )
Account.Type.IMO, Account.Type.IMO, selfAccountId, context.getJobId())
else:
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
friendsDb.getDBFile(),
Account.Type.IMO )
Account.Type.IMO, context.getJobId())
contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends")
if contactsResultSet is not None:
while contactsResultSet.next():

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer):
try:
current_case = Case.getCurrentCaseThrows()
libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, libraryDb.getDBFile())
self._MODULE_NAME, libraryDb.getDBFile(), context.getJobId())
queryString = "SELECT doc_id, purchase_time FROM ownership"
ownershipResultSet = libraryDb.runQuery(queryString)
if ownershipResultSet is not None:

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -117,7 +117,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_and_message_db.getDBFile(), Account.Type.LINE)
contact_and_message_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_contacts(contact_and_message_db, helper)
self.parse_messages(contact_and_message_db, helper, current_case)

@ -125,7 +125,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
calllog_db.getDBFile(), Account.Type.LINE)
calllog_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_calllogs(dataSource, calllog_db, helper)

except NoCurrentCaseException as ex:

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -89,7 +89,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for cookiesDb in cookiesDbs:
try:
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, cookiesDb.getDBFile())
self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId())
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
if cookiesResultSet is not None:
while cookiesResultSet.next():
@ -119,7 +119,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for historyDb in historyDbs:
try:
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile())
self._MODULE_NAME, historyDb.getDBFile(), context.getJobId())
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
if historyResultSet is not None:
while historyResultSet.next():
@ -148,7 +148,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for downloadsDb in downloadsDbs:
try:
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, downloadsDb.getDBFile())
self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId())
queryString = "SELECT target_path, start_time, url FROM downloads"\
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
downloadsResultSet = downloadsDb.runQuery(queryString)
@ -177,7 +177,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for autofillDb in autofillDbs:
try:
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, autofillDb.getDBFile())
self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId())
autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill")
if autofillsResultSet is not None:
while autofillsResultSet.next():
@ -205,7 +205,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for webFormAddressDb in webFormAddressDbs:
try:
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, webFormAddressDb.getDBFile())
self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId())
queryString = """
SELECT street_address, city, state, zipcode, country_code,
date_modified, first_name, last_name, number, email

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2018 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -72,7 +72,7 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()

skCase = Case.getCurrentCase().getSleuthkitCase()
geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile())
geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile(), context.getJobId())

poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois"
poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString)
@ -96,9 +96,8 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)

try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, self._MODULE_NAME)
blackboard.postArtifact(artifact, self._MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc())

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -87,7 +87,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for sbrowserDb in sbrowserDbs:
try:
sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, sbrowserDb.getDBFile())
self._MODULE_NAME, sbrowserDb.getDBFile(), context.getJobId())
bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL")
if bookmarkResultSet is not None:
while bookmarkResultSet.next():
@ -115,7 +115,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for cookiesDb in cookiesDbs:
try:
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, cookiesDb.getDBFile())
self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId())
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
if cookiesResultSet is not None:
while cookiesResultSet.next():
@ -145,7 +145,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for historyDb in historyDbs:
try:
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile())
self._MODULE_NAME, historyDb.getDBFile(), context.getJobId())
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
if historyResultSet is not None:
while historyResultSet.next():
@ -174,7 +174,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for downloadsDb in downloadsDbs:
try:
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, downloadsDb.getDBFile())
self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId())
queryString = "SELECT target_path, start_time, url FROM downloads"\
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
downloadsResultSet = downloadsDb.runQuery(queryString)
@ -203,7 +203,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for autofillDb in autofillDbs:
try:
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, autofillDb.getDBFile())
self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId())
queryString = """
SELECT name, value, count, date_created
FROM autofill
@ -236,7 +236,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for webFormAddressDb in webFormAddressDbs:
try:
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, webFormAddressDb.getDBFile())
self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId())
"""
Autofill form data is split across multiple tables. The query below joins the various tables.
"""

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -85,7 +85,7 @@ class ShareItAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile(),
Account.Type.SHAREIT)
Account.Type.SHAREIT, context.getJobId())

queryString = """
SELECT history_type, device_id, device_name, description, timestamp, file_path

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -129,13 +129,13 @@ class SkypeAnalyzer(general.AndroidComponentAnalyzer):
if user_account_instance is None:
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
skype_db.getDBFile(), Account.Type.SKYPE
skype_db.getDBFile(), Account.Type.SKYPE, context.getJobId()
)
else:
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
skype_db.getDBFile(), Account.Type.SKYPE,
Account.Type.SKYPE, user_account_instance
Account.Type.SKYPE, user_account_instance, context.getJobId()
)
self.parse_contacts(skype_db, helper)
self.parse_calllogs(skype_db, helper)
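As in the Skype hunk above, CommunicationArtifactsHelper keeps two constructor shapes, one with and one without a self account, and the ingest job id is appended to both. A minimal Java sketch, with illustrative values:

    // Without a known self account:
    CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
            caseDb, "Skype Parser", dbFile, Account.Type.SKYPE, context.getJobId());

    // With a known self account:
    CommunicationArtifactsHelper helperWithSelf = new CommunicationArtifactsHelper(
            caseDb, "Skype Parser", dbFile, Account.Type.SKYPE,
            Account.Type.SKYPE, selfAccountId, context.getJobId());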

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -72,7 +72,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
tangoDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "tc.db", True, self._PACKAGE_NAME)
for tangoDbFile in tangoDbFiles:
try:
self.__findTangoMessagesInDB(tangoDbFile, dataSource)
self.__findTangoMessagesInDB(tangoDbFile, dataSource, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -80,7 +80,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
# Error finding Tango messages.
pass

def __findTangoMessagesInDB(self, tangoDb, dataSource):
def __findTangoMessagesInDB(self, tangoDb, dataSource, context):
if not tangoDb:
return

@ -91,7 +91,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
tangoDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
tangoDb.getDBFile(),
Account.Type.TANGO )
Account.Type.TANGO, context.getJobId())

resultSet = tangoDb.runQuery(
"SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;")

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -80,12 +80,12 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
messageDb.getDBFile(),
Account.Type.PHONE, Account.Type.IMO, selfAccountId )
Account.Type.PHONE, Account.Type.IMO, selfAccountId, context.getJobId())
else:
messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
messageDb.getDBFile(),
Account.Type.PHONE )
Account.Type.PHONE, context.getJobId())

uuid = UUID.randomUUID().toString()
messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;")

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -91,7 +91,7 @@ class TextNowAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
textnow_db.getDBFile(), Account.Type.TEXTNOW
textnow_db.getDBFile(), Account.Type.TEXTNOW, context.getJobId()
)
self.parse_contacts(textnow_db, helper)
self.parse_calllogs(textnow_db, helper)

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -91,7 +91,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_and_calllog_db.getDBFile(), Account.Type.VIBER)
contact_and_calllog_db.getDBFile(), Account.Type.VIBER, context.getJobId())
self.parse_contacts(contact_and_calllog_db, helper)
self.parse_calllogs(contact_and_calllog_db, helper)

@ -100,7 +100,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
message_db.getDBFile(), Account.Type.VIBER)
message_db.getDBFile(), Account.Type.VIBER, context.getJobId())
self.parse_messages(message_db, helper, current_case)

except NoCurrentCaseException as ex:
@ -131,9 +131,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
attributes = ArrayList()
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name()))
artifact = contacts_db.getDBFile().newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes)

# Post the artifact to blackboard
current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME)
current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME, context.getJobId())

contacts_parser.close()
except SQLException as ex:

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -145,14 +145,14 @@ class WhatsAppAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_db.getDBFile(), Account.Type.WHATSAPP)
contact_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId())
self.parse_contacts(contact_db, helper)

for calllog_and_message_db in calllog_and_message_dbs:
current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME,
calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP)
calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId())
self.parse_calllogs(calllog_and_message_db, helper)
self.parse_messages(dataSource, calllog_and_message_db, helper, current_case)

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2016-2020 Basis Technology Corp.
Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -78,7 +78,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
wwfDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "WordsFramework", True, self._PACKAGE_NAME)
for wwfDbFile in wwfDbFiles:
try:
self.__findWWFMessagesInDB(wwfDbFile, dataSource)
self.__findWWFMessagesInDB(wwfDbFile, dataSource, context)
except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
self._logger.log(Level.SEVERE, traceback.format_exc())
@ -88,7 +88,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
self._logger.log(Level.SEVERE, traceback.format_exc())
pass

def __findWWFMessagesInDB(self, wwfDb, dataSource):
def __findWWFMessagesInDB(self, wwfDb, dataSource, context):
if not wwfDb:
return

@ -98,7 +98,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
wwfDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME,
wwfDb.getDBFile(),
wwfAccountType )
wwfAccountType, context.getJobId())

uuid = UUID.randomUUID().toString()

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -91,11 +91,11 @@ class XenderAnalyzer(general.AndroidComponentAnalyzer):
if selfAccountId is not None:
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transactionDb.getDBFile(),
Account.Type.XENDER, Account.Type.XENDER, selfAccountId )
Account.Type.XENDER, Account.Type.XENDER, selfAccountId, context.getJobId())
else:
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transactionDb.getDBFile(),
Account.Type.XENDER)
Account.Type.XENDER, context.getJobId())

queryString = """
SELECT f_path, f_display_name, f_size_str, c_start_time, c_direction, c_session_id,

@ -1,7 +1,7 @@
"""
Autopsy Forensic Browser

Copyright 2019-2020 Basis Technology Corp.
Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org

Licensed under the Apache License, Version 2.0 (the "License");
@ -81,7 +81,7 @@ class ZapyaAnalyzer(general.AndroidComponentAnalyzer):
#
transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transferDb.getDBFile(),
Account.Type.ZAPYA)
Account.Type.ZAPYA, context.getJobId())

queryString = "SELECT device, name, direction, createtime, path, title FROM transfer"
transfersResultSet = transferDb.runQuery(queryString)

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -426,7 +426,7 @@ class AdHocSearchChildFactory extends ChildFactory<KeyValue> {
final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." : queryStr;
try {
progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true));
hits.process(progress, null, this, false, saveResults);
hits.process(progress, null, this, false, saveResults, null);
} finally {
finalizeWorker();
}

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 - 2017 Basis Technology Corp.
* Copyright 2014 - 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -564,7 +564,7 @@ final class IngestSearchRunner {
subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress);

// Create blackboard artifacts
newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true);
newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId());

} //if has results

@ -648,7 +648,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
}
if (!bbartifacts.isEmpty()) {
try {
Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName);
Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName, jobId);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
// Log error and return to continue processing
logger.log(Level.WARNING, String.format("Unable to post blackboard artifacts for file %s.", aFile.getParentPath() + aFile.getName()), ex); //NON-NLS
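The jobId used above is typically captured once when the module starts up. A minimal sketch of how a file ingest module might hold onto it; the field name is illustrative, not this module's actual code:

    private Long jobId;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        // Capture the ingest job id so artifacts posted later can reference it.
        jobId = context.getJobId();
    }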

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -35,7 +35,8 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;;
import org.sleuthkit.autopsy.ingest.IngestServices;
;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -64,7 +65,7 @@ class QueryResults {
* and publishing an event to notify subscribers of the blackboard posts.
*
* The KeywordSearchQuery is used to do the blackboard posts.
*
*
* @param query The query.
*/
QueryResults(KeywordSearchQuery query) {
@ -141,9 +142,10 @@ class QueryResults {
* messages inbox if there is a keyword hit in the text
* extracted from the text source object.
* @param saveResults Flag whether to save search results as KWS artifacts.
*
* @param ingestJobId The numeric identifier of the ingest job within which
* the artifacts are being created, may be null.
*/
void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<?, ?> worker, boolean notifyInbox, boolean saveResults) {
void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<?, ?> worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) {
/*
* Initialize the progress indicator to the number of keywords that will
* be processed.
@ -218,15 +220,15 @@ class QueryResults {
} catch (TskCoreException | NoCurrentCaseException tskCoreException) {
logger.log(Level.SEVERE, "Failed to get text source object for keyword hit", tskCoreException); //NON-NLS
}

if ((content != null) && saveResults) {
/*
* Post an artifact for the hit to the blackboard.
* Post an artifact for the hit to the blackboard.
*/
BlackboardArtifact artifact = query.createKeywordHitArtifact(content, keyword, hit, snippet, query.getKeywordList().getName());

/*
* Send an ingest inbox message for the hit.
* Send an ingest inbox message for the hit.
*/
if (null != artifact) {
hitArtifacts.add(artifact);
@ -253,7 +255,7 @@ class QueryResults {
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
Blackboard blackboard = tskCase.getBlackboard();

blackboard.postArtifacts(hitArtifacts, MODULE_NAME);
blackboard.postArtifacts(hitArtifacts, MODULE_NAME, ingestJobId);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS
}
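The new ingestJobId parameter threads through from both callers shown earlier: IngestSearchRunner passes job.getJobId() during ingest, while the ad hoc search path passes null because no ingest job is in progress. The two call shapes, abbreviated for illustration:

    // During ingest: associate keyword hits with the ingest job.
    newResults.process(null, subProgress, this, keywordList.getIngestMessages(), true, job.getJobId());

    // Ad hoc search from the UI: no ingest job, so pass null.
    hits.process(progress, null, this, false, saveResults, null);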
|
||||
|
@ -2,7 +2,7 @@ OpenIDE-Module-Display-Category=Ingest Module
|
||||
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Name=RecentActivity
OpenIDE-Module-Short-Description=Recent Activity finder ingest module
Chrome.moduleName=Chromium
Chrome.moduleName=Chromium Analyzer
Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files.
Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files.
Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1}
@ -19,7 +19,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f
Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files.
Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
ExtractIE.moduleName.text=Internet Explorer
ExtractIE.moduleName.text=Internet Explorer Analyzer
ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks.
ExtractIE.parentModuleName.noSpace=RecentActivity
ExtractIE.parentModuleName=Recent Activity
@ -35,7 +35,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer
ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1}
ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1}
ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry.
ExtractRegistry.moduleName.text=Registry
ExtractRegistry.moduleName.text=Windows Registry Analyzer
ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0}
ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1}
ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1}
@ -43,7 +43,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity
ExtractRegistry.programName=RegRipper
ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1}
ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1}
Firefox.moduleName=FireFox
Firefox.moduleName=FireFox Analyzer
Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox.
Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found.
Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1}
@ -85,12 +85,12 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File
RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1}
RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer
SearchEngineURLQueryAnalyzer.engineName.none=NONE
SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE
SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3}
SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity
SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity
ExtractWebAccountType.moduleName.text=Web Account Type
ExtractWebAccountType.moduleName.text=Web Account Type Analyzer
ExtractWebAccountType.parentModuleName=Recent Activity
UsbDeviceIdMapper.parseAndLookup.text=Product: {0}

@ -4,7 +4,6 @@ cannotParseXml=Unable to parse XML file:
ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis.
ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis.
ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s.
ChromeCacheExtractor.moduleName=ChromeCacheExtractor
# {0} - module name
# {1} - row number
# {2} - table length
@ -13,25 +12,26 @@ ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries
DataSourceUsage_AndroidMedia=Android Media Card
DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card
DataSourceUsage_FlashDrive=Flash Drive
# {0} - OS name
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
DataSourceUsageAnalyzer.parentModuleName=Recent Activity
DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer
DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine
DomainCategoryRunner_moduleName_text=DomainCategoryRunner
DomainCategoryRunner_moduleName_text=Domain Category Analyzer
DomainCategoryRunner_parentModuleName=Recent Activity
DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types
Extract.indexError.message=Failed to index artifact for keyword search.
Extract.noOpenCase.errMsg=No open case available.
ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history
ExtractEdge_Module_Name=Microsoft Edge
ExtractEdge_Module_Name=Microsoft Edge Analyzer
ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file
ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file
ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer
ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file
# {0} - sub module name
ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history
ExtractOs.androidOs.label=Android
ExtractOs.androidVolume.label=OS Drive (Android)
ExtractOs.debianLinuxOs.label=Linux (Debian)
ExtractOs.debianLinuxVolume.label=OS Drive (Linux Debian)
ExtractOs.displayName=OS Info Analyzer
ExtractOs.fedoraLinuxOs.label=Linux (Fedora)
ExtractOs.fedoraLinuxVolume.label=OS Drive (Linux Fedora)
ExtractOs.gentooLinuxOs.label=Linux (Gentoo)
@ -42,7 +42,6 @@ ExtractOs.novellSUSEOs.label=Linux (Novell SUSE)
ExtractOs.novellSUSEVolume.label=OS Drive (Linux Novell SUSE)
ExtractOs.osx.label=Mac OS X
ExtractOs.osxVolume.label=OS Drive (OS X)
ExtractOs.parentModuleName=Recent Activity
ExtractOs.redhatLinuxOs.label=Linux (Redhat)
ExtractOs.redhatLinuxVolume.label=OS Drive (Linux Redhat)
ExtractOs.slackwareLinuxOs.label=Linux (Slackware)
@ -59,16 +58,17 @@ ExtractOs.windowsVolume.label=OS Drive (Windows)
ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog)
ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog)
ExtractOS_progressMessage=Checking for OS
# {0} - sub module name
ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files
ExtractPrefetch_module_name=Windows Prefetch Extractor
ExtractRecycleBin_module_name=Recycle Bin
ExtractPrefetch_module_name=Windows Prefetch Analyzer
ExtractRecycleBin_module_name=Recycle Bin Analyzer
ExtractRecycleBin_Recyle_Bin_Display_Name=Recycle Bin
ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.
ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files
ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files
ExtractSafari_Module_Name=Safari
ExtractSafari_Module_Name=Safari Analyzer
ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program
ExtractSru_module_name=System Resource Usage Extractor
ExtractSru_module_name=System Resource Usage Analyzer
ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program
ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file
ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file
@ -77,6 +77,7 @@ ExtractSru_process_errormsg_write_srudb_dat=Unable to write srudb.dat file
ExtractWebAccountType.role.admin=Administrator role
ExtractWebAccountType.role.moderator=Moderator role
ExtractWebAccountType.role.user=User role
ExtractZone_displayName=\ Zone Identifier Analyzer
ExtractZone_Internet=Internet Zone
ExtractZone_Local_Intranet=Local Intranet Zone
ExtractZone_Local_Machine=Local Machine Zone
@ -86,12 +87,12 @@ ExtractZone_progress_Msg=Extracting :Zone.Identifer files
ExtractZone_Restricted=Restricted Sites Zone
ExtractZone_Trusted=Trusted Sites Zone
Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis.
Jumplist_module_name=Windows Jumplist Extractor
Jumplist_module_name=Windows Jumplist Analyzer
OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Name=RecentActivity
OpenIDE-Module-Short-Description=Recent Activity finder ingest module
Chrome.moduleName=Chromium
Chrome.moduleName=Chromium Analyzer
Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files.
Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files.
Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1}
@ -108,7 +109,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f
Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files.
Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
ExtractIE.moduleName.text=Internet Explorer
ExtractIE.moduleName.text=Internet Explorer Analyzer
ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks.
ExtractIE.parentModuleName.noSpace=RecentActivity
ExtractIE.parentModuleName=Recent Activity
@ -124,7 +125,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer
ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1}
ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1}
ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry.
ExtractRegistry.moduleName.text=Registry
ExtractRegistry.moduleName.text=Windows Registry Analyzer
ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0}
ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1}
ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1}
@ -132,7 +133,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity
ExtractRegistry.programName=RegRipper
ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1}
ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1}
Firefox.moduleName=FireFox
Firefox.moduleName=FireFox Analyzer
Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox.
Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found.
Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1}
@ -212,6 +213,7 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File
RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1}
RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity
RecentDocumentsByLnk_displayName=Recent Documents by Link Analyzer
Recently_Used_Artifacts_Adobe=Recently opened according to Adobe MRU
Recently_Used_Artifacts_Applets=Recently opened according to Applets registry key
Recently_Used_Artifacts_ArcHistory=Recently opened by 7Zip
@ -223,14 +225,15 @@ Recently_Used_Artifacts_Winrar=Recently opened according to WinRAR MRU
Registry_System_Bam=Recently Executed according to Background Activity Moderator (BAM)
RegRipperFullNotFound=Full version RegRipper executable not found.
RegRipperNotFound=Autopsy RegRipper executable not found.
# {0} - file name
SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}.
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer
SearchEngineURLQueryAnalyzer.engineName.none=NONE
SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE
SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3}
SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity
SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity
ExtractWebAccountType.moduleName.text=Web Account Type
ExtractWebAccountType.moduleName.text=Web Account Type Analyzer
ExtractWebAccountType.parentModuleName=Recent Activity
Shellbag_Artifact_Display_Name=Shell Bags
Shellbag_Key_Attribute_Display_Name=Key

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Copyright 2019-2021 Basis Technology Corp.
*
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
*
@ -151,15 +151,14 @@ final class ChromeCacheExtractor {
}

@NbBundle.Messages({
"ChromeCacheExtractor.moduleName=ChromeCacheExtractor",
"# {0} - module name",
"# {1} - row number",
"# {2} - table length",
"# {3} - cache path",
"ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}"
})
ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar ) {
moduleName = Bundle.ChromeCacheExtractor_moduleName();
ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
moduleName = NbBundle.getMessage(Chromium.class, "Chrome.moduleName");
this.dataSource = dataSource;
this.context = context;
this.progressBar = progressBar;
@ -415,7 +414,7 @@ final class ChromeCacheExtractor {
progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_artifacts_msg(), artifactsAdded.size()));
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
try {
blackboard.postArtifacts(artifactsAdded, moduleName);
blackboard.postArtifacts(artifactsAdded, moduleName, context.getJobId());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.WARNING, String.format("Failed to post cacheIndex artifacts "), ex); //NON-NLS
}

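The recurring API change in the ChromeCacheExtractor hunks is that org.sleuthkit.datamodel.Blackboard.postArtifacts() now takes the ingest job ID as a third argument, tying posted artifacts to the ingest job that produced them. A minimal sketch of the new call pattern, reusing the names from the hunk above (an illustration of the pattern, not part of the commit):

    // Post the batch under the current ingest job; the job ID lets event
    // consumers distinguish artifacts produced by concurrent ingest jobs.
    Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
    try {
        blackboard.postArtifacts(artifactsAdded, moduleName, context.getJobId());
    } catch (Blackboard.BlackboardException ex) {
        logger.log(Level.WARNING, "Failed to post cache artifacts", ex); //NON-NLS
    }
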
@ -54,8 +54,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
@ -69,7 +67,7 @@ import org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper;
* Chromium recent activity extraction
*/
class Chromium extends Extract {

private static final String HISTORY_QUERY = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " //NON-NLS
+ "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) AS from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; //NON-NLS
private static final String COOKIE_QUERY = "SELECT name, value, host_key, expires_utc,last_access_utc, creation_utc FROM cookies"; //NON-NLS
@ -94,13 +92,13 @@ class Chromium extends Extract {
private static final String WEB_DATA_FILE_NAME = "Web Data";
private static final String UC_BROWSER_NAME = "UC Browser";
private static final String ENCRYPTED_FIELD_MESSAGE = "The data was encrypted.";

private Boolean databaseEncrypted = false;
private Boolean fieldEncrypted = false;

private final Logger logger = Logger.getLogger(this.getClass().getName());
private Content dataSource;
private IngestJobContext context;
private final IngestJobContext context;

private static final Map<String, String> BROWSERS_MAP = ImmutableMap.<String, String>builder()
.put("Microsoft Edge", "Microsoft/Edge/User Data/Default")
@ -127,20 +125,19 @@ class Chromium extends Extract {
"Progress_Message_Chrome_Logins=Chrome Logins Browser {0}",
"Progress_Message_Chrome_Cache=Chrome Cache",})

Chromium() {
super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName"));
Chromium(IngestJobContext context) {
super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName"), context);
this.context = context;
}

@Override
public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource;
this.context = context;
dataFound = false;
long ingestJobId = context.getJobId();

for (Map.Entry<String, String> browser : BROWSERS_MAP.entrySet()) {
String browserName = browser.getKey();
String browserLocation = browser.getValue();
progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_History", browserName));
this.getHistory(browser.getKey(), browser.getValue(), ingestJobId);
if (context.dataSourceIngestIsCancelled()) {
@ -181,14 +178,14 @@ class Chromium extends Extract {
progressBar.progress(Bundle.Progress_Message_Chrome_Cache());
ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context, progressBar);
chromeCacheExtractor.processCaches();

}

/**
* Query for history databases and add artifacts
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
*/
private void getHistory(String browser, String browserLocation, long ingestJobId) {
FileManager fileManager = currentCase.getServices().getFileManager();
@ -202,7 +199,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -238,13 +235,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome web history artifacts file '%s' (id=%d).",
historyFile.getName(), historyFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile",
this.getName(), historyFile.getName()));
this.getDisplayName(), historyFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome web history artifacts file '%s' (id=%d).",
temps, historyFile.getName(), historyFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile",
this.getName(), historyFile.getName()));
this.getDisplayName(), historyFile.getName()));
continue;
}
File dbFile = new File(temps);
@ -253,8 +250,8 @@ class Chromium extends Extract {
break;
}
List<HashMap<String, Object>> tempList;
tempList = this.dbConnect(temps, HISTORY_QUERY);
logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS
tempList = this.querySQLiteDb(temps, HISTORY_QUERY);
logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
@ -276,7 +273,7 @@ class Chromium extends Extract {
(NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS

try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes));
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, historyFile, bbattributes));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create history artifact for file (%d)", historyFile.getId()), ex);
}
@ -291,9 +288,10 @@ class Chromium extends Extract {

/**
* Search for bookmark files and make artifacts.
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
* @param browserLocation
* @param ingestJobId The ingest job id.
*/
private void getBookmark(String browser, String browserLocation, long ingestJobId) {
FileManager fileManager = currentCase.getServices().getFileManager();
@ -307,7 +305,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -319,7 +317,6 @@ class Chromium extends Extract {
dataFound = true;
Collection<BlackboardArtifact> bbartifacts = new ArrayList<>();
int j = 0;

while (j < bookmarkFiles.size()) {
AbstractFile bookmarkFile = bookmarkFiles.get(j++);
if ((bookmarkFile.getSize() == 0) || (bookmarkFile.getName().toLowerCase().contains("-slack"))
@ -335,17 +332,17 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome bookmark artifacts file '%s' (id=%d).",
bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile",
this.getName(), bookmarkFile.getName()));
this.getDisplayName(), bookmarkFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome bookmark artifacts file '%s' (id=%d).",
temps, bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile",
this.getName(), bookmarkFile.getName()));
this.getDisplayName(), bookmarkFile.getName()));
continue;
}

logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS
logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getDisplayName(), temps}); //NON-NLS
File dbFile = new File(temps);
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
@ -374,7 +371,7 @@ class Chromium extends Extract {
} catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) {
logger.log(Level.WARNING, "Error parsing Json from Chrome Bookmark.", ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile3",
this.getName(), bookmarkFile.getName()));
this.getDisplayName(), bookmarkFile.getName()));
continue;
}

@ -419,14 +416,14 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), domain));

try {
bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, bookmarkFile, bbattributes));
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create bookmark artifact for file (%d)", bookmarkFile.getId()), ex);
}

}

if(!context.dataSourceIngestIsCancelled()) {

if (!context.dataSourceIngestIsCancelled()) {
postArtifacts(bbartifacts);
}
bbartifacts.clear();
@ -436,9 +433,10 @@ class Chromium extends Extract {

/**
* Queries for cookie files and adds artifacts
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
* @param ingestJobId The ingest job id.
*/
private void getCookie(String browser, String browserLocation, long ingestJobId) {

@ -455,7 +453,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -479,13 +477,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome cookie artifacts file '%s' (id=%d).",
cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile",
this.getName(), cookiesFile.getName()));
this.getDisplayName(), cookiesFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome cookie artifacts file '%s' (id=%d).",
temps, cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile",
this.getName(), cookiesFile.getName()));
this.getDisplayName(), cookiesFile.getName()));
continue;
}
File dbFile = new File(temps);
@ -494,8 +492,8 @@ class Chromium extends Extract {
break;
}

List<HashMap<String, Object>> tempList = this.dbConnect(temps, COOKIE_QUERY);
logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS
List<HashMap<String, Object>> tempList = this.querySQLiteDb(temps, COOKIE_QUERY);
logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
@ -519,7 +517,7 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), domain));

try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes));
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create cookie artifact for file (%d)", cookiesFile.getId()), ex);
}
@ -535,9 +533,10 @@ class Chromium extends Extract {

/**
* Queries for download files and adds artifacts
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
* @param ingestJobId The ingest job id.
*/
private void getDownload(String browser, String browserLocation, long ingestJobId) {
FileManager fileManager = currentCase.getServices().getFileManager();
@ -551,7 +550,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -577,13 +576,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome download artifacts file '%s' (id=%d).",
downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1",
this.getName(), downloadFile.getName()));
this.getDisplayName(), downloadFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome download artifacts file '%s' (id=%d).",
temps, downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1",
this.getName(), downloadFile.getName()));
this.getDisplayName(), downloadFile.getName()));
continue;
}
File dbFile = new File(temps);
@ -595,12 +594,12 @@ class Chromium extends Extract {
List<HashMap<String, Object>> tempList;

if (isChromePreVersion30(temps)) {
tempList = this.dbConnect(temps, DOWNLOAD_QUERY);
tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY);
} else {
tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V30);
tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY_V30);
}

logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS
logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
String fullPath = result.get("full_path").toString(); //NON-NLS
@ -628,9 +627,9 @@ class Chromium extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME,
RecentActivityExtracterModuleFactory.getModuleName(), browser));

// find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact.
// find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact.
try {
BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes);
BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, downloadFile, bbattributes);
bbartifacts.add(webDownloadArtifact);
String normalizedFullPath = FilenameUtils.normalize(fullPath, true);
for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) {
@ -652,9 +651,10 @@ class Chromium extends Extract {

/**
* Gets user logins from Login Data sqlite database
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
* @param ingestJobId The ingest job id.
*/
private void getLogins(String browser, String browserLocation, long ingestJobId) {

@ -670,7 +670,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -694,13 +694,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome login artifacts file '%s' (id=%d).",
loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), loginDataFile.getName()));
this.getDisplayName(), loginDataFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome login artifacts file '%s' (id=%d).",
temps, loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), loginDataFile.getName()));
this.getDisplayName(), loginDataFile.getName()));
continue;
}
File dbFile = new File(temps);
@ -708,8 +708,8 @@ class Chromium extends Extract {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, LOGIN_QUERY);
logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS
List<HashMap<String, Object>> tempList = this.querySQLiteDb(temps, LOGIN_QUERY);
logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();

@ -741,7 +741,7 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), browser));

try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes));
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create service account artifact for file (%d)", loginDataFile.getId()), ex);
}
@ -758,9 +758,10 @@ class Chromium extends Extract {
/**
* Gets and parses Autofill data from 'Web Data' database, and creates
* TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS artifacts
*
* @param browser
* @param browserLocation
* @param ingestJobId The ingest job id.
* @param ingestJobId The ingest job id.
*/
private void getAutofill(String browser, String browserLocation, long ingestJobId) {

@ -776,7 +777,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getAutofills.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg);
this.addErrorMessage(this.getDisplayName() + ": " + msg);
return;
}

@ -801,13 +802,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome Autofill artifacts file '%s' (id=%d).",
webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getAutofill.errMsg.errAnalyzingFiles",
this.getName(), webDataFile.getName()));
this.getDisplayName(), webDataFile.getName()));
continue;
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome Web data file '%s' (id=%d).",
tempFilePath, webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), webDataFile.getName()));
this.getDisplayName(), webDataFile.getName()));
continue;
}
File dbFile = new File(tempFilePath);
@ -826,20 +827,20 @@ class Chromium extends Extract {
getFormAddressArtifacts(webDataFile, tempFilePath, isSchemaV8X);
if (databaseEncrypted) {
String comment = String.format("%s Autofill Database Encryption Detected", browser);
Collection<BlackboardAttribute> bbattributes = Arrays.asList(
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT,
RecentActivityExtracterModuleFactory.getModuleName(), comment));
Collection<BlackboardAttribute> bbattributes = Arrays.asList(
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT,
RecentActivityExtracterModuleFactory.getModuleName(), comment));

bbartifacts.add(
webDataFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE,
null, null, comment, bbattributes).getAnalysisResult());
bbartifacts.add(
webDataFile.newAnalysisResult(
BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE,
null, null, comment, bbattributes).getAnalysisResult());
}
} catch (NoCurrentCaseException | TskCoreException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Error adding artifacts to the case database "
+ "for chrome file %s [objId=%d]", webDataFile.getName(), webDataFile.getId()), ex);
}

dbFile.delete();
}

@ -852,8 +853,8 @@ class Chromium extends Extract {
* Extracts and returns autofill artifacts from the given database file
*
* @param webDataFile - the database file in the data source
* @param dbFilePath - path to a temporary file where the DB file is
* extracted
* @param dbFilePath - path to a temporary file where the DB file is
* extracted
* @param isSchemaV8X - indicates of the DB schema version is 8X or greater
*
* @return collection of TSK_WEB_FORM_AUTOFILL artifacts
@ -866,8 +867,8 @@ class Chromium extends Extract {
String autoFillquery = (isSchemaV8X) ? AUTOFILL_QUERY_V8X
: AUTOFILL_QUERY;

List<HashMap<String, Object>> autofills = this.dbConnect(dbFilePath, autoFillquery);
logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, autofills.size()}); //NON-NLS
List<HashMap<String, Object>> autofills = this.querySQLiteDb(dbFilePath, autoFillquery);
logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, autofills.size()}); //NON-NLS
for (HashMap<String, Object> result : autofills) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();

@ -902,10 +903,10 @@ class Chromium extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT,
RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE));
}

// Add an artifact
try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes));
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create web form autopfill artifact for file (%d)", webDataFile.getId()), ex);
}
@ -920,8 +921,8 @@ class Chromium extends Extract {
* database file
*
* @param webDataFile - the database file in the data source
* @param dbFilePath - path to a temporary file where the DB file is
* extracted
* @param dbFilePath - path to a temporary file where the DB file is
* extracted
* @param isSchemaV8X - indicates of the DB schema version is 8X or greater
*
* @return collection of TSK_WEB_FORM_ADDRESS artifacts
@ -936,16 +937,16 @@ class Chromium extends Extract {
WebBrowserArtifactsHelper helper = new WebBrowserArtifactsHelper(
Case.getCurrentCaseThrows().getSleuthkitCase(),
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
webDataFile
webDataFile, context.getJobId()
);

// Get Web form addresses
List<HashMap<String, Object>> addresses = this.dbConnect(dbFilePath, webformAddressQuery);
logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, addresses.size()}); //NON-NLS
List<HashMap<String, Object>> addresses = this.querySQLiteDb(dbFilePath, webformAddressQuery);
logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, addresses.size()}); //NON-NLS
for (HashMap<String, Object> result : addresses) {

fieldEncrypted = false;

String first_name = processFields(result.get("first_name"));
String middle_name = processFields(result.get("middle_name"));
String last_name = processFields(result.get("last_name"));
@ -968,7 +969,7 @@ class Chromium extends Extract {
long use_date = 0;

if (isSchemaV8X) {

full_name = processFields(result.get("full_name"));
street_address = processFields(result.get("street_address"));
date_modified = result.get("date_modified").toString() != null ? Long.valueOf(result.get("date_modified").toString()) : 0;
@ -995,7 +996,7 @@ class Chromium extends Extract {
if (fieldEncrypted) {
otherAttributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT,
RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE)); //NON-NLS

}
}

@ -1007,9 +1008,12 @@ class Chromium extends Extract {
}

/**
* Check the type of the object and if it is bytes then it is encrypted and return the string and
* set flag that field and file are encrypted
* @param dataValue Object to be checked, the object is from a database result set
* Check the type of the object and if it is bytes then it is encrypted and
* return the string and set flag that field and file are encrypted
*
* @param dataValue Object to be checked, the object is from a database
* result set
*
* @return the actual string or an empty string
*/
private String processFields(Object dataValue) {
@ -1018,14 +1022,14 @@ class Chromium extends Extract {
fieldEncrypted = true;
databaseEncrypted = true;
}

return dataValue.toString() != null ? dataValue.toString() : "";

}

private boolean isChromePreVersion30(String temps) {
String query = "PRAGMA table_info(downloads)"; //NON-NLS
List<HashMap<String, Object>> columns = this.dbConnect(temps, query);
List<HashMap<String, Object>> columns = this.querySQLiteDb(temps, query);
for (HashMap<String, Object> col : columns) {
if (col.get("name").equals("url")) { //NON-NLS
return true;

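Two further patterns repeat throughout the Chromium.java hunks: the deprecated ARTIFACT_TYPE enum constants are replaced by BlackboardArtifact.Type constants, and the inherited dbConnect() helper is renamed querySQLiteDb() (getName() likewise becomes getDisplayName()). A condensed sketch of one extraction step in its new shape, reusing the temps, historyFile, and bbartifacts names from the code above (attribute building elided; illustrative, not part of the commit):

    List<HashMap<String, Object>> rows = this.querySQLiteDb(temps, HISTORY_QUERY);
    for (HashMap<String, Object> row : rows) {
        Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
        // ... populate bbattributes from the row, as in the hunks above ...
        bbartifacts.add(createArtifactWithAttributes(
                BlackboardArtifact.Type.TSK_WEB_HISTORY, historyFile, bbattributes));
    }
    if (!context.dataSourceIngestIsCancelled()) {
        postArtifacts(bbartifacts); // batch post; a cancelled job posts nothing
    }
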
@ -42,7 +42,7 @@ import org.sleuthkit.datamodel.TskData;
* systems the images may have been used by.
*
*/
@Messages({"DataSourceUsageAnalyzer.parentModuleName=Recent Activity"})
@Messages({"DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer"})
class DataSourceUsageAnalyzer extends Extract {

private static final Logger logger = Logger.getLogger(DataSourceUsageAnalyzer.class.getName());
@ -56,37 +56,38 @@ class DataSourceUsageAnalyzer extends Extract {
{".android_secure", "android", "audio",
"photos", "dcim", "music", "pictures", "videos"}; //NON-NLS
private Content dataSource;
private final IngestJobContext context;

DataSourceUsageAnalyzer(IngestJobContext context) {
super(Bundle.DataSourceUsageAnalyzer_displayName(), context);
this.context = context;
}

@Messages({
"# {0} - OS name",
"DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})",
"Progress_Message_Analyze_Usage=Data Sources Usage Analysis",})
@Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource;
try {
progressBar.progress(Bundle.Progress_Message_Analyze_Usage());
createDataSourceUsageArtifacts(context);
createDataSourceUsageArtifacts();
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to check if datasource contained a volume with operating system specific files", ex);
}

}

private void createDataSourceUsageArtifacts(IngestJobContext context) throws TskCoreException {

private void createDataSourceUsageArtifacts() throws TskCoreException {
createOSInfoDataSourceUsageArtifacts();

if (context.dataSourceIngestIsCancelled()) {
return;
}

createAndroidMediaCardArtifacts();

if (context.dataSourceIngestIsCancelled()) {
return;
}

createDJIDroneDATArtitifacts();
}

@ -146,9 +147,9 @@ class DataSourceUsageAnalyzer extends Extract {
}
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
Bundle.DataSourceUsageAnalyzer_parentModuleName(),
getRAModuleName(),
dataSourceUsageDescription)); //NON-NLS
postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes));
postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes));
}

/**
@ -160,7 +161,7 @@ class DataSourceUsageAnalyzer extends Extract {
*/
private void checkIfOsSpecificVolume(ExtractOs.OS_TYPE osType) throws TskCoreException {
for (String filePath : osType.getFilePaths()) {
for (AbstractFile file : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource,
for (AbstractFile file : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource,
FilenameUtils.getName(filePath), FilenameUtils.getPath(filePath))) {
createDataSourceUsageArtifact(osType.getDsUsageLabel());
return;
@ -199,7 +200,7 @@ class DataSourceUsageAnalyzer extends Extract {
return;
}

if(hasAndroidMediaCardRootNames()) {
if (hasAndroidMediaCardRootNames()) {
return;
}

@ -214,12 +215,12 @@ class DataSourceUsageAnalyzer extends Extract {

/**
* Checks the data source for any android media card root files
*
*
* @return True if root files were found
*
* @throws TskCoreException
*
* @throws TskCoreException
*/
private boolean hasAndroidMediaCardRootNames() throws TskCoreException{
private boolean hasAndroidMediaCardRootNames() throws TskCoreException {
FileManager fileManager = currentCase.getServices().getFileManager();
for (String fileName : ANDROID_MEDIACARD_ROOT_FILENAMES) {
for (AbstractFile file : fileManager.findFiles(dataSource, fileName, "/")) { // NON-NLS

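DataSourceUsageAnalyzer follows the same constructor refactoring as the other extractors: the IngestJobContext is passed to the constructor and stored in a final field, so process() drops its context parameter and helper methods read the stored field for cancellation checks. A sketch of the pattern for a hypothetical extractor (the class name and display name are illustrative; the signatures mirror the diff):

    class ExampleAnalyzer extends Extract {

        private final IngestJobContext context;

        ExampleAnalyzer(IngestJobContext context) {
            super("Example Analyzer", context); // display name plus job context
            this.context = context;
        }

        @Override
        void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            if (context.dataSourceIngestIsCancelled()) {
                return; // cancellation is now checked via the stored context
            }
            // ... extraction work ...
        }
    }
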
@ -44,7 +44,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
@ -59,7 +58,7 @@ import org.sleuthkit.autopsy.url.analytics.DomainCategory;
|
||||
* is created.
|
||||
*/
|
||||
@Messages({
|
||||
"DomainCategoryRunner_moduleName_text=DomainCategoryRunner",
|
||||
"DomainCategoryRunner_moduleName_text=Domain Category Analyzer",
|
||||
"DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types",
|
||||
"DomainCategoryRunner_parentModuleName=Recent Activity"
|
||||
})
|
||||
@ -98,13 +97,15 @@ class DomainCategoryRunner extends Extract {
|
||||
BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)
|
||||
.map(BlackboardArtifact.Type::new)
|
||||
.collect(Collectors.toList());
|
||||
private final IngestJobContext context;
|
||||
|
||||
/**
|
||||
* Get seconds from epoch from the mapping for the attribute type id.
|
||||
*
|
||||
* @param attrMap A mapping of attribute type id to BlackboardAttribute for
|
||||
* an artifact.
|
||||
* @param attrMap A mapping of attribute type id to BlackboardAttribute
|
||||
* for an artifact.
|
||||
* @param attrTypeId The attribute type id to fetch.
|
||||
*
|
||||
* @return The time in seconds from epoch or 0 if cannot be found.
|
||||
*/
|
||||
private static long getTimeOrZero(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) {
|
||||
@ -119,9 +120,10 @@ class DomainCategoryRunner extends Extract {
|
||||
/**
|
||||
* Get string for attribute type id or "" if cannot be determined.
|
||||
*
|
||||
* @param attrMap A mapping of attribute type id to BlackboardAttribute for
|
||||
* an artifact.
|
||||
* @param attrMap A mapping of attribute type id to BlackboardAttribute
|
||||
* for an artifact.
|
||||
* @param attrTypeId The attribute type id to fetch.
|
||||
*
|
||||
* @return The string value or "" if cannot be determined or null.
|
||||
*/
|
||||
private static String getStringOrEmpty(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) {
|
||||
@ -174,14 +176,14 @@ class DomainCategoryRunner extends Extract {
|
||||
};
|
||||
|
||||
private Content dataSource;
|
||||
private IngestJobContext context;
|
||||
private List<DomainCategorizer> domainProviders = Collections.emptyList();
|
||||
|
||||
/**
|
||||
* Main constructor.
|
||||
*/
|
||||
DomainCategoryRunner() {
|
||||
|
||||
DomainCategoryRunner(IngestJobContext context) {
|
||||
super(Bundle.DomainCategoryRunner_moduleName_text(), context);
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -189,6 +191,7 @@ class DomainCategoryRunner extends Extract {
|
||||
* determined, returns null.
|
||||
*
|
||||
* @param urlString The url string.
|
||||
*
|
||||
* @return The host or null if cannot be determined.
|
||||
*/
|
||||
private String getHost(String urlString) {
|
||||
@ -218,7 +221,8 @@ class DomainCategoryRunner extends Extract {
|
||||
* Attempts to find the category for the given host/domain.
|
||||
*
|
||||
* @param domain The domain for the item.
|
||||
* @param host The host for the item.
|
||||
* @param host The host for the item.
|
||||
*
|
||||
* @return The domain category result or null if none can be determined.
|
||||
*/
|
||||
private DomainCategory findCategory(String domain, String host) {
|
||||
@ -252,8 +256,10 @@ class DomainCategoryRunner extends Extract {
|
||||
* Main constructor.
|
||||
*
|
||||
* @param abstractFile The parent file of the artifact.
|
||||
* @param host The host of the artifact found in the url attribute.
|
||||
* @param domain The domain of the artifact in the TSK_DOMAIN attribute.
|
||||
* @param host The host of the artifact found in the url
|
||||
* attribute.
|
||||
* @param domain The domain of the artifact in the TSK_DOMAIN
|
||||
* attribute.
|
||||
*/
|
||||
ArtifactHost(AbstractFile abstractFile, String host, String domain) {
|
||||
this.abstractFile = abstractFile;
|
||||
@ -288,8 +294,10 @@ class DomainCategoryRunner extends Extract {
|
||||
* parent file.
|
||||
*
|
||||
* @param artifact The web artifact to parse.
|
||||
*
|
||||
* @return The pertinent information or null if important information cannot
|
||||
* be determined.
|
||||
* be determined.
|
||||
*
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
private ArtifactHost getDomainAndHost(BlackboardArtifact artifact) throws TskCoreException {
|
||||
@ -337,9 +345,10 @@ class DomainCategoryRunner extends Extract {
|
||||
* item is added to the set.
|
||||
*
|
||||
* @param items The set of items.
|
||||
* @param item The item whose existence will be checked in the set.
|
||||
* @param item The item whose existence will be checked in the set.
|
||||
*
|
||||
* @return True if item is already contained in 'items'. False if the is
|
||||
* null or if not contained in 'items'.
|
||||
* null or if not contained in 'items'.
|
||||
*/
|
||||
private static boolean isDuplicateOrAdd(Set<String> items, String item) {
|
||||
if (StringUtils.isBlank(item)) {
|
||||
@ -428,8 +437,8 @@ class DomainCategoryRunner extends Extract {
|
||||
/**
|
||||
* Adds a TSK_WEB_CATEGORIZATION artifact for the given information.
|
||||
*
|
||||
* @param artHost Pertinent details for the artifact (i.e. host, domain,
|
||||
* parent file).
|
||||
* @param artHost Pertinent details for the artifact (i.e. host,
|
||||
* domain, parent file).
|
||||
* @param domainCategory The category for this host/domain.
|
||||
*/
|
||||
    private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) throws TskCoreException {
@ -439,60 +448,58 @@ class DomainCategoryRunner extends Extract {
                new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HOST, moduleName, artHost.getHost()),
                new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, moduleName, domainCategory)
        );
        postArtifact(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes));
        postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes));
    }

    @Override
    public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
    public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        this.dataSource = dataSource;
        this.context = context;

        progressBar.progress(Bundle.DomainCategoryRunner_Progress_Message_Domain_Types());
        this.findDomainTypes();
    }

    @Override
    void configExtractor() throws IngestModule.IngestModuleException {
    void startUp() throws IngestModule.IngestModuleException {
        // lookup all providers, filter null providers, and sort providers
        Collection<? extends DomainCategorizer> lookupCollection = Lookup.getDefault().lookupAll(DomainCategorizer.class);
        Collection<? extends DomainCategorizer> lookupList = (lookupCollection == null) ?
                Collections.emptyList() :
                lookupCollection;
        Collection<? extends DomainCategorizer> lookupList = (lookupCollection == null)
                ? Collections.emptyList()
                : lookupCollection;

        // this will be the class instance of the foundProviders
        List<DomainCategorizer> foundProviders = new ArrayList<>();

        // find the custom domain categories provider if present and add it first to the list
        lookupList.stream()
                .filter(categorizer -> categorizer.getClass().getName().contains(CUSTOM_CATEGORIZER_PATH))
                .findFirst()
                .ifPresent((provider) -> foundProviders.add(provider));

        // add the default priority categorizer
        foundProviders.add(new DefaultPriorityDomainCategorizer());

        // add all others except for the custom web domain categorizer, the default priority
        // categorizer and the default categorizer
        lookupList.stream()
                .filter(categorizer -> categorizer != null)
                .filter(categorizer -> {
                    String className = categorizer.getClass().getName();
                    return !className.contains(CUSTOM_CATEGORIZER_PATH) &&
                            !className.equals(DefaultPriorityDomainCategorizer.class.getName()) &&
                            !className.equals(DefaultDomainCategorizer.class.getName());
                    return !className.contains(CUSTOM_CATEGORIZER_PATH)
                            && !className.equals(DefaultPriorityDomainCategorizer.class.getName())
                            && !className.equals(DefaultDomainCategorizer.class.getName());
                })
                .sorted((a, b) -> a.getClass().getName().compareToIgnoreCase(b.getClass().getName()))
                .forEach(foundProviders::add);

        // add the default categorizer last
        foundProviders.add(new DefaultDomainCategorizer());

        for (DomainCategorizer provider : foundProviders) {
            try {
                provider.initialize();
            } catch (DomainCategorizerException ex) {
                throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " +
                        provider.getClass().getSimpleName(), ex);
                throw new IngestModule.IngestModuleException("There was an error instantiating the provider: "
                        + provider.getClass().getSimpleName(), ex);
            }
        }

@ -500,7 +507,7 @@ class DomainCategoryRunner extends Extract {
    }

    @Override
    public void complete() {
    public void shutDown() {
        if (this.domainProviders != null) {
            for (DomainCategorizer provider : this.domainProviders) {
                try {
@ -510,7 +517,6 @@ class DomainCategoryRunner extends Extract {
            }
        }

        logger.info("Domain categorization completed."); //NON-NLS
        super.shutDown();
    }
}

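Since DomainCategoryRunner.startUp() discovers categorizers through Lookup.getDefault().lookupAll(DomainCategorizer.class) and orders them (custom categories first, then the default-priority categorizer, then third-party providers alphabetically, then the default last), a third-party provider only needs to register itself on the lookup path to be picked up. A minimal sketch follows; only lookupAll(), initialize(), and DomainCategorizerException are confirmed by this diff, so the @ServiceProvider registration and the getCategory() signature shown here are assumptions:

    import org.openide.util.lookup.ServiceProvider;

    // Hypothetical third-party categorizer picked up by the Lookup scan above.
    @ServiceProvider(service = DomainCategorizer.class)
    public class ExampleNewsCategorizer implements DomainCategorizer {

        @Override
        public void initialize() throws DomainCategorizerException {
            // Load any lookup data here; a throw aborts Recent Activity startup.
        }

        // Assumed lookup method: return a category, or null to let the next
        // provider in the sorted list try.
        @Override
        public DomainCategory getCategory(String domain, String host) throws DomainCategorizerException {
            return (host != null && host.endsWith("example-news.com"))
                    ? new DomainCategory("example-news.com", "News") : null;
        }
    }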
@ -1,19 +1,19 @@
/*
 *
 * Autopsy Forensic Browser
 *
 *
 * Copyright 2012-2021 Basis Technology Corp.
 *
 *
 * Copyright 2012 42six Solutions.
 * Contact: aebadirad <at> 42six <dot> com
 * Project Contact/Architect: carrier <at> sleuthkit <dot> org
 *
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -35,9 +35,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
@ -47,222 +45,195 @@ import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT;
import org.sleuthkit.datamodel.BlackboardArtifact.Category;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

abstract class Extract {

    protected Case currentCase;
    protected SleuthkitCase tskCase;
    protected Blackboard blackboard;
    private final Logger logger = Logger.getLogger(this.getClass().getName());
    protected final Case currentCase;
    protected final SleuthkitCase tskCase;
    private static final Logger logger = Logger.getLogger(Extract.class.getName());
    private final ArrayList<String> errorMessages = new ArrayList<>();
    private String moduleName = "";
    boolean dataFound = false;
    private RAOsAccountCache osAccountCache = null;

    Extract() {
        this("");
    }

    Extract(String moduleName) {
        this.moduleName = moduleName;
    }

    final void init() throws IngestModuleException {
        try {
            currentCase = Case.getCurrentCaseThrows();
            tskCase = currentCase.getSleuthkitCase();
            blackboard = tskCase.getBlackboard();
        } catch (NoCurrentCaseException ex) {
            throw new IngestModuleException(Bundle.Extract_indexError_message(), ex);
        }
        configExtractor();
    }

    /**
     * Override to add any module-specific configuration
     *
     * @throws IngestModuleException
     */
    void configExtractor() throws IngestModuleException {
    }
    private final String displayName;
    protected boolean dataFound = false;
    private final IngestJobContext context;

    /**
     * Extractor process method intended to mirror the Ingest process method.
     *
     * Subclasses should overload just the abstract version of the method.
     *
     * @param dataSource     The data source object to ingest.
     * @param context        The context for the current job.
     * @param progressBar    A handle to the progressBar for the module to update with status.
     * @param osAccountCache The OsAccountCache.
     */
    void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar, RAOsAccountCache osAccountCache) {
        this.osAccountCache = osAccountCache;
        process(dataSource, context, progressBar);
    }

    abstract void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar);

    void complete() {
    }

    /**
     * Returns a List of string error messages from the inheriting class
     * Constructs the super class part of an extractor used by the Recent
     * Activity ingest module to do its analysis for an ingest job.
     *
     * @return errorMessages returns all error messages logged
     * @param displayName The display name of the extractor.
     * @param context     The ingest job context.
     */
    Extract(String displayName, IngestJobContext context) {
        this.displayName = displayName;
        this.context = context;
        currentCase = Case.getCurrentCase();
        tskCase = currentCase.getSleuthkitCase();
    }

    /**
     * Starts up this extractor. Called by the Recent Activity ingest module in
     * its startUp() method.
     *
     * @throws IngestModuleException The exception is thrown if there is an
     *                               error starting up the extractor.
     */
    void startUp() throws IngestModuleException {
    }

    /**
     * Analyzes the given data source. Called by the Recent Activity ingest
     * module in its process() method.
     *
     * @param dataSource  The data source to be analyzed.
     * @param progressBar A progress object that can be used to report analysis
     *                    progress.
     */
    abstract void process(Content dataSource, DataSourceIngestModuleProgress progressBar);

    /**
     * Shuts down this extractor. Called by the Recent Activity ingest module in
     * its shutDown() method.
     */
    void shutDown() {
    }

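The refactor visible above replaces the old init()/configExtractor()/complete() lifecycle with a constructor that takes the ingest job context plus startUp()/process()/shutDown() hooks. A minimal sketch of a concrete extractor under the new contract (the class name and body are illustrative, not part of this commit; only members shown in this diff are used):

    // Illustrative extractor following the new lifecycle.
    final class ExtractExample extends Extract {

        ExtractExample(IngestJobContext context) {
            super("Example Analyzer", context); // display name + job context
        }

        @Override
        void startUp() throws IngestModuleException {
            // One-time setup; a throw here aborts the Recent Activity module's startUp().
        }

        @Override
        void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            setFoundData(false);
            // ... locate files, build artifacts, batch-post via postArtifacts() ...
        }

        @Override
        void shutDown() {
            // Release any per-job resources.
        }
    }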
    /**
     * Gets any error messages generated by the extractor during processing.
     *
     * @return errorMessages The error message strings.
     */
    List<String> getErrorMessages() {
        return errorMessages;
        return Collections.unmodifiableList(errorMessages);
    }

    /**
     * Adds a string to the error message list
     * Adds an error message to the collection of error messages generated by
     * the extractor during processing.
     *
     * @param message is an error message represented as a string
     * @param message The error message.
     */
    protected void addErrorMessage(String message) {
        errorMessages.add(message);
    }

    /**
     * Generic method for creating artifacts.
     * Creates an artifact with the given attributes.
     *
     * @param type       The type of artifact.
     * @param file       The file the artifact originated from.
     * @param attributes A list of the attributes to associate with the
     *                   artifact.
     *
     * @return The newly created artifact.
     */
    BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException {
        return createArtifactWithAttributes(new BlackboardArtifact.Type(type), content, attributes);
    }

    /**
     * Generic method for creating artifacts.
     *
     * @param type       The type of artifact.
     * @param content    The file the artifact originated from.
     * @param attributes A list of the attributes to associate with the
     *                   artifact.
     * @param type       The artifact type.
     * @param content    The artifact source/parent.
     * @param attributes The attributes.
     *
     * @return The newly created artifact.
     *
     * @throws TskCoreException
     * @throws TskCoreException This exception is thrown if there is an issue
     *                          creating the artifact.
     */
    BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException {
        switch (type.getCategory()) {
            case DATA_ARTIFACT:
                return content.newDataArtifact(type, attributes);
            case ANALYSIS_RESULT:
                return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult();
            default:
                throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName());
        if (type.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) {
            return content.newDataArtifact(type, attributes);
        } else if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
            return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult();
        } else {
            throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName());
        }
    }

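With the rewrite above, one helper routes on the artifact type's category, so callers no longer need to know whether a type yields a data artifact or an analysis result. A hedged usage sketch (sourceFile and the attribute collections are placeholders):

    // TSK_WEB_HISTORY is in the DATA_ARTIFACT category, so this path calls
    // newDataArtifact(); TSK_WEB_CATEGORIZATION is an ANALYSIS_RESULT, so the
    // same helper calls newAnalysisResult() with Score.SCORE_UNKNOWN.
    BlackboardArtifact history = createArtifactWithAttributes(
            BlackboardArtifact.Type.TSK_WEB_HISTORY, sourceFile, historyAttributes);
    BlackboardArtifact webCategory = createArtifactWithAttributes(
            BlackboardArtifact.Type.TSK_WEB_CATEGORIZATION, sourceFile, categoryAttributes);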
    /**
     * Returns an associated artifact for the given artifact.
     * Creates an associated artifact for a given artifact.
     *
     * @param content  The content to create the artifact from.
     * @param artifact The artifact to associate the new artifact with.
     * @param content  The artifact source/parent.
     * @param artifact The artifact with which to associate the new artifact.
     *
     * @return The newly created artifact.
     *
     * @throws TskCoreException
     * @throws TskCoreException This exception is thrown if there is an issue
     *                          creating the artifact.
     */
    BlackboardArtifact createAssociatedArtifact(Content content, BlackboardArtifact artifact) throws TskCoreException {
        return createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT,
                RecentActivityExtracterModuleFactory.getModuleName(), artifact.getArtifactID())));
        BlackboardAttribute attribute = new BlackboardAttribute(BlackboardAttribute.Type.TSK_ASSOCIATED_ARTIFACT, getRAModuleName(), artifact.getArtifactID());
        return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(attribute));
    }

    /**
     * Method to post a blackboard artifact to the blackboard.
     * Posts an artifact to the blackboard.
     *
     * @param bbart Blackboard artifact to be indexed. Nothing will occur if a null object is passed in.
     * @param artifact The artifact.
     */
    @Messages({"Extract.indexError.message=Failed to index artifact for keyword search.",
        "Extract.noOpenCase.errMsg=No open case available."})
    void postArtifact(BlackboardArtifact bbart) {
        if (bbart == null) {
            return;
        }

        try {
            // index the artifact for keyword search
            blackboard.postArtifact(bbart, getName());
        } catch (Blackboard.BlackboardException ex) {
            logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bbart.getDisplayName(), ex); //NON-NLS
        }
    }

    /**
     * Method to post a list of BlackboardArtifacts to the blackboard.
     *
     * @param artifacts A list of artifacts. If the list is empty or null, the function will return.
     */
    void postArtifacts(Collection<BlackboardArtifact> artifacts) {
        if (artifacts == null || artifacts.isEmpty()) {
            return;
        }

        try {
            blackboard.postArtifacts(artifacts, getName());
        } catch (Blackboard.BlackboardException ex) {
            logger.log(Level.SEVERE, "Unable to post blackboard artifacts", ex); //NON-NLS
    void postArtifact(BlackboardArtifact artifact) {
        if (artifact != null && !context.dataArtifactIngestIsCancelled()) {
            postArtifacts(Collections.singleton(artifact));
        }
    }

    /**
     * Posts a collection of artifacts to the blackboard.
     *
     * @param artifacts The artifacts.
     */
    void postArtifacts(Collection<BlackboardArtifact> artifacts) {
        if (artifacts != null && !artifacts.isEmpty() && !context.dataArtifactIngestIsCancelled()) {
            try {
                tskCase.getBlackboard().postArtifacts(artifacts, RecentActivityExtracterModuleFactory.getModuleName(), context.getJobId());
            } catch (Blackboard.BlackboardException ex) {
                logger.log(Level.SEVERE, "Failed to post artifacts", ex); //NON-NLS
            }
        }
    }

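The new postArtifacts() posts through the case blackboard with the module name and ingest job ID, and becomes a no-op once data-artifact ingest is cancelled. The extractors later in this commit all follow the same collect-then-post pattern, sketched here with illustrative names:

    // Accumulate artifacts while parsing, then post the whole batch once,
    // guarded by the same cancellation check the extractors below use.
    Collection<BlackboardArtifact> bbartifacts = new ArrayList<>();
    // ... bbartifacts.add(createArtifactWithAttributes(...)) per parsed row ...
    if (!context.dataSourceIngestIsCancelled()) {
        postArtifacts(bbartifacts);
    }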
    /**
     * Connects to a SQLite database file (e.g., an application database) and
     * executes a query.
     *
     * Returns a List from a result set based on sql query. This is used to
     * query sqlite databases storing user recent activity data, such as in
     * firefox sqlite db
     *
     * @param path  is the string path to the sqlite db file
     * @param query is a sql string query that is to be run
     * @param path  The path to the SQLite database file
     * @param query The SQL query to be executed.
     *
     * @return list is the ArrayList that contains the resultset information in
     *         it that the query obtained
     * @return A list of maps that represents the query results. Each map entry
     *         consists of a column name as a key and an Object as a column
     *         value, with empty strings substituted for nulls.
     */
    protected List<HashMap<String, Object>> dbConnect(String path, String query) {
        ResultSet temprs;
    protected List<HashMap<String, Object>> querySQLiteDb(String path, String query) {
        ResultSet resultSet;
        List<HashMap<String, Object>> list;
        String connectionString = "jdbc:sqlite:" + path; //NON-NLS
        SQLiteDBConnect tempdbconnect = null;
        SQLiteDBConnect dbConnection = null;
        try {
            tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS
            temprs = tempdbconnect.executeQry(query);
            list = this.resultSetToArrayList(temprs);
            dbConnection = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS
            resultSet = dbConnection.executeQry(query);
            list = resultSetToArrayList(resultSet);
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "Error while trying to read into a sqlite db." + connectionString, ex); //NON-NLS
            return Collections.<HashMap<String, Object>>emptyList();
        }
        finally {
            if (tempdbconnect != null) {
                tempdbconnect.closeConnection();
        } finally {
            if (dbConnection != null) {
                dbConnection.closeConnection();
            }
        }
        return list;
    }

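The renamed querySQLiteDb() returns each row as a column-name-to-value map, which keeps callers free of JDBC plumbing. A hedged usage sketch (the table and columns are illustrative, not from this commit):

    // Query a browser database previously copied out of the image; map keys
    // are column names, and NULL column values come back as empty strings.
    List<HashMap<String, Object>> rows = querySQLiteDb(
            tempDbFile.getAbsolutePath(), "SELECT url, title FROM moz_places"); //NON-NLS
    for (HashMap<String, Object> row : rows) {
        String url = row.get("url").toString();
        // ... build BlackboardAttributes and an artifact per row ...
    }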
    /**
     * Returns a List of AbstractFile objects from TSK based on sql query.
     * Converts a JDBC result set to a list of maps. Each map entry consists of
     * a column name as a key and an Object as a column value, with empty
     * strings substituted for nulls.
     *
     * @param rs is the resultset that needs to be converted to an arraylist
     * @param rs The result set.
     *
     * @return list returns the arraylist built from the converted resultset
     * @return The list of maps.
     */
    private List<HashMap<String, Object>> resultSetToArrayList(ResultSet rs) throws SQLException {
        ResultSetMetaData md = rs.getMetaData();
        int columns = md.getColumnCount();
        List<HashMap<String, Object>> list = new ArrayList<>(50);
        List<HashMap<String, Object>> results = new ArrayList<>(50);
        while (rs.next()) {
            HashMap<String, Object> row = new HashMap<>(columns);
            for (int i = 1; i <= columns; ++i) {
@ -272,63 +243,76 @@ abstract class Extract {
                    row.put(md.getColumnName(i), rs.getObject(i));
                }
            }
            list.add(row);
            results.add(row);
        }

        return list;
        return results;
    }

    /**
     * Returns the name of the inheriting class
     * Gets the display name of this extractor.
     *
     * @return Gets the moduleName set in the moduleName data member
     * @return The display name.
     */
    protected String getName() {
        return moduleName;
    protected String getDisplayName() {
        return displayName;
    }

    /**
     * Get the display name of the Recent Activity module.
     *
     * @return The display name.
     */
    protected String getRAModuleName() {
        return RecentActivityExtracterModuleFactory.getModuleName();
    }

    /**
     * Returns the state of foundData
     * @return
     * Gets the value of a flag indicating whether or not this extractor found
     * any data.
     *
     * @return True or false.
     */
    public boolean foundData() {
        return dataFound;
    }

    /**
     * Sets the value of foundData
     * @param foundData
     * Sets the value of a flag indicating whether or not this extractor found
     * any data.
     *
     * @param foundData True or false.
     */
    protected void setFoundData(boolean foundData){
    protected void setFoundData(boolean foundData) {
        dataFound = foundData;
    }

    /**
     * Returns the current case instance
     * @return Current case instance
     * Gets the current case.
     *
     * @return The current case.
     */
    protected Case getCurrentCase(){
    protected Case getCurrentCase() {
        return this.currentCase;
    }

|
||||
* Creates a list of attributes for a history artifact.
|
||||
* Creates a list of attributes for a web history artifact.
|
||||
*
|
||||
* @param url
|
||||
* @param accessTime Time url was accessed
|
||||
* @param referrer referred url
|
||||
* @param title title of the page
|
||||
* @param programName module name
|
||||
* @param domain domain of the url
|
||||
* @param user user that accessed url
|
||||
* @return List of BlackboardAttributes for giving attributes
|
||||
* @throws TskCoreException
|
||||
* @param url The URL, may be null.
|
||||
* @param accessTime The time the URL was accessed, may be null.
|
||||
* @param referrer The referring URL, may be null.
|
||||
* @param title Title of the returned resource, may be null.
|
||||
* @param programName The program that executed the request, may be the
|
||||
* empty string, may be null.
|
||||
* @param domain The domain of the URL, may be null.
|
||||
* @param user The user that accessed URL, may be null.
|
||||
*
|
||||
* @return The list of attributes.
|
||||
*
|
||||
* @throws TskCoreException The exception is thrown if there is an issue
|
||||
* creating the attributes.
|
||||
*/
|
||||
protected Collection<BlackboardAttribute> createHistoryAttribute(String url, Long accessTime,
|
||||
protected Collection<BlackboardAttribute> createHistoryAttributes(String url, Long accessTime,
|
||||
String referrer, String title, String programName, String domain, String user) throws TskCoreException {
|
||||
|
||||
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
|
||||
@ -363,17 +347,18 @@ abstract class Extract {
|
||||
|
||||
return bbattributes;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Creates a list of attributes for a cookie.
|
||||
* Creates a list of attributes for a web cookie artifact.
|
||||
*
|
||||
* @param url cookie url
|
||||
* @param creationTime cookie creation time
|
||||
* @param name cookie name
|
||||
* @param value cookie value
|
||||
* @param programName Name of the module creating the attribute
|
||||
* @param domain Domain of the URL
|
||||
* @return List of BlackboarAttributes for the passed in attributes
|
||||
* @param url The cookie url, may be null.
|
||||
* @param creationTime The cookie creation time, may be null.
|
||||
* @param name The cookie name, may be null.
|
||||
* @param value The cookie value, may be null.
|
||||
* @param programName The program that created the cookie, may be null.
|
||||
* @param domain The domain of the cookie URL, may be null.
|
||||
*
|
||||
* @return The list of attributes.
|
||||
*/
|
||||
protected Collection<BlackboardAttribute> createCookieAttributes(String url,
|
||||
Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) {
|
||||
@ -387,13 +372,13 @@ abstract class Extract {
|
||||
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED,
|
||||
RecentActivityExtracterModuleFactory.getModuleName(), creationTime));
|
||||
}
|
||||
|
||||
|
||||
if (accessTime != null && accessTime != 0) {
|
||||
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
|
||||
RecentActivityExtracterModuleFactory.getModuleName(), accessTime));
|
||||
}
|
||||
|
||||
if(endTime != null && endTime != 0) {
|
||||
|
||||
if (endTime != null && endTime != 0) {
|
||||
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_END,
|
||||
RecentActivityExtracterModuleFactory.getModuleName(), endTime));
|
||||
}
|
||||
@ -418,14 +403,16 @@ abstract class Extract {
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a list of bookmark attributes from the passed in parameters.
|
||||
* Creates a list of attributes for a web bookmark artifact.
|
||||
*
|
||||
* @param url Bookmark url
|
||||
* @param title Title of the bookmarked page
|
||||
* @param creationTime Date & time at which the bookmark was created
|
||||
* @param programName Name of the module creating the attribute
|
||||
* @param domain The domain of the bookmark's url
|
||||
* @return A collection of bookmark attributes
|
||||
* @param url The bookmark URL, may be null.
|
||||
* @param title The title of the bookmarked page, may be null.
|
||||
* @param creationTime The date and time at which the bookmark was created,
|
||||
* may be null.
|
||||
* @param programName The program that created the bookmark, may be null.
|
||||
* @param domain The domain of the bookmark's URL, may be null.
|
||||
*
|
||||
* @return The list of attributes.
|
||||
*/
|
||||
protected Collection<BlackboardAttribute> createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) {
|
||||
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
|
||||
@ -454,15 +441,16 @@ abstract class Extract {
|
||||
return bbattributes;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a list of the attributes of a downloaded file
|
||||
/**
|
||||
* Creates a list of attributes for a web download artifact.
|
||||
*
|
||||
* @param path
|
||||
* @param url URL of the downloaded file
|
||||
* @param accessTime Time the download occurred
|
||||
* @param domain Domain of the URL
|
||||
* @param programName Name of the module creating the attribute
|
||||
* @return A collection of attributes of a downloaded file
|
||||
* @param path The path of the downloaded file, may be null.
|
||||
* @param url The URL of the downloaded file, may be null.
|
||||
* @param accessTime The time the download occurred, may be null.
|
||||
* @param domain The domain of the URL, may be null.
|
||||
* @param programName The program that downloaded the file, may be null.
|
||||
*
|
||||
* @return The list of attributes.
|
||||
*/
|
||||
protected Collection<BlackboardAttribute> createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) {
|
||||
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
|
||||
@ -496,44 +484,24 @@ abstract class Extract {
|
||||
|
||||
return bbattributes;
|
||||
}
|
||||
|
||||
|
||||
    /**
     * Creates a list of the attributes for source of a downloaded file
     * Writes a file to disk in this extractor's dedicated temp directory within
     * the Recent Activity ingest modules temp directory. The object ID of the
     * file is appended to the file name for uniqueness.
     *
     * @param url source URL of the downloaded file
     * @return A collection of attributes for source of a downloaded file
     * @param file The file.
     *
     * @return A File object that represents the file on disk.
     *
     * @throws IOException Exception thrown if there is a problem writing the
     *                     file to disk.
     */
    protected Collection<BlackboardAttribute> createDownloadSourceAttributes(String url) {
        Collection<BlackboardAttribute> bbattributes = new ArrayList<>();

        bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
                RecentActivityExtracterModuleFactory.getModuleName(),
                (url != null) ? url : "")); //NON-NLS

        return bbattributes;
    }

    /**
     * Create temporary file for the given AbstractFile. The new file will be
     * created in the temp directory for the module with a unique file name.
     *
     * @param context
     * @param file
     * @param IngestJobId The ingest job id.
     * @return Newly created copy of the AbstractFile
     * @throws IOException
     */
    protected File createTemporaryFile(IngestJobContext context, AbstractFile file, long ingestJobId) throws IOException {
        Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(
                getCurrentCase(), getName(), ingestJobId), file.getName() + file.getId() + file.getNameExtension());
    protected File createTemporaryFile(AbstractFile file) throws IOException {
        Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(getCurrentCase(), getDisplayName(), context.getJobId()), file.getName() + file.getId() + file.getNameExtension());
        java.io.File tempFile = tempFilePath.toFile();

        try {
            ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled);
        } catch (IOException ex) {
            throw new IOException("Error writingToFile: " + file, ex); //NON-NLS
        }

        ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled);
        return tempFile;
    }

}

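A note on the rewritten createTemporaryFile() above: the temp path is now derived from the extractor's display name and the job ID, and ContentUtils.writeToFile() stops early if data source ingest is cancelled, so the copy may be partial in that case. Tying it to querySQLiteDb() from earlier in this file (dbFile is an illustrative AbstractFile found via the case file manager):

    // Copy the file out of the image, then query the on-disk copy. The
    // object ID appended to the name keeps concurrent copies from colliding.
    File tempDb = createTemporaryFile(dbFile);
    List<HashMap<String, Object>> rows = querySQLiteDb(
            tempDb.getAbsolutePath(), "SELECT name FROM sqlite_master"); //NON-NLS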
@ -39,8 +39,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -65,7 +63,7 @@ final class ExtractEdge extends Extract {

    private static final Logger LOG = Logger.getLogger(ExtractEdge.class.getName());
    private Content dataSource;
    private IngestJobContext context;
    private final IngestJobContext context;
    private HashMap<String, ArrayList<String>> containersTable;

    private static final String EDGE = "Edge"; //NON-NLS
@ -114,32 +112,31 @@ final class ExtractEdge extends Extract {
        "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file",
        "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file",
        "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file",
        "ExtractEdge_Module_Name=Microsoft Edge",
        "ExtractEdge_Module_Name=Microsoft Edge Analyzer",
        "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history",
        "Progress_Message_Edge_History=Microsoft Edge History",
        "Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks",
        "Progress_Message_Edge_Cookies=Microsoft Edge Cookies",
    })
        "Progress_Message_Edge_Cookies=Microsoft Edge Cookies",})

    /**
     * Extract the bookmarks, cookies, downloads and history from Microsoft Edge
     */
    ExtractEdge() {
        super(Bundle.ExtractEdge_Module_Name());
     * Extract the bookmarks, cookies, downloads and history from Microsoft Edge
     */
    ExtractEdge(IngestJobContext context) {
        super(Bundle.ExtractEdge_Module_Name(), context);
        this.context = context;
    }

    @Override
    protected String getName() {
    protected String getDisplayName() {
        return Bundle.ExtractEdge_Module_Name();
    }

    @Override
    void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
    void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), EDGE, context.getJobId());
        String moduleTempResultDir = Paths.get(moduleTempDir, EDGE_RESULT_FOLDER_NAME).toString();

        this.dataSource = dataSource;
        this.context = context;
        this.setFoundData(false);

        List<AbstractFile> webCacheFiles = null;
@ -151,7 +148,7 @@ final class ExtractEdge extends Extract {
            this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_errGettingWebCacheFiles());
            LOG.log(Level.SEVERE, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS
        }

        if (context.dataSourceIngestIsCancelled()) {
            return;
        }
@ -174,7 +171,7 @@ final class ExtractEdge extends Extract {
            LOG.log(Level.WARNING, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS
            return;
        }

        if (context.dataSourceIngestIsCancelled()) {
            return;
        }
@ -206,22 +203,22 @@ final class ExtractEdge extends Extract {
     * Process WebCacheV01.dat ese database file creating artifacts for cookies,
     * and history contained within.
     *
     * @param eseDumperPath Path to ESEDatabaseView.exe
     * @param webCacheFiles List of case WebCacheV01.dat files
     * @param moduleTempDir The temp directory for this module.
     * @param eseDumperPath       Path to ESEDatabaseView.exe
     * @param webCacheFiles       List of case WebCacheV01.dat files
     * @param moduleTempDir       The temp directory for this module.
     * @param moduleTempResultDir The temp results directory for this module.
     *
     * @throws IOException
     * @throws TskCoreException
     */
    void processWebCacheDbFile(String eseDumperPath, List<AbstractFile> webCacheFiles, DataSourceIngestModuleProgress progressBar,
    void processWebCacheDbFile(String eseDumperPath, List<AbstractFile> webCacheFiles, DataSourceIngestModuleProgress progressBar,
            String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {

        for (AbstractFile webCacheFile : webCacheFiles) {

            if (context.dataSourceIngestIsCancelled()) {
                return;
            }

            clearContainerTable();

            //Run the dumper
@ -245,9 +242,9 @@ final class ExtractEdge extends Extract {
                if (context.dataSourceIngestIsCancelled()) {
                    return;
                }

                progressBar.progress(Bundle.Progress_Message_Edge_History());

                this.getHistory(webCacheFile, resultsDir);

                if (context.dataSourceIngestIsCancelled()) {
@ -255,7 +252,7 @@ final class ExtractEdge extends Extract {
                }

                progressBar.progress(Bundle.Progress_Message_Edge_Cookies());

                this.getCookies(webCacheFile, resultsDir);

            } finally {
@ -266,19 +263,18 @@ final class ExtractEdge extends Extract {
    }

    /**
     * Process spartan.edb ese database file creating artifacts for the bookmarks
     * contained within.
     * Process spartan.edb ese database file creating artifacts for the
     * bookmarks contained within.
     *
     * @param eseDumperPath Path to ESEDatabaseViewer
     * @param spartanFiles  List of the case spartan.edb files
     * @param moduleTempDir The temp directory for this module.
     * @param eseDumperPath       Path to ESEDatabaseViewer
     * @param spartanFiles        List of the case spartan.edb files
     * @param moduleTempDir       The temp directory for this module.
     * @param moduleTempResultDir The temp results directory for this module.
     *
     * @throws IOException
     * @throws TskCoreException
     */
    void processSpartanDbFile(String eseDumperPath, List<AbstractFile> spartanFiles,
            String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {

    void processSpartanDbFile(String eseDumperPath, List<AbstractFile> spartanFiles, String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {
        for (AbstractFile spartanFile : spartanFiles) {

            if (context.dataSourceIngestIsCancelled()) {
@ -287,7 +283,7 @@ final class ExtractEdge extends Extract {

            //Run the dumper
            String tempSpartanFileName = EDGE_WEBCACHE_PREFIX
                    + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT;
                    + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT;
            File tempSpartanFile = new File(moduleTempDir, tempSpartanFileName);

            try {
@ -320,14 +316,15 @@ final class ExtractEdge extends Extract {
     * getHistory searches the files with "container" in the file name for lines
     * with the text "Visited" in them. Note that not all of the container
     * files, in fact most of them do not, have the browser history in them.
     * @param origFile Original case file
     *
     * @param origFile  Original case file
     * @param resultDir Output directory of ESEDatabaseViewer
     *
     * @throws TskCoreException
     * @throws FileNotFoundException
     * @throws FileNotFoundException
     */
    private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException {
        ArrayList<File> historyFiles = getHistoryFiles(resultDir);

        if (historyFiles == null) {
            return;
        }
@ -336,7 +333,7 @@ final class ExtractEdge extends Extract {
            if (context.dataSourceIngestIsCancelled()) {
                return;
            }

            Scanner fileScanner;
            try {
                fileScanner = new Scanner(new FileInputStream(file.toString()));
@ -353,7 +350,7 @@ final class ExtractEdge extends Extract {
                    if (context.dataSourceIngestIsCancelled()) {
                        return;
                    }

                    String line = fileScanner.nextLine();
                    if (headers == null) {
                        headers = Arrays.asList(line.toLowerCase().split(","));
@ -380,8 +377,9 @@ final class ExtractEdge extends Extract {
    /**
     * Search for bookmark files and make artifacts.
     *
     * @param origFile Original case file
     * @param origFile  Original case file
     * @param resultDir Output directory of ESEDatabaseViewer
     *
     * @throws TskCoreException
     * @throws FileNotFoundException
     */
@ -425,8 +423,9 @@ final class ExtractEdge extends Extract {
    /**
     * Queries for cookie files and adds artifacts.
     *
     * @param origFile Original case file
     * @param origFile  Original case file
     * @param resultDir Output directory of ESEDatabaseViewer
     *
     * @throws TskCoreException
     */
    private void getCookies(AbstractFile origFile, File resultDir) throws TskCoreException {
@ -440,7 +439,7 @@ final class ExtractEdge extends Extract {
            if (context.dataSourceIngestIsCancelled()) {
                return;
            }

            Scanner fileScanner;
            try {
                fileScanner = new Scanner(new FileInputStream(file.toString()));
@ -457,7 +456,7 @@ final class ExtractEdge extends Extract {
                if (context.dataSourceIngestIsCancelled()) {
                    return;
                }

                String line = fileScanner.nextLine();
                if (headers == null) {
                    headers = Arrays.asList(line.toLowerCase().split(","));
@ -481,13 +480,14 @@ final class ExtractEdge extends Extract {

    /**
     * Queries for download files and adds artifacts.
     *
     *
     * Leaving for future use.
     *
     * @param origFile Original case file
     *
     * @param origFile  Original case file
     * @param resultDir Output directory of ESEDatabaseViewer
     *
     * @throws TskCoreException
     * @throws FileNotFoundException
     * @throws FileNotFoundException
     */
    private void getDownloads(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException {
        ArrayList<File> downloadFiles = getDownloadFiles(resultDir);
@ -500,7 +500,7 @@ final class ExtractEdge extends Extract {
            if (context.dataSourceIngestIsCancelled()) {
                return;
            }

            Scanner fileScanner;
            try {
                fileScanner = new Scanner(new FileInputStream(file.toString()));
@ -516,7 +516,7 @@ final class ExtractEdge extends Extract {
                if (context.dataSourceIngestIsCancelled()) {
                    return;
                }

                String line = fileScanner.nextLine();
                if (headers == null) {
                    headers = Arrays.asList(line.toLowerCase().split(","));
@ -535,7 +535,7 @@ final class ExtractEdge extends Extract {
                fileScanner.close();
            }

            if(!context.dataSourceIngestIsCancelled()) {
            if (!context.dataSourceIngestIsCancelled()) {
                postArtifacts(bbartifacts);
            }
        }
@ -544,7 +544,8 @@ final class ExtractEdge extends Extract {
    /**
     * Find the location of ESEDatabaseViewer.exe
     *
     * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not found
     * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not
     *         found
     */
    private String getPathForESEDumper() {
        Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME);
@ -561,6 +562,7 @@ final class ExtractEdge extends Extract {
     * Finds all of the WebCacheV01.dat files in the case
     *
     * @return A list of WebCacheV01.dat files, possibly empty if none are found
     *
     * @throws TskCoreException
     */
    private List<AbstractFile> fetchWebCacheDBFiles() throws TskCoreException {
@ -573,6 +575,7 @@ final class ExtractEdge extends Extract {
     * Finds all of the spartan.edb files in the case
     *
     * @return A list of spartan files, possibly empty if none are found
     *
     * @throws TskCoreException
     */
    private List<AbstractFile> fetchSpartanDBFiles() throws TskCoreException {
@ -587,9 +590,10 @@ final class ExtractEdge extends Extract {
     * Each table in the ese database will be dumped as a comma separated file
     * named <tableName>.csv
     *
     * @param dumperPath Path to ESEDatabaseView.exe
     * @param dumperPath    Path to ESEDatabaseView.exe
     * @param inputFilePath Path to ese database file to be dumped
     * @param outputDir Output directory for dumper
     * @param outputDir     Output directory for dumper
     *
     * @throws IOException
     */
    private void executeDumper(String dumperPath, String inputFilePath,
@ -619,9 +623,11 @@ final class ExtractEdge extends Extract {
     * table.
     *
     * @param origFile Original case file
     * @param headers List of table headers
     * @param line CSV string representing a row of history table
     * @param headers  List of table headers
     * @param line     CSV string representing a row of history table
     *
     * @return BlackboardArtifact representing one history table entry
     *
     * @throws TskCoreException
     */
    private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -638,9 +644,9 @@ final class ExtractEdge extends Extract {
        String accessTime = rowSplit[index].trim();
        Long ftime = parseTimestamp(accessTime);

        return createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, createHistoryAttribute(url, ftime,
        return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, createHistoryAttributes(url, ftime,
                null, null,
                this.getName(),
                this.getDisplayName(),
                NetworkUtils.extractDomain(url), user));
    }

@ -648,9 +654,11 @@ final class ExtractEdge extends Extract {
     * Create a BlackboardArtifact for the given row from the Edge cookie table.
     *
     * @param origFile Original case file
     * @param headers List of table headers
     * @param line CSV string representing a row of cookie table
     * @param headers  List of table headers
     * @param line     CSV string representing a row of cookie table
     *
     * @return BlackboardArtifact representing one cookie table entry
     *
     * @throws TskCoreException
     */
    private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -664,7 +672,7 @@ final class ExtractEdge extends Extract {
        String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim());
        String url = flipDomain(domain);

        return createArtifactWithAttributes(TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url)));
        return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getDisplayName(), NetworkUtils.extractDomain(url)));
    }

    /**
@ -675,17 +683,19 @@ final class ExtractEdge extends Extract {
     * it apart.
     *
     * @param origFile Original case file
     * @param headers List of table headers
     * @param line CSV string representing a row of download table
     * @param headers  List of table headers
     * @param line     CSV string representing a row of download table
     *
     * @return BlackboardArtifact representing one download table entry
     *
     * @throws TskCoreException
     */
    private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
        BlackboardArtifact bbart = null;

        String[] lineSplit = line.split(","); // NON-NLS
        String rheader = lineSplit[headers.indexOf(EDGE_HEAD_RESPONSEHEAD)];

        return bbart;
    }

@ -696,9 +706,12 @@ final class ExtractEdge extends Extract {
     * Note: The "Favorites" table does not have a "Creation Time"
     *
     * @param origFile File the table came from ie spartan.edb
     * @param headers List of table column headers
     * @param line The line or row of the table to parse
     * @return BlackboardArtifact representation of the passed in line\table row or null if no Bookmark is found
     * @param headers  List of table column headers
     * @param line     The line or row of the table to parse
     *
     * @return BlackboardArtifact representation of the passed in line\table row
     *         or null if no Bookmark is found
     *
     * @throws TskCoreException
     */
    private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -711,28 +724,27 @@ final class ExtractEdge extends Extract {
        if (url.isEmpty()) {
            return null;
        }

        return createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null,
                this.getName(), NetworkUtils.extractDomain(url)));

        return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null,
                this.getDisplayName(), NetworkUtils.extractDomain(url)));
    }

    /**
     * Attempt to parse the timestamp.
     *
     *
     * ESEDatabaseView makes timestamps based on the locale of the machine so
     * they will not always be in the expected format. Additionally, the format
     * used in the database output does not appear to match the default format
     * using DateFormat.SHORT. Therefore, if the default US format doesn't work,
     * we will attempt to determine the correct pattern to use and save any
     * working pattern for the next attempt.
     *
     *
     * @param timeStr The date/time string to parse
     *
     *
     * @return The epoch time as a Long or null if it could not be parsed.
     */
    private Long parseTimestamp(String timeStr) {

        // If we had a pattern that worked on the last date, use it again.
        if (previouslyValidDateFormat != null) {
            try {
@ -741,7 +753,7 @@ final class ExtractEdge extends Extract {
                // Continue on to format detection
            }
        }

        // Try the default US pattern
        try {
            SimpleDateFormat usDateFormat = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); //NON-NLS
@ -752,12 +764,12 @@ final class ExtractEdge extends Extract {
        } catch (ParseException ex) {
            // Continue on to format detection
        }

        // This generally doesn't match the data in the file but can give information on whether
        // the month or day is first.
        boolean monthFirstFromLocale = true;
        String localeDatePattern = ((SimpleDateFormat) DateFormat.getDateInstance(
                DateFormat.SHORT, Locale.getDefault())).toPattern();
                DateFormat.SHORT, Locale.getDefault())).toPattern();
        if (localeDatePattern.startsWith("d")) {
            monthFirstFromLocale = false;
        }
@ -770,27 +782,27 @@ final class ExtractEdge extends Extract {
        if (matcher.find()) {
            int firstVal = Integer.parseInt(matcher.group(1));
            int secondVal = Integer.parseInt(matcher.group(2));

            if (firstVal > 12) {
                monthFirst = false;
                monthFirst = false;
            } else if (secondVal > 12) {
                monthFirst = true;
            }
            // Otherwise keep the setting from the locale
        }

        // See if the time has AM/PM attached
        boolean hasAmPm = false;
        if (timeStr.endsWith("M") || timeStr.endsWith("m")) {
            hasAmPm = true;
        }

        // See if the date appears to use forward slashes. If not, assume '.' is being used.
        boolean hasSlashes = false;
        if (timeStr.contains("/")) {
            hasSlashes = true;
        }

        // Make our best guess at the pattern
        String dateFormatPattern;
        if (monthFirst) {
@ -800,19 +812,19 @@ final class ExtractEdge extends Extract {
                dateFormatPattern = "MM.dd.yyyy ";
            }
        } else {
            if (hasSlashes) {
            if (hasSlashes) {
                dateFormatPattern = "dd/MM/yyyy ";
            } else {
                dateFormatPattern = "dd.MM.yyyy ";
            }
        }

        if (hasAmPm) {
            dateFormatPattern += "hh:mm:ss a";
        } else {
            dateFormatPattern += "HH:mm:ss";
        }

        try {
            SimpleDateFormat dateFormat = new SimpleDateFormat(dateFormatPattern); //NON-NLS
            dateFormat.setLenient(false); // Fail if month or day are out of range
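The format-detection logic above can be summarized with a few sample inputs; this is a restatement of the same rules for illustration, not the module's code (monthFirst, hasSlashes, and hasAmPm are the booleans computed above):

    // "13/04/2021 10:15:22" -> first value 13 > 12, so day-first   -> "dd/MM/yyyy HH:mm:ss"
    // "04.13.2021 10:15 PM" -> second value 13 > 12, so month-first;
    //                          dots and AM/PM present               -> "MM.dd.yyyy hh:mm:ss a"
    // "05/06/2021 10:15:22" -> ambiguous; falls back to the locale's
    //                          DateFormat.SHORT day/month ordering.
    String guess = (monthFirst ? "MM" : "dd") + (hasSlashes ? "/" : ".")
            + (monthFirst ? "dd" : "MM") + (hasSlashes ? "/" : ".") + "yyyy "
            + (hasAmPm ? "hh:mm:ss a" : "HH:mm:ss");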
@ -829,6 +841,7 @@ final class ExtractEdge extends Extract {
     * Converts a space separated string of hex values to ascii characters.
     *
     * @param hexString
     *
     * @return "decoded" string or null if a non-hex value was found
     */
    private String hexToChar(String hexString) {
@ -858,6 +871,7 @@ final class ExtractEdge extends Extract {
     * there to weed out the "junk".
     *
     * @param domain
     *
     * @return Correct domain string
     */
    private String flipDomain(String domain) {
@ -888,6 +902,7 @@ final class ExtractEdge extends Extract {
     * them.
     *
     * @param resultDir Path to ESEDatabaseViewer output
     *
     * @return List of download table files
     */
    private ArrayList<File> getDownloadFiles(File resultDir) throws FileNotFoundException {
@ -898,7 +913,9 @@ final class ExtractEdge extends Extract {
     * Returns a list of the container files that have history information in them.
     *
     * @param resultDir Path to ESEDatabaseViewer output
     *
     * @return List of history table files
     *
     * @throws FileNotFoundException
     */
    private ArrayList<File> getHistoryFiles(File resultDir) throws FileNotFoundException {
@ -909,8 +926,11 @@ final class ExtractEdge extends Extract {
     * Returns a list of the container files that are of the given type string
     *
     * @param resultDir Path to ESEDatabaseViewer output
     * @param type Type of table files
     * @return List of table files returns null if no files of that type are found
     * @param type      Type of table files
     *
     * @return List of table files returns null if no files of that type are
     *         found
     *
     * @throws FileNotFoundException
     */
    private ArrayList<File> getContainerFiles(File resultDir, String type) throws FileNotFoundException {
@ -938,7 +958,9 @@ final class ExtractEdge extends Extract {
     * files.
     *
     * @param resultDir Path to ESEDatabaseViewer output
     * @return Hashmap with Key representing the table type, the value is a list of table ids for that type
     *
     * @return Hashmap with Key representing the table type, the value is a list
     *         of table ids for that type
     */
    private HashMap<String, ArrayList<String>> getContainerIDTable(File resultDir) throws FileNotFoundException {

@ -975,11 +997,11 @@ final class ExtractEdge extends Extract {

        return containersTable;
    }

    /**
     * Clears the containerTable
     */
    private void clearContainerTable(){
    private void clearContainerTable() {
        containersTable = null;
    }
}

@ -43,7 +43,6 @@ import java.util.Scanner;
|
||||
import java.util.stream.Collectors;
|
||||
import org.openide.modules.InstalledFileLocator;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
@ -72,7 +71,7 @@ class ExtractIE extends Extract {
|
||||
private static final String RESOURCE_URL_PREFIX = "res://";
|
||||
private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
|
||||
private Content dataSource;
|
||||
private IngestJobContext context;
|
||||
private final IngestJobContext context;
|
||||
|
||||
@Messages({
|
||||
"Progress_Message_IE_History=IE History",
|
||||
@ -83,30 +82,30 @@ class ExtractIE extends Extract {
|
||||
"Progress_Message_IE_AutoFill=IE Auto Fill",
|
||||
"Progress_Message_IE_Logins=IE Logins",})
|
||||
|
||||
ExtractIE() {
|
||||
super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"));
|
||||
ExtractIE(IngestJobContext context) {
|
||||
super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"), context);
|
||||
JAVA_PATH = PlatformUtil.getJavaPath();
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
|
||||
public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
|
||||
String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), "IE", context.getJobId());
|
||||
String moduleTempResultsDir = Paths.get(moduleTempDir, "results").toString();
|
||||
|
||||
|
||||
this.dataSource = dataSource;
|
||||
this.context = context;
|
||||
dataFound = false;
|
||||
|
||||
progressBar.progress(Bundle.Progress_Message_IE_Bookmarks());
|
||||
this.getBookmark();
|
||||
|
||||
|
||||
if (context.dataSourceIngestIsCancelled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
progressBar.progress(Bundle.Progress_Message_IE_Cookies());
|
||||
this.getCookie();
|
||||
|
||||
|
||||
if (context.dataSourceIngestIsCancelled()) {
|
||||
return;
|
||||
}
|
||||
@ -127,7 +126,7 @@ class ExtractIE extends Extract {
|
||||
logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); //NON-NLS
|
||||
this.addErrorMessage(
|
||||
NbBundle.getMessage(this.getClass(), "ExtractIE.getBookmark.errMsg.errGettingBookmarks",
|
||||
this.getName()));
|
||||
this.getDisplayName()));
|
||||
return;
|
||||
}
|
||||
|
||||
@ -171,13 +170,13 @@ class ExtractIE extends Extract {
|
||||
}
|
||||
|
||||
try {
|
||||
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes));
|
||||
bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, fav, bbattributes));
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId() ), ex);
|
||||
logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId()), ex);
|
||||
}
|
||||
}
|
||||
|
||||
if(!context.dataSourceIngestIsCancelled()) {
|
||||
if (!context.dataSourceIngestIsCancelled()) {
|
||||
postArtifacts(bbartifacts);
|
||||
}
|
||||
}
|
||||
@ -199,12 +198,12 @@ class ExtractIE extends Extract {
        } catch (IOException ex) {
            logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); //NON-NLS
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getName(),
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getDisplayName(),
                            fav.getName()));
        } catch (IndexOutOfBoundsException ex) {
            logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. Unexpected format of the bookmark file: " + fav.getName(), ex); //NON-NLS
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getName(),
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getDisplayName(),
                            fav.getName()));
        } finally {
            try {

@ -228,7 +227,7 @@ class ExtractIE extends Extract {
        } catch (TskCoreException ex) {
            logger.log(Level.WARNING, "Error getting cookie files for IE"); //NON-NLS
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getName()));
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getDisplayName()));
            return;
        }

@ -254,7 +253,7 @@ class ExtractIE extends Extract {
                logger.log(Level.WARNING, "Error reading bytes of Internet Explorer cookie.", ex); //NON-NLS
                this.addErrorMessage(
                        NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errReadingIECookie",
                                this.getName(), cookiesFile.getName()));
                                this.getDisplayName(), cookiesFile.getName()));
                continue;
            }
            String cookieString = new String(t);

@ -285,21 +284,23 @@ class ExtractIE extends Extract {
            }

            try {
                bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes));
                bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId() ), ex);
                logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId()), ex);
            }
        }

        if(!context.dataSourceIngestIsCancelled()) {
        if (!context.dataSourceIngestIsCancelled()) {
            postArtifacts(bbartifacts);
        }
    }

    /**
     * Locates index.dat files, runs Pasco on them, and creates artifacts.
     * @param moduleTempDir The path to the module temp directory.
     * @param moduleTempResultsDir The path to the module temp results directory.
     *
     * @param moduleTempDir        The path to the module temp directory.
     * @param moduleTempResultsDir The path to the module temp results
     *                             directory.
     */
    private void getHistory(String moduleTempDir, String moduleTempResultsDir) {
        logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); //NON-NLS

@ -308,7 +309,7 @@ class ExtractIE extends Extract {
        final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); //NON-NLS
        if (pascoRoot == null) {
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getName()));
                    NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getDisplayName()));
            logger.log(Level.SEVERE, "Error finding pasco program "); //NON-NLS
            return;
        }

@ -329,7 +330,7 @@ class ExtractIE extends Extract {
            indexFiles = fileManager.findFiles(dataSource, "index.dat"); //NON-NLS
        } catch (TskCoreException ex) {
            this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errGettingHistFiles",
                    this.getName()));
                    this.getDisplayName()));
            logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); //NON-NLS
            return;
        }

@ -363,7 +364,7 @@ class ExtractIE extends Extract {
            } catch (IOException e) {
                logger.log(Level.WARNING, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e); //NON-NLS
                this.addErrorMessage(
                        NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getName(),
                        NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getDisplayName(),
                                datFile.getAbsolutePath()));
                continue;
            }

@ -391,11 +392,11 @@ class ExtractIE extends Extract {
            } else {
                logger.log(Level.WARNING, "pasco execution failed on: {0}", filename); //NON-NLS
                this.addErrorMessage(
                        NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getName()));
                        NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getDisplayName()));
            }
        }

        if(!context.dataSourceIngestIsCancelled()) {
        if (!context.dataSourceIngestIsCancelled()) {
            postArtifacts(bbartifacts);
        }
    }

@ -403,16 +404,15 @@ class ExtractIE extends Extract {
    /**
     * Execute pasco on a single file that has been saved to disk.
     *
     * @param indexFilePath Path to local index.dat file to analyze
     * @param outputFileName Name of file to save output to
     * @param indexFilePath        Path to local index.dat file to analyze
     * @param outputFileName       Name of file to save output to
     * @param moduleTempResultsDir the path to the module temp directory.
     *
     * @return false on error
     */
    @Messages({
        "# {0} - sub module name",
        "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history",
    })
        "# {0} - sub module name",
        "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history",})
    private boolean executePasco(String indexFilePath, String outputFileName, String moduleTempResultsDir) {
        boolean success = true;
        try {

@ -443,7 +443,7 @@ class ExtractIE extends Extract {
            // @@@ Investigate use of history versus cache as type.
        } catch (IOException ex) {
            logger.log(Level.SEVERE, "Error executing Pasco to process Internet Explorer web history", ex); //NON-NLS
            addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getName()));
            addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getDisplayName()));
            success = false;
        }
        return success;
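The body of executePasco() is mostly elided from this diff. As a rough, plain-Java illustration of what running an external parser such as pasco2 against a copied index.dat involves, here is a ProcessBuilder sketch; Autopsy's real module uses its own execution utilities, and the argument list below is an assumption for illustration, not pasco's actual command line:

    import java.io.File;
    import java.io.IOException;

    // Illustrative only: runs an external tool and redirects stdout to a results file.
    static boolean runExternalParser(String toolPath, String indexFilePath, File resultsFile) {
        try {
            Process proc = new ProcessBuilder(toolPath, indexFilePath)
                    .redirectOutput(resultsFile)   // pasco-style tools write results to stdout
                    .redirectErrorStream(false)
                    .start();
            return proc.waitFor() == 0;            // treat a nonzero exit code as failure
        } catch (IOException ex) {
            return false;
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
            return false;
        }
    }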
@ -452,9 +452,9 @@ class ExtractIE extends Extract {
    /**
     * parse Pasco output and create artifacts
     *
     * @param origFile Original index.dat file that was analyzed to
     * get this output
     * @param pascoOutputFileName name of pasco output file
     * @param origFile             Original index.dat file that was analyzed to
     *                             get this output
     * @param pascoOutputFileName  name of pasco output file
     * @param moduleTempResultsDir the path to the module temp directory.
     *
     * @return A collection of created artifacts

@ -467,7 +467,7 @@ class ExtractIE extends Extract {
        File file = new File(fnAbs);
        if (file.exists() == false) {
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getName(),
                    NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getDisplayName(),
                            file.getName()));
            logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath()); //NON-NLS
            return bbartifacts;

@ -484,7 +484,7 @@ class ExtractIE extends Extract {
            fileScanner = new Scanner(new FileInputStream(file.toString()));
        } catch (FileNotFoundException ex) {
            this.addErrorMessage(
                    NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getName(),
                    NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getDisplayName(),
                            file.getName()));
            logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); //NON-NLS
            return bbartifacts;

@ -562,7 +562,7 @@ class ExtractIE extends Extract {
            } catch (ParseException e) {
                this.addErrorMessage(
                        NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsingEntry",
                                this.getName()));
                                this.getDisplayName()));
                logger.log(Level.WARNING, String.format("Error parsing Pasco results, may have partial processing of corrupt file (id=%d)", origFile.getId()), e); //NON-NLS
            }
        }

@ -589,9 +589,9 @@ class ExtractIE extends Extract {
                    RecentActivityExtracterModuleFactory.getModuleName(), user));

            try {
                bbartifacts.add(createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, bbattributes));
                bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, bbattributes));
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_HISTORY.getDisplayName(), origFile.getId() ), ex);
                logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_HISTORY.getDisplayName(), origFile.getId()), ex);
            }
        }
        fileScanner.close();

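parsePascoOutput() walks pasco's text output with a java.util.Scanner, turning each record row into a TSK_WEB_HISTORY artifact. A minimal sketch of that scanning loop; the "URL" row prefix and the tab-delimited column layout are assumptions for illustration, not a specification of pasco's exact output format:

    import java.io.File;
    import java.io.FileNotFoundException;
    import java.util.Scanner;

    // Scans a delimited results file line by line, as parsePascoOutput() does.
    static void scanResults(File resultsFile) throws FileNotFoundException {
        try (Scanner fileScanner = new Scanner(resultsFile)) {
            while (fileScanner.hasNextLine()) {
                String line = fileScanner.nextLine();
                if (!line.startsWith("URL")) {   // skip headers and non-record rows
                    continue;
                }
                String[] fields = line.split("\\t");
                // ... map fields to blackboard attributes (URL, dates, user) ...
            }
        }
    }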
@ -53,16 +53,13 @@ import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;

/**
 * Extract the LNK files from the jumplists and save them to ModuleOutput\RecentActivity\Jumplists
 * and then add them back into the case as a dervived file.
 * Extract the LNK files from the jumplists and save them to
 * ModuleOutput\RecentActivity\Jumplists and then add them back into the case as
 * a dervived file.
 */
final class ExtractJumpLists extends Extract {

    private static final Logger logger = Logger.getLogger(ExtractJumpLists.class.getName());

    private IngestJobContext context;

    private static final String JUMPLIST_TSK_COMMENT = "Jumplist File";
    private static final String RA_DIR_NAME = "RecentActivity"; //NON-NLS
    private static final String AUTOMATIC_DESTINATIONS_FILE_DIRECTORY = "%/AppData/Roaming/Microsoft/Windows/Recent/AutomaticDestinations/";
    private static final String JUMPLIST_DIR_NAME = "jumplists"; //NON-NLS

@ -70,26 +67,25 @@ final class ExtractJumpLists extends Extract {
    private String moduleName;
    private FileManager fileManager;
    private final IngestServices services = IngestServices.getInstance();
    private final IngestJobContext context;

    @Messages({
        "Jumplist_module_name=Windows Jumplist Extractor",
        "Jumplist_module_name=Windows Jumplist Analyzer",
        "Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis."
    })
    ExtractJumpLists() {
        super(Bundle.Jumplist_module_name());
    ExtractJumpLists(IngestJobContext context) {
        super(Bundle.Jumplist_module_name(), context);
        this.context = context;
    }

    @Override
    void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {

        this.context = context;
    void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        moduleName = Bundle.Jumplist_module_name();
        fileManager = currentCase.getServices().getFileManager();
        fileManager = currentCase.getServices().getFileManager();
        long ingestJobId = context.getJobId();

        String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME , ingestJobId);
        String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId);
        List<AbstractFile> jumpListFiles = extractJumplistFiles(dataSource, ingestJobId, baseRaTempPath);

        if (jumpListFiles.isEmpty()) {
            return;
        }
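The hunk above shows a refactor that recurs across this commit: each extractor now receives its IngestJobContext in the constructor and forwards it to the Extract base class, instead of taking it as a process() parameter on every call. A self-contained sketch of the shape of that change; Context stands in for IngestJobContext and ExtractorBase for the Extract base class, and all names are hypothetical:

    // Before: context arrived as a parameter of each process() call.
    // After: context is fixed at construction time, so process() loses a
    // parameter and every helper method can rely on the final field.
    final class Context {
        boolean dataSourceIngestIsCancelled() { return false; }
    }

    abstract class ExtractorBase {
        protected final Context context;  // fixed for the extractor's lifetime

        ExtractorBase(Context context) {
            this.context = context;
        }

        // process() now needs only the data source.
        abstract void process(Object dataSource);
    }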
@ -102,26 +98,28 @@ final class ExtractJumpLists extends Extract {
        String derivedPath = null;
        String baseRaModPath = RAImageIngestModule.getRAOutputPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId);
        for (AbstractFile jumplistFile : jumpListFiles) {
            if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..") &&
                    !jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) {
                String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString();
                String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
                derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
            if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..")
                    && !jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) {
                String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString();
                String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
                derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
                File jlDir = new File(moduleOutPath);
                if (jlDir.exists() == false) {
                    boolean dirMade = jlDir.mkdirs();
                    if (!dirMade) {
                        logger.log(Level.WARNING, "Error creating directory to store Jumplist LNK files %s", moduleOutPath); //NON-NLS
                        continue;
                        continue;
                    }
                }
                derivedFiles.addAll(extractLnkFiles(jlFile, moduleOutPath, jumplistFile, derivedPath));
            }
        }

        // notify listeners of new files and schedule for analysis
        progressBar.progress(String.format(Bundle.Jumplist_adding_extracted_files_msg(), derivedFiles.size()));
        derivedFiles.forEach((derived) -> { services.fireModuleContentEvent(new ModuleContentEvent(derived)); });
        derivedFiles.forEach((derived) -> {
            services.fireModuleContentEvent(new ModuleContentEvent(derived));
        });
        context.addFilesToJob(derivedFiles);

    }

@ -132,8 +130,8 @@ final class ExtractJumpLists extends Extract {
     * @return - list of jumplist abstractfiles or empty list
     */
    private List<AbstractFile> extractJumplistFiles(Content dataSource, Long ingestJobId, String baseRaTempPath) {
        List<AbstractFile> jumpListFiles = new ArrayList<>();;
        List<AbstractFile> tempJumpListFiles = new ArrayList<>();;
        List<AbstractFile> jumpListFiles = new ArrayList<>();;
        List<AbstractFile> tempJumpListFiles = new ArrayList<>();;

        FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();

@ -141,22 +139,22 @@ final class ExtractJumpLists extends Extract {
            tempJumpListFiles = fileManager.findFiles(dataSource, "%", AUTOMATIC_DESTINATIONS_FILE_DIRECTORY); //NON-NLS
            if (!tempJumpListFiles.isEmpty()) {
                jumpListFiles.addAll(tempJumpListFiles);
            }
        } catch (TskCoreException ex) {
            logger.log(Level.WARNING, "Unable to find jumplist files.", ex); //NON-NLS
            return jumpListFiles; // No need to continue
        }

        for (AbstractFile jumpListFile : jumpListFiles) {

            if (context.dataSourceIngestIsCancelled()) {
                return jumpListFiles;
            }

            if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..") &&
                    !jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) {
            if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..")
                    && !jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) {
                String fileName = jumpListFile.getName() + "_" + jumpListFile.getId();
                String jlFile = Paths.get(baseRaTempPath, fileName).toString();
                String jlFile = Paths.get(baseRaTempPath, fileName).toString();
                try {
                    ContentUtils.writeToFile(jumpListFile, new File(jlFile));
                } catch (IOException ex) {

@ -168,81 +166,80 @@ final class ExtractJumpLists extends Extract {
        return jumpListFiles;

    }

    /*
     * Read each jumplist file and extract the lnk files to moduleoutput
     */
     * Read each jumplist file and extract the lnk files to moduleoutput
     */
    private List<DerivedFile> extractLnkFiles(String jumpListFile, String moduleOutPath, AbstractFile jumpListAbsFile, String derivedPath) {

        List<DerivedFile> derivedFiles = new ArrayList<>();
        DerivedFile derivedFile;
        String lnkFileName = "";

        try (POIFSFileSystem fs = new POIFSFileSystem(new File(jumpListFile))) {
            DirectoryEntry root = fs.getRoot();
            for (Entry entry : root) {
                if (entry instanceof DirectoryEntry) {
                    //If this data structure needed to recurse this is where it would do it but jumplists do not need to at this time
                    continue;
                } else if (entry instanceof DocumentEntry) {
                    String jmpListFileName = entry.getName();
                    int fileSize = ((DocumentEntry) entry).getSize();

                    if (fileSize > 0) {
                        try (DocumentInputStream stream = fs.createDocumentInputStream(jmpListFileName)) {
                            byte[] buffer = new byte[stream.available()];
                            stream.read(buffer);
            DirectoryEntry root = fs.getRoot();
            for (Entry entry : root) {
                if (entry instanceof DirectoryEntry) {
                    //If this data structure needed to recurse this is where it would do it but jumplists do not need to at this time
                    continue;
                } else if (entry instanceof DocumentEntry) {
                    String jmpListFileName = entry.getName();
                    int fileSize = ((DocumentEntry) entry).getSize();

                            JLnkParser lnkParser = new JLnkParser(fs.createDocumentInputStream(jmpListFileName), fileSize);
                            JLNK lnk = lnkParser.parse();
                            lnkFileName = lnk.getBestName() + ".lnk";
                            File targetFile = new File(moduleOutPath + File.separator + entry.getName() + "-" + lnkFileName);
                            String relativePath = Case.getCurrentCase().getModuleOutputDirectoryRelativePath();
                            String derivedFileName = Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + File.separator + derivedPath + File.separator + entry.getName() + "-" + lnkFileName;
                            OutputStream outStream = new FileOutputStream(targetFile);
                            outStream.write(buffer);
                            outStream.close();
                            derivedFile = fileManager.addDerivedFile(lnkFileName, derivedFileName,
                                    fileSize,
                                    0,
                                    0,
                                    0,
                                    0, // TBD
                                    true,
                                    jumpListAbsFile,
                                    "",
                                    moduleName,
                                    VERSION_NUMBER,
                                    "",
                                    TskData.EncodingType.NONE);
                            derivedFiles.add(derivedFile);
                    if (fileSize > 0) {
                        try (DocumentInputStream stream = fs.createDocumentInputStream(jmpListFileName)) {
                            byte[] buffer = new byte[stream.available()];
                            stream.read(buffer);

                        } catch (IOException | JLnkParserException ex) {
                            logger.log(Level.WARNING, String.format("No such document, or the Entry represented by documentName is not a DocumentEntry link file is %s", jumpListFile), ex); //NON-NLS
                        } catch (TskCoreException ex) {
                            logger.log(Level.WARNING, String.format("Error trying to add dervived file %s", lnkFileName), ex); //NON-NLS
                        } catch (IndexOutOfBoundsException ex) {
                            // There is some type of corruption within the file that cannot be handled, ignoring it and moving on to next file
                            // in the jumplist.
                            logger.log(Level.WARNING, String.format("Error parsing the the jumplist file %s", jumpListFile), ex); //NON-NLS
                        }
                            JLnkParser lnkParser = new JLnkParser(fs.createDocumentInputStream(jmpListFileName), fileSize);
                            JLNK lnk = lnkParser.parse();
                            lnkFileName = lnk.getBestName() + ".lnk";
                            File targetFile = new File(moduleOutPath + File.separator + entry.getName() + "-" + lnkFileName);
                            String relativePath = Case.getCurrentCase().getModuleOutputDirectoryRelativePath();
                            String derivedFileName = Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + File.separator + derivedPath + File.separator + entry.getName() + "-" + lnkFileName;
                            OutputStream outStream = new FileOutputStream(targetFile);
                            outStream.write(buffer);
                            outStream.close();
                            derivedFile = fileManager.addDerivedFile(lnkFileName, derivedFileName,
                                    fileSize,
                                    0,
                                    0,
                                    0,
                                    0, // TBD
                                    true,
                                    jumpListAbsFile,
                                    "",
                                    moduleName,
                                    VERSION_NUMBER,
                                    "",
                                    TskData.EncodingType.NONE);
                            derivedFiles.add(derivedFile);

                        } catch (IOException | JLnkParserException ex) {
                            logger.log(Level.WARNING, String.format("No such document, or the Entry represented by documentName is not a DocumentEntry link file is %s", jumpListFile), ex); //NON-NLS
                        } catch (TskCoreException ex) {
                            logger.log(Level.WARNING, String.format("Error trying to add dervived file %s", lnkFileName), ex); //NON-NLS
                        } catch (IndexOutOfBoundsException ex) {
                            // There is some type of corruption within the file that cannot be handled, ignoring it and moving on to next file
                            // in the jumplist.
                            logger.log(Level.WARNING, String.format("Error parsing the the jumplist file %s", jumpListFile), ex); //NON-NLS
                        }
                } else {
                    }
                } else {
                    // currently, either an Entry is a DirectoryEntry or a DocumentEntry,
                    // but in the future, there may be other entry subinterfaces.
                    // The internal data structure certainly allows for a lot more entry types.
                    continue;
                }
            }
        } catch (NotOLE2FileException | EmptyFileException ex1) {
            logger.log(Level.WARNING, String.format("Error file not a valid OLE2 Document $s", jumpListFile)); //NON-NLS
        } catch (IOException ex) {
            logger.log(Level.WARNING, String.format("Error lnk parsing the file to get recent files $s", jumpListFile), ex); //NON-NLS
        }

        return derivedFiles;

    }
        } catch (NotOLE2FileException | EmptyFileException ex1) {
            logger.log(Level.WARNING, String.format("Error file not a valid OLE2 Document $s", jumpListFile)); //NON-NLS
        } catch (IOException ex) {
            logger.log(Level.WARNING, String.format("Error lnk parsing the file to get recent files $s", jumpListFile), ex); //NON-NLS
        }

        return derivedFiles;

    }

}

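extractLnkFiles() walks the jumplist's OLE2 compound-file structure with Apache POI, treating each DocumentEntry as an embedded LNK stream. A trimmed, self-contained sketch of that traversal, using only the POI calls that appear in this diff and leaving out the Autopsy-specific parts (error handling omitted for brevity):

    import java.io.File;
    import java.io.IOException;
    import org.apache.poi.poifs.filesystem.DirectoryEntry;
    import org.apache.poi.poifs.filesystem.DocumentEntry;
    import org.apache.poi.poifs.filesystem.DocumentInputStream;
    import org.apache.poi.poifs.filesystem.Entry;
    import org.apache.poi.poifs.filesystem.POIFSFileSystem;

    // Walks the root of an OLE2 file and reads each document stream into memory,
    // mirroring the traversal in extractLnkFiles().
    static void listOle2Streams(File ole2File) throws IOException {
        try (POIFSFileSystem fs = new POIFSFileSystem(ole2File)) {
            DirectoryEntry root = fs.getRoot();
            for (Entry entry : root) {
                if (entry instanceof DocumentEntry) {
                    int fileSize = ((DocumentEntry) entry).getSize();
                    if (fileSize > 0) {
                        try (DocumentInputStream stream = fs.createDocumentInputStream(entry.getName())) {
                            byte[] buffer = stream.readAllBytes(); // each stream is one embedded LNK
                            System.out.printf("%s: %d bytes%n", entry.getName(), buffer.length);
                        }
                    }
                } // DirectoryEntry children are skipped; jumplists do not nest
            }
        }
    }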
@ -1,7 +1,7 @@
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2019 Basis Technology Corp.
 * Copyright 2019-2021 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");

@ -26,7 +26,6 @@ import java.util.List;
import java.util.logging.Level;
import org.apache.commons.io.FilenameUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;

@ -40,8 +39,8 @@ import org.sleuthkit.datamodel.TskCoreException;
 * Create OS INFO artifacts for the Operating Systems believed to be present on
 * the data source.
 */
@Messages({"ExtractOs.parentModuleName=Recent Activity",
    "ExtractOS_progressMessage=Checking for OS"})
@Messages({"ExtractOs.displayName=OS Info Analyzer",
    "ExtractOS_progressMessage=Checking for OS"})
class ExtractOs extends Extract {

    private static final Logger logger = Logger.getLogger(ExtractOs.class.getName());

@ -64,16 +63,22 @@ class ExtractOs extends Extract {
    private static final String LINUX_UBUNTU_PATH = "/etc/lsb-release";

    private Content dataSource;

    private final IngestJobContext context;

    ExtractOs(IngestJobContext context) {
        super(Bundle.ExtractOs_displayName(), context);
        this.context = context;
    }

    @Override
    void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
    void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        this.dataSource = dataSource;
        try {
            progressBar.progress(Bundle.ExtractOS_progressMessage());
            for (OS_TYPE value : OS_TYPE.values()) {
                if (context.dataSourceIngestIsCancelled()) {
                    return;
                }
            }

                checkForOSFiles(value);
            }

@ -100,9 +105,9 @@ class ExtractOs extends Extract {
            //if the os info program name is not empty create an os info artifact on the first of the files found
            Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
            bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
                    Bundle.ExtractOs_parentModuleName(),
                    getRAModuleName(),
                    osType.getOsInfoLabel())); //NON-NLS
            postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_INFO, file, bbattributes));
            postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, file, bbattributes));
        }
    }

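The hunk above also shows the commit-wide migration from the BlackboardArtifact.ARTIFACT_TYPE enum to the newer BlackboardArtifact.Type constants when creating artifacts, while attribute types still use the BlackboardAttribute.ATTRIBUTE_TYPE enum. A minimal sketch grounded in that hunk, assuming the helpers shown in this diff (getRAModuleName, createArtifactWithAttributes, postArtifact) on the Extract base class, and with 'file' and 'osType' assumed in scope:

    Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
    bbattributes.add(new BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, // attribute types keep the enum
            getRAModuleName(),                                // source module name for the attribute
            osType.getOsInfoLabel()));
    // Artifact types now use BlackboardArtifact.Type constants instead of ARTIFACT_TYPE:
    postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, file, bbattributes));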
@ -116,7 +121,7 @@ class ExtractOs extends Extract {
     * @return the first AbstractFile found which matched a specified path to
     *         search for
     */
    private AbstractFile getFirstFileFound(List<String> pathsToSearchFor) throws TskCoreException{
    private AbstractFile getFirstFileFound(List<String> pathsToSearchFor) throws TskCoreException {
        for (String filePath : pathsToSearchFor) {
            List<AbstractFile> files = currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(filePath), FilenameUtils.getPath(filePath));
            if (!files.isEmpty()) {
Some files were not shown because too many files have changed in this diff.