Merge pull request #7403 from sleuthkit/develop

Merge develop
Ann Priestman 2021-11-04 10:17:06 -04:00 committed by GitHub
commit 2fdfd83d6f
120 changed files with 3062 additions and 2967 deletions

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2015-2019 Basis Technology Corp.
+ * Copyright 2015-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -56,7 +56,7 @@ public final class Blackboard implements Closeable {
     @Deprecated
     public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
         try {
-            Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
+            Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "", null);
         } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
             throw new BlackboardException(ex.getMessage(), ex);
         }
@@ -117,6 +117,7 @@ public final class Blackboard implements Closeable {
      * @deprecated Do not use.
      */
     @Deprecated
+    @Override
     public void close() throws IOException {
         /*
          * No-op maintained for backwards compatibility. Clients should not
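A brief migration sketch (not part of the diff) for callers of the deprecated indexArtifact() wrapper above, using the three-argument postArtifact(artifact, moduleName, ingestJobId) overload this commit adopts. ExamplePoster and its method are illustrative names; the job ID may be null outside of an ingest job, as in the wrapper itself:

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;

    class ExamplePoster {
        void post(BlackboardArtifact artifact, String moduleName, IngestJobContext context) throws Blackboard.BlackboardException {
            Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
            // Supply the ingest job ID when posting from an ingest module;
            // pass null when there is no ingest job, as the deprecated wrapper does.
            blackboard.postArtifact(artifact, moduleName, context.getJobId());
        }
    }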

View File

@@ -761,8 +761,7 @@ public final class CaseEventListener implements PropertyChangeListener {
                             BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score,
                             null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult();
                     try {
-                        // index the artifact for keyword search
-                        blackboard.postArtifact(newAnalysisResult, MODULE_NAME);
+                        blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null);
                         break;
                     } catch (Blackboard.BlackboardException ex) {
                         LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS

View File

@@ -256,10 +256,12 @@ public class IngestEventsListener {
     }

     /**
-     * Create a "previously seen" hit for a device which was previously seen
-     * in the central repository. NOTE: Artifacts that are too common will be skipped.
+     * Create a "previously seen" hit for a device which was previously seen in
+     * the central repository. NOTE: Artifacts that are too common will be
+     * skipped.
      *
-     * @param originalArtifact the artifact to create the "previously seen" item for
+     * @param originalArtifact the artifact to create the "previously seen" item
+     *                         for
      * @param caseDisplayNames the case names the artifact was previously seen
      *                         in
      * @param aType            The correlation type.
@@ -303,11 +305,11 @@ public class IngestEventsListener {
     }

     /**
-     * Create a "previously unseen" hit for an application which was never seen in
-     * the central repository.
+     * Create a "previously unseen" hit for an application which was never seen
+     * in the central repository.
      *
-     * @param originalArtifact the artifact to create the "previously unseen" item
-     *                         for
+     * @param originalArtifact the artifact to create the "previously unseen"
+     *                         item for
      * @param aType            The correlation type.
      * @param value            The correlation value.
      */
@@ -329,8 +331,10 @@ public class IngestEventsListener {
      * @param newArtifactType          Type of artifact to create.
      * @param originalArtifact         Artifact in current case we want to flag
      * @param attributesForNewArtifact Attributes to assign to the new artifact
-     * @param configuration            The configuration to be specified for the new artifact hit
-     * @param score                    sleuthkit.datamodel.Score to be assigned to this artifact
+     * @param configuration            The configuration to be specified for the
+     *                                 new artifact hit
+     * @param score                    sleuthkit.datamodel.Score to be assigned
+     *                                 to this artifact
      * @param justification            Justification string
      */
     private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact, String configuration,
@@ -347,8 +351,7 @@ public class IngestEventsListener {
                 .getAnalysisResult();
         try {
-            // index the artifact for keyword search
-            blackboard.postArtifact(newArtifact, MODULE_NAME);
+            blackboard.postArtifact(newArtifact, MODULE_NAME, null);
         } catch (Blackboard.BlackboardException ex) {
             LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS
         }

View File

@@ -87,6 +87,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
     private Blackboard blackboard;
     private final boolean createCorrelationProperties;
     private final boolean flagUniqueArtifacts;
+    private IngestJobContext context;

     /**
      * Instantiate the Central Repository ingest module.
@@ -229,6 +230,8 @@ final class CentralRepoIngestModule implements FileIngestModule {
     })
     @Override
     public void startUp(IngestJobContext context) throws IngestModuleException {
+        this.context = context;
+
         IngestEventsListener.incrementCorrelationEngineModuleCount();

         /*
@@ -365,7 +368,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
                 .getAnalysisResult();
         try {
             // index the artifact for keyword search
-            blackboard.postArtifact(tifArtifact, MODULE_NAME);
+            blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId());
         } catch (Blackboard.BlackboardException ex) {
             logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
         }
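The hunks above illustrate a pattern several modules in this commit adopt: cache the IngestJobContext passed to startUp() so the job ID is available wherever artifacts are posted later. A minimal, hypothetical module skeleton under that assumption (names are illustrative, not the actual CentralRepoIngestModule):

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.autopsy.ingest.IngestModule;
    import org.sleuthkit.datamodel.AbstractFile;

    class ContextCachingModule implements FileIngestModule {

        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
            this.context = context; // cached so context.getJobId() is available in process()
        }

        @Override
        public IngestModule.ProcessResult process(AbstractFile file) {
            // ... analysis that posts artifacts with context.getJobId() ...
            return IngestModule.ProcessResult.OK;
        }

        @Override
        public void shutDown() {
        }
    }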

View File

@@ -148,7 +148,7 @@ public class ContactArtifactViewer extends javax.swing.JPanel implements Artifac
     @Override
     public Component getComponent() {
         // Slap a vertical scrollbar on the panel.
-        return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER);
+        return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
     }

     @Override

View File

@@ -149,7 +149,8 @@ class MessageArtifactWorker extends SwingWorker<MessageArtifactWorker.MesssageAr
     static Optional<BlackboardArtifact> getAssociatedArtifact(final BlackboardArtifact artifact) throws TskCoreException {
         BlackboardAttribute attribute = artifact.getAttribute(TSK_ASSOCIATED_TYPE);
         if (attribute != null) {
-            return Optional.of(artifact.getSleuthkitCase().getArtifactByArtifactId(attribute.getValueLong()));
+            //in the context of the Message content viewer the associated artifact will always be a data artifact
+            return Optional.of(artifact.getSleuthkitCase().getBlackboard().getDataArtifactById(attribute.getValueLong()));
         }
         return Optional.empty();
     }

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import java.util.Collection;
 import java.util.List;
 import java.util.logging.Level;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Account;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -263,7 +262,6 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
         // Make sure we have the required fields, otherwise the CommHelper will
         // complain about illegal arguments.
         // These are all the invalid combinations.
-
         if (callerId == null && calleeList.isEmpty()
                 || direction == CommunicationDirection.INCOMING && callerId == null
@@ -289,7 +287,7 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
             if (callerId != null) {
                 try {
                     currentCase.getCommunicationsManager().createAccountFileInstance(
-                            Account.Type.PHONE, callerId, PARSER_NAME, parent);
+                            Account.Type.PHONE, callerId, PARSER_NAME, parent, null);
                 } catch (InvalidAccountIDException ex) {
                     logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex);
                 }
@@ -302,12 +300,11 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
             for (String phone : calleeList) {
                 try {
                     currentCase.getCommunicationsManager().createAccountFileInstance(
-                            Account.Type.PHONE, phone, PARSER_NAME, parent);
+                            Account.Type.PHONE, phone, PARSER_NAME, parent, null);
                 } catch (InvalidAccountIDException ex) {
                     logger.log(Level.WARNING, String.format("Invalid account identifier %s", phone), ex);
                 }
-
                 otherAttributes.add(new BlackboardAttribute(
                         BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER,
                         PARSER_NAME, phone));
@@ -316,13 +313,13 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser {
             if (!otherAttributes.isEmpty()) {
                 BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), otherAttributes);
-                currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
+                currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
             }
         } else {
             // Otherwise we can safely use the helper.
             CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
-                    currentCase, PARSER_NAME, parent, Account.Type.PHONE);
+                    currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);
             helper.addCalllog(direction, callerId, calleeList, startTime,
                     endTime, callType, otherAttributes);
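Throughout the XRY parsers, this commit appends one trailing argument to createAccountFileInstance() and to the CommunicationArtifactsHelper constructor; consistent with the postArtifact() changes elsewhere in the diff, this appears to be a nullable ingest job ID (these parsers run outside an ingest job, hence null). A hedged usage sketch, with all variable names illustrative:

    // Hypothetical values; the parser obtains these from the case and the XRY report.
    CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
            currentCase, PARSER_NAME, parent, Account.Type.PHONE, null /* assumed: no ingest job */);
    helper.addCalllog(direction, callerId, calleeList, startTime,
            endTime, callType, otherAttributes);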

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@ import java.util.List;
 import java.util.logging.Level;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import static org.sleuthkit.autopsy.datasourceprocessors.xry.AbstractSingleEntityParser.PARSER_NAME;
-import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Account;
 import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -136,7 +135,7 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
         // complain about illegal arguments.
         if (phoneNumber != null || homePhoneNumber != null || mobilePhoneNumber != null || hasAnEmail) {
             CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
-                    currentCase, PARSER_NAME, parent, Account.Type.DEVICE);
+                    currentCase, PARSER_NAME, parent, Account.Type.DEVICE, null);

             helper.addContact(contactName, phoneNumber, homePhoneNumber,
                     mobilePhoneNumber, emailAddr, additionalAttributes);
@@ -145,7 +144,7 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser {
             if (!additionalAttributes.isEmpty()) {
                 BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), additionalAttributes);
-                currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME);
+                currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null);
             }
         }
     }

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2019-2020 Basis Technology Corp.
+ * Copyright 2019-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -95,6 +95,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * Indicates if the display name of the XRY key is a recognized type.
         *
         * @param name
+        *
         * @return
         */
        public static boolean contains(String name) {
@@ -114,6 +115,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * contains() before hand.
         *
         * @param name
+        *
         * @return
         */
        public static XryKey fromDisplayName(String name) {
@@ -149,6 +151,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * type.
         *
         * @param xryNamespace
+        *
         * @return
         */
        public static boolean contains(String xryNamespace) {
@@ -169,6 +172,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * contains() before hand.
         *
         * @param xryNamespace
+        *
         * @return
         */
        public static XryNamespace fromDisplayName(String xryNamespace) {
@@ -206,6 +210,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * Indicates if the display name of the XRY key is a recognized type.
         *
         * @param name
+        *
         * @return
         */
        public static boolean contains(String name) {
@@ -225,6 +230,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
         * contains() before hand.
         *
         * @param name
+        *
         * @return
         */
        public static XryMetaKey fromDisplayName(String name) {
@@ -255,7 +261,9 @@ final class XRYMessagesFileParser implements XRYFileParser {
     * @param reader The XRYFileReader that reads XRY entities from the
     *               Message-SMS report.
     * @param parent The parent Content to create artifacts from.
-     * @throws IOException If an I/O error is encountered during report reading
+     *
+     * @throws IOException      If an I/O error is encountered during report
+     *                          reading
     * @throws TskCoreException If an error during artifact creation is
     *                          encountered.
     */
@@ -310,7 +318,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
                } else {
                    try {
                        currentCase.getCommunicationsManager().createAccountFileInstance(
-                                Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent);
+                                Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent, null);
                    } catch (InvalidAccountIDException ex) {
                        logger.log(Level.WARNING, String.format("Invalid account identifier %s", pair.getValue()), ex);
                    }
@@ -429,7 +437,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
        }

        CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper(
-                currentCase, PARSER_NAME, parent, Account.Type.PHONE);
+                currentCase, PARSER_NAME, parent, Account.Type.PHONE, null);

        helper.addMessage(messageType, direction, senderId, recipientIdsList,
                dateTime, readStatus, subject, text, threadId, otherAttributes);
@@ -437,8 +445,8 @@ final class XRYMessagesFileParser implements XRYFileParser {
    }

    /**
-     * Extracts all pairs from the XRY Entity. This function
-     * will unify any segmented text, if need be.
+     * Extracts all pairs from the XRY Entity. This function will unify any
+     * segmented text, if need be.
     */
    private List<XRYKeyValuePair> getXRYKeyValuePairs(String xryEntity,
            XRYFileReader reader, Set<Integer> referenceValues) throws IOException {
@@ -509,9 +517,12 @@ final class XRYMessagesFileParser implements XRYFileParser {
     * single artifact.
     *
     * @param reader               File reader that is producing XRY entities.
-     * @param referenceNumbersSeen All known references numbers up until this point.
+     * @param referenceNumbersSeen All known references numbers up until this
+     *                             point.
     * @param xryEntity            The source XRY entity.
+     *
     * @return
+     *
     * @throws IOException
     */
    private String getSegmentedText(String[] xryEntity, XRYFileReader reader,
@@ -605,6 +616,7 @@ final class XRYMessagesFileParser implements XRYFileParser {
     *
     * @param xryLines XRY entity to extract from.
     * @param metaKey  The key type to extract.
+     *
     * @return
     */
    private Optional<Integer> getMetaKeyValue(String[] xryLines, XryMetaKey metaKey) {
@@ -629,10 +641,12 @@ final class XRYMessagesFileParser implements XRYFileParser {
    /**
     * Extracts the ith XRY Key Value pair in the XRY Entity.
     *
-     * The total number of pairs can be determined via getCountOfKeyValuePairs().
+     * The total number of pairs can be determined via
+     * getCountOfKeyValuePairs().
     *
     * @param xryLines XRY entity.
     * @param index    The requested Key Value pair.
+     *
     * @return
     */
    private Optional<XRYKeyValuePair> getKeyValuePairByIndex(String[] xryLines, int index) {

View File

@@ -388,10 +388,10 @@ public class ResultsSorter implements Comparator<Result> {
             Bundle.FileSorter_SortingMethod_keywordlist_displayName()), // Sort alphabetically by list of keyword list names found
     BY_FULL_PATH(new ArrayList<>(),
             Bundle.FileSorter_SortingMethod_fullPath_displayName()), // Sort alphabetically by path
-    BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
-    BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
-    BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
-    BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());
+    BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()),
+    BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()),
+    BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()),
+    BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName());

     private final String displayName;
     private final List<DiscoveryAttributes.AttributeType> requiredAttributes;

View File

@@ -111,10 +111,13 @@ class SampleFileIngestModule implements FileIngestModule {
                 addToBlackboardPostCount(context.getJobId(), 1L);

                 /*
-                 * post the artifact which will index the artifact for keyword
-                 * search, and fire an event to notify UI of this new artifact
+                 * Post the artifact to the blackboard. Doing so will cause events
+                 * to be published that will trigger additional analysis, if
+                 * applicable. For example, the creation of timeline events,
+                 * indexing of the artifact for keyword search, and analysis by the
+                 * data artifact ingest modules if the artifact is a data artifact.
                  */
-                file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
+                file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName(), context.getJobId());

                 return IngestModule.ProcessResult.OK;

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2015-2017 Basis Technology Corp.
+ * Copyright 2015-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -21,66 +21,76 @@ package org.sleuthkit.autopsy.guiutils;
 import java.awt.Component;
 import java.time.Duration;
 import javax.swing.JTable;
-import static javax.swing.SwingConstants.CENTER;

 /**
  * A JTable cell renderer that renders a duration represented as a long as a
  * string with days, hours, minutes, and seconds components. It center-aligns
  * cell content and grays out the cell if the table is disabled.
  */
-public class DurationCellRenderer extends GrayableCellRenderer {
+public final class DurationCellRenderer extends GrayableCellRenderer {

     private static final long serialVersionUID = 1L;
+    private static final char UNIT_SEPARATOR_CHAR = ':';

     public DurationCellRenderer() {
-        setHorizontalAlignment(CENTER);
+        setHorizontalAlignment(LEFT);
     }

     @Override
     public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
         if (value instanceof Long) {
-            {
             setText(DurationCellRenderer.longToDurationString((long) value));
-            }
         }
         grayCellIfTableNotEnabled(table, isSelected);
         return this;
     }

+    public static char getUnitSeperator() {
+        return UNIT_SEPARATOR_CHAR;
+    }
+
     /**
      * Convert a duration represented by a long to a human readable string with
      * with days, hours, minutes, and seconds components.
      *
-     * @param duration - the representation of the duration in long form
+     * @param duration - The representation of the duration in long form.
      *
-     * @return - the representation of the duration in String form.
+     * @return - The representation of the duration in String form.
      */
     public static String longToDurationString(long duration) {
         Duration d = Duration.ofMillis(duration);
         if (d.isNegative()) {
-            d = Duration.ofMillis(-duration);
+            d = Duration.ofMillis(0); //it being 0 for a few seconds seems preferable to it counting down to 0 then back up from 0
         }
-        String result;
         long days = d.toDays();
         long hours = d.minusDays(days).toHours();
         long minutes = d.minusDays(days).minusHours(hours).toMinutes();
         long seconds = d.minusDays(days).minusHours(hours).minusMinutes(minutes).getSeconds();
-        if (minutes > 0) {
-            if (hours > 0) {
-                if (days > 0) {
-                    result = days + " d " + hours + " h " + minutes + " m " + seconds + " s";
-                } else {
-                    result = hours + " h " + minutes + " m " + seconds + " s";
-                }
-            } else {
-                result = minutes + " m " + seconds + " s";
-            }
-        } else {
-            result = seconds + " s";
-        }
-        return result;
+        if (days < 0) {
+            days = 0;
+        }
+        if (hours < 0) {
+            hours = 0;
+        }
+        if (minutes < 0) {
+            minutes = 0;
+        }
+        if (seconds < 0) {
+            seconds = 0;
+        }
+        StringBuilder results = new StringBuilder(12);
+        if (days < 99) {
+            results.append(String.format("%02d", days));
+        } else {
+            results.append(days); //in the off chance something has been running for over 99 days lets allow it to stand out a bit by having as many characters as it needs
+        }
+        results.append(UNIT_SEPARATOR_CHAR);
+        results.append(String.format("%02d", hours));
+        results.append(UNIT_SEPARATOR_CHAR);
+        results.append(String.format("%02d", minutes));
+        results.append(UNIT_SEPARATOR_CHAR);
+        results.append(String.format("%02d", seconds));
+        return results.toString();
     }
 }
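For reference, a quick hypothetical check of the new zero-padded, colon-separated duration format (not part of the diff):

    // 1 day, 2 hours, 3 minutes, 4 seconds expressed in milliseconds.
    long millis = ((((1L * 24 + 2) * 60 + 3) * 60) + 4) * 1000;
    System.out.println(DurationCellRenderer.longToDurationString(millis)); // prints "01:02:03:04"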

View File

@@ -23,27 +23,28 @@ import java.util.Optional;
 import org.sleuthkit.datamodel.DataArtifact;

 /**
- * A pipeline of data artifact ingest modules used to execute data artifact
+ * A pipeline of data artifact ingest modules used to perform data artifact
  * ingest tasks for an ingest job.
  */
-final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIngestTask> {
+final class DataArtifactIngestPipeline extends IngestPipeline<DataArtifactIngestTask> {

     /**
-     * Constructs a pipeline of data artifact ingest modules used to execute
-     * data artifact ingest tasks for an ingest job.
+     * Constructs a pipeline of data artifact ingest modules used to perform
+     * data artifact ingest tasks for an ingest job.
      *
-     * @param ingestJobPipeline The ingest job pipeline that owns this ingest
-     *                          task pipeline.
-     * @param moduleTemplates   The ingest module templates that define this
-     *                          pipeline. May be an empty list.
+     * @param ingestJobExecutor The ingest job executor for this pipeline.
+     * @param moduleTemplates   The ingest module templates to be used to
+     *                          construct the ingest modules for this pipeline.
+     *                          May be an empty list if this type of pipeline is
+     *                          not needed for the ingest job.
      */
-    DataArtifactIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
-        super(ingestJobPipeline, moduleTemplates);
+    DataArtifactIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
+        super(ingestJobExecutor, moduleTemplates);
     }

     @Override
     Optional<PipelineModule<DataArtifactIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
-        Optional<IngestTaskPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
+        Optional<IngestPipeline.PipelineModule<DataArtifactIngestTask>> module = Optional.empty();
         if (template.isDataArtifactIngestModuleTemplate()) {
             DataArtifactIngestModule ingestModule = template.createDataArtifactIngestModule();
             module = Optional.of(new DataArtifactIngestPipelineModule(ingestModule, template.getModuleName()));
@@ -52,18 +53,18 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
     }

     @Override
-    void prepareForTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
+    void prepareForTask(DataArtifactIngestTask task) throws IngestPipelineException {
     }

     @Override
-    void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestTaskPipelineException {
+    void cleanUpAfterTask(DataArtifactIngestTask task) throws IngestPipelineException {
     }

     /**
      * A decorator that adds ingest infrastructure operations to a data artifact
      * ingest module.
      */
-    static final class DataArtifactIngestPipelineModule extends IngestTaskPipeline.PipelineModule<DataArtifactIngestTask> {
+    static final class DataArtifactIngestPipelineModule extends IngestPipeline.PipelineModule<DataArtifactIngestTask> {

         private final DataArtifactIngestModule module;
@@ -80,7 +81,7 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline<DataArtifactIn
         }

         @Override
-        void executeTask(IngestJobPipeline ingestJobPipeline, DataArtifactIngestTask task) throws IngestModuleException {
+        void process(IngestJobExecutor ingestJobExecutor, DataArtifactIngestTask task) throws IngestModuleException {
             DataArtifact artifact = task.getDataArtifact();
             module.process(artifact);
         }

View File

@@ -22,7 +22,7 @@ import org.sleuthkit.datamodel.DataArtifact;

 /**
  * A data artifact ingest task that will be executed by an ingest thread using a
- * given ingest job pipeline.
+ * given ingest job executor.
  */
 final class DataArtifactIngestTask extends IngestTask {
@@ -30,14 +30,14 @@ final class DataArtifactIngestTask extends IngestTask {
     /**
      * Constructs a data artifact ingest task that will be executed by an ingest
-     * thread using a given ingest job pipeline.
+     * thread using a given ingest job executor.
      *
-     * @param ingestJobPipeline The ingest job pipeline to use to execute the
+     * @param ingestJobExecutor The ingest job executor to use to execute the
      *                          task.
      * @param artifact          The data artifact to be processed.
      */
-    DataArtifactIngestTask(IngestJobPipeline ingestJobPipeline, DataArtifact artifact) {
-        super(ingestJobPipeline);
+    DataArtifactIngestTask(IngestJobExecutor ingestJobExecutor, DataArtifact artifact) {
+        super(ingestJobExecutor);
         this.artifact = artifact;
     }
@@ -53,7 +53,7 @@ final class DataArtifactIngestTask extends IngestTask {
     @Override
     void execute(long threadId) {
         super.setThreadId(threadId);
-        getIngestJobPipeline().execute(this);
+        getIngestJobExecutor().execute(this);
     }
 }

View File

@@ -23,10 +23,10 @@ package org.sleuthkit.autopsy.ingest;
  */
 public class DataSourceIngestModuleProgress {

-    private final IngestJobPipeline ingestJobPipeline;
+    private final IngestJobExecutor ingestJobExecutor;

-    DataSourceIngestModuleProgress(IngestJobPipeline pipeline) {
-        this.ingestJobPipeline = pipeline;
+    DataSourceIngestModuleProgress(IngestJobExecutor ingestJobExecutor) {
+        this.ingestJobExecutor = ingestJobExecutor;
     }

     /**
@@ -38,7 +38,7 @@ public class DataSourceIngestModuleProgress {
      * data source.
      */
     public void switchToDeterminate(int workUnits) {
-        this.ingestJobPipeline.switchDataSourceIngestProgressBarToDeterminate(workUnits);
+        ingestJobExecutor.switchDataSourceIngestProgressBarToDeterminate(workUnits);
     }

     /**
@@ -46,7 +46,7 @@ public class DataSourceIngestModuleProgress {
      * the total work units to process the data source is unknown.
      */
     public void switchToIndeterminate() {
-        this.ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
+        ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
     }

     /**
@@ -56,7 +56,7 @@ public class DataSourceIngestModuleProgress {
      * @param workUnits Number of work units performed so far by the module.
      */
     public void progress(int workUnits) {
-        this.ingestJobPipeline.advanceDataSourceIngestProgressBar("", workUnits);
+        ingestJobExecutor.advanceDataSourceIngestProgressBar("", workUnits);
     }

     /**
@@ -65,7 +65,7 @@ public class DataSourceIngestModuleProgress {
      * @param message Message to display
      */
     public void progress(String message) {
-        this.ingestJobPipeline.advanceDataSourceIngestProgressBar(message);
+        ingestJobExecutor.advanceDataSourceIngestProgressBar(message);
     }

     /**
@@ -76,7 +76,7 @@ public class DataSourceIngestModuleProgress {
      * @param workUnits Number of work units performed so far by the module.
      */
     public void progress(String currentTask, int workUnits) {
-        this.ingestJobPipeline.advanceDataSourceIngestProgressBar(currentTask, workUnits);
+        ingestJobExecutor.advanceDataSourceIngestProgressBar(currentTask, workUnits);
     }
 }
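The class above is the progress API handed to data source ingest modules. A hypothetical process() implementation showing typical use (all counts illustrative; note that progress(String, int) reports cumulative work units performed so far, per the javadoc above):

    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        int totalSlices = 100;                      // assumed known up front
        progressBar.switchToDeterminate(totalSlices);
        for (int i = 1; i <= totalSlices; i++) {
            // ... analyze one slice of the data source ...
            progressBar.progress("Slice " + i, i);  // message plus units completed so far
        }
        return ProcessResult.OK;
    }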

View File

@@ -29,7 +29,7 @@ import org.sleuthkit.datamodel.Content;
  * A pipeline of data source level ingest modules for executing data source
  * level ingest tasks for an ingest job.
  */
-final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngestTask> {
+final class DataSourceIngestPipeline extends IngestPipeline<DataSourceIngestTask> {

     private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
     private static final IngestManager ingestManager = IngestManager.getInstance();
@@ -38,17 +38,19 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
      * Constructs a pipeline of data source level ingest modules for performing
      * data source level ingest tasks for an ingest job.
      *
-     * @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
-     * @param moduleTemplates   The ingest module templates that define this
-     *                          pipeline.
+     * @param ingestJobExecutor The ingest job executor for this pipeline.
+     * @param moduleTemplates   The ingest module templates to be used to
+     *                          construct the ingest modules for this pipeline.
+     *                          May be an empty list if this type of pipeline is
+     *                          not needed for the ingest job.
      */
-    DataSourceIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
-        super(ingestJobPipeline, moduleTemplates);
+    DataSourceIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
+        super(ingestJobExecutor, moduleTemplates);
     }

     @Override
-    Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
-        Optional<IngestTaskPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
+    Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
+        Optional<IngestPipeline.PipelineModule<DataSourceIngestTask>> module = Optional.empty();
         if (template.isDataSourceIngestModuleTemplate()) {
             DataSourceIngestModule ingestModule = template.createDataSourceIngestModule();
             module = Optional.of(new DataSourcePipelineModule(ingestModule, template.getModuleName()));
@@ -69,7 +71,7 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
      * A wrapper that adds ingest infrastructure operations to a data source
      * level ingest module.
      */
-    static final class DataSourcePipelineModule extends IngestTaskPipeline.PipelineModule<DataSourceIngestTask> {
+    static final class DataSourcePipelineModule extends IngestPipeline.PipelineModule<DataSourceIngestTask> {

         private final DataSourceIngestModule module;
@@ -83,17 +85,17 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline<DataSourceIngest
         }

         @Override
-        void executeTask(IngestJobPipeline ingestJobPipeline, DataSourceIngestTask task) throws IngestModuleException {
+        void process(IngestJobExecutor ingestJobExecutor, DataSourceIngestTask task) throws IngestModuleException {
             Content dataSource = task.getDataSource();
             String progressBarDisplayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.displayName", getDisplayName(), dataSource.getName());
-            ingestJobPipeline.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
-            ingestJobPipeline.switchDataSourceIngestProgressBarToIndeterminate();
+            ingestJobExecutor.updateDataSourceIngestProgressBarDisplayName(progressBarDisplayName);
+            ingestJobExecutor.switchDataSourceIngestProgressBarToIndeterminate();
             ingestManager.setIngestTaskProgress(task, getDisplayName());
             logger.log(Level.INFO, "{0} analysis of {1} starting", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
-            module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobPipeline));
+            module.process(dataSource, new DataSourceIngestModuleProgress(ingestJobExecutor));
             logger.log(Level.INFO, "{0} analysis of {1} finished", new Object[]{getDisplayName(), dataSource.getName()}); //NON-NLS
-            if (!ingestJobPipeline.isCancelled() && ingestJobPipeline.currentDataSourceIngestModuleIsCancelled()) {
-                ingestJobPipeline.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
+            if (!ingestJobExecutor.isCancelled() && ingestJobExecutor.currentDataSourceIngestModuleIsCancelled()) {
+                ingestJobExecutor.currentDataSourceIngestModuleCancellationCompleted(getDisplayName());
             }
         }

View File

@@ -20,25 +20,25 @@ package org.sleuthkit.autopsy.ingest;

 /**
  * A data source level ingest task that will be executed by an ingest thread
- * using a given ingest job pipeline.
+ * using a given ingest job executor.
  */
 final class DataSourceIngestTask extends IngestTask {

     /**
      * Constructs a data source level ingest task that will be executed by an
-     * ingest thread using a given ingest job pipeline.
+     * ingest thread using a given ingest job executor.
      *
-     * @param ingestJobPipeline The ingest job pipeline to use to execute the
+     * @param ingestJobExecutor The ingest job executor to use to execute the
      *                          task.
      */
-    DataSourceIngestTask(IngestJobPipeline ingestJobPipeline) {
-        super(ingestJobPipeline);
+    DataSourceIngestTask(IngestJobExecutor ingestJobExecutor) {
+        super(ingestJobExecutor);
     }

     @Override
     void execute(long threadId) {
         super.setThreadId(threadId);
-        getIngestJobPipeline().execute(this);
+        getIngestJobExecutor().execute(this);
     }
 }

View File

@@ -39,32 +39,34 @@ import org.sleuthkit.datamodel.TskCoreException;
 @NbBundle.Messages({
     "FileIngestPipeline_SaveResults_Activity=Saving Results"
 })
-final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
+final class FileIngestPipeline extends IngestPipeline<FileIngestTask> {

     private static final int FILE_BATCH_SIZE = 500;
     private static final String SAVE_RESULTS_ACTIVITY = Bundle.FileIngestPipeline_SaveResults_Activity();
     private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName());
     private static final IngestManager ingestManager = IngestManager.getInstance();
-    private final IngestJobPipeline ingestJobPipeline;
+    private final IngestJobExecutor ingestJobExecutor;
     private final List<AbstractFile> fileBatch;

     /**
      * Constructs a pipeline of file ingest modules for executing file ingest
      * tasks for an ingest job.
      *
-     * @param ingestJobPipeline The ingest job pipeline that owns this pipeline.
-     * @param moduleTemplates   The ingest module templates that define this
-     *                          pipeline.
+     * @param ingestJobExecutor The ingest job executor for this pipeline.
+     * @param moduleTemplates   The ingest module templates to be used to
+     *                          construct the ingest modules for this pipeline.
+     *                          May be an empty list if this type of pipeline is
+     *                          not needed for the ingest job.
      */
-    FileIngestPipeline(IngestJobPipeline ingestJobPipeline, List<IngestModuleTemplate> moduleTemplates) {
-        super(ingestJobPipeline, moduleTemplates);
-        this.ingestJobPipeline = ingestJobPipeline;
+    FileIngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
+        super(ingestJobExecutor, moduleTemplates);
+        this.ingestJobExecutor = ingestJobExecutor;
         fileBatch = new ArrayList<>();
     }

     @Override
-    Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
-        Optional<IngestTaskPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
+    Optional<IngestPipeline.PipelineModule<FileIngestTask>> acceptModuleTemplate(IngestModuleTemplate template) {
+        Optional<IngestPipeline.PipelineModule<FileIngestTask>> module = Optional.empty();
         if (template.isFileIngestModuleTemplate()) {
             FileIngestModule ingestModule = template.createFileIngestModule();
             module = Optional.of(new FileIngestPipelineModule(ingestModule, template.getModuleName()));
@@ -73,18 +75,18 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
     }

     @Override
-    void prepareForTask(FileIngestTask task) throws IngestTaskPipelineException {
+    void prepareForTask(FileIngestTask task) throws IngestPipelineException {
     }

     @Override
-    void cleanUpAfterTask(FileIngestTask task) throws IngestTaskPipelineException {
+    void cleanUpAfterTask(FileIngestTask task) throws IngestPipelineException {
         try {
             ingestManager.setIngestTaskProgress(task, SAVE_RESULTS_ACTIVITY);
             AbstractFile file = task.getFile();
             file.close();
             cacheFileForBatchUpdate(file);
         } catch (TskCoreException ex) {
-            throw new IngestTaskPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
+            throw new IngestPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS
         } finally {
             ingestManager.setIngestTaskProgressCompleted(task);
         }
@@ -96,7 +98,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
         Date start = new Date();
         try {
             updateBatchedFiles();
-        } catch (IngestTaskPipelineException ex) {
+        } catch (IngestPipelineException ex) {
             errors.add(new IngestModuleError(SAVE_RESULTS_ACTIVITY, ex));
         }
         Date finish = new Date();
@@ -113,9 +115,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
      *
      * @param file The file.
      *
-     * @throws IngestTaskPipelineException if the case database update fails.
+     * @throws IngestPipelineException if the case database update fails.
      */
-    private void cacheFileForBatchUpdate(AbstractFile file) throws IngestTaskPipelineException {
+    private void cacheFileForBatchUpdate(AbstractFile file) throws IngestPipelineException {
         /*
          * Only one file ingest thread at a time will try to access the file
          * cache. The synchronization here is to ensure visibility of the files
@@ -134,9 +136,9 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
      * Updates the case database with new properties added to the files in the
      * cache by the ingest modules that processed them.
      *
-     * @throws IngestTaskPipelineException if the case database update fails.
+     * @throws IngestPipelineException if the case database update fails.
      */
-    private void updateBatchedFiles() throws IngestTaskPipelineException {
+    private void updateBatchedFiles() throws IngestPipelineException {
         /*
          * Only one file ingest thread at a time will try to access the file
          * cache. The synchronization here is to ensure visibility of the files
@@ -146,7 +148,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
         synchronized (fileBatch) {
             CaseDbTransaction transaction = null;
             try {
-                if (!ingestJobPipeline.isCancelled()) {
+                if (!ingestJobExecutor.isCancelled()) {
                     Case currentCase = Case.getCurrentCaseThrows();
                     SleuthkitCase caseDb = currentCase.getSleuthkitCase();
                     transaction = caseDb.beginTransaction();
@@ -166,7 +168,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
                     logger.log(Level.SEVERE, "Error rolling back transaction after failure to save updated properties for cached files from tasks", ex1);
                 }
             }
-            throw new IngestTaskPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
+            throw new IngestPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS
         } finally {
             fileBatch.clear();
         }
@@ -177,7 +179,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
      * A wrapper that adds ingest infrastructure operations to a file ingest
      * module.
      */
-    static final class FileIngestPipelineModule extends IngestTaskPipeline.PipelineModule<FileIngestTask> {
+    static final class FileIngestPipelineModule extends IngestPipeline.PipelineModule<FileIngestTask> {

         private final FileIngestModule module;
@@ -195,7 +197,7 @@ final class FileIngestPipeline extends IngestTaskPipeline<FileIngestTask> {
         }

         @Override
-        void executeTask(IngestJobPipeline ingestJobPipeline, FileIngestTask task) throws IngestModuleException {
+        void process(IngestJobExecutor ingestJobExecutor, FileIngestTask task) throws IngestModuleException {
             AbstractFile file = null;
             try {
                 file = task.getFile();
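For orientation, a condensed, hypothetical sketch of the batching pattern used by cleanUpAfterTask() above: processed files accumulate in a synchronized cache, and their updated properties are saved in one case database transaction per batch (the full class, including the CaseDbTransaction handling in updateBatchedFiles(), is only partially shown in this diff):

    private static final int FILE_BATCH_SIZE = 500;
    private final List<AbstractFile> fileBatch = new ArrayList<>();

    private void cacheFileForBatchUpdate(AbstractFile file) throws IngestPipelineException {
        // Synchronized so concurrent file ingest threads see a consistent cache.
        synchronized (fileBatch) {
            fileBatch.add(file);
            if (fileBatch.size() >= FILE_BATCH_SIZE) {
                updateBatchedFiles(); // flushes the batch inside a single CaseDbTransaction
            }
        }
    }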

View File

@ -25,7 +25,7 @@ import org.sleuthkit.datamodel.TskCoreException;
/** /**
* A file ingest task that will be executed by an ingest thread using a given * A file ingest task that will be executed by an ingest thread using a given
* ingest job pipeline. * ingest job executor.
*/ */
final class FileIngestTask extends IngestTask { final class FileIngestTask extends IngestTask {
@ -34,13 +34,13 @@ final class FileIngestTask extends IngestTask {
/** /**
* Constructs a file ingest task that will be executed by an ingest thread * Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline. * using a given ingest job executor.
* *
* @param ingestJobPipeline The ingest job pipeline to use to execute the * @param ingestJobPipeline The ingest job executor to use to execute the
* task. * task.
* @param file The file to be processed. * @param file The file to be processed.
*/ */
FileIngestTask(IngestJobPipeline ingestJobPipeline, AbstractFile file) { FileIngestTask(IngestJobExecutor ingestJobPipeline, AbstractFile file) {
super(ingestJobPipeline); super(ingestJobPipeline);
this.file = file; this.file = file;
fileId = file.getId(); fileId = file.getId();
@ -48,15 +48,15 @@ final class FileIngestTask extends IngestTask {
/** /**
* Constructs a file ingest task that will be executed by an ingest thread * Constructs a file ingest task that will be executed by an ingest thread
* using a given ingest job pipeline. This constructor supports streaming * using a given ingest job executor. This constructor supports streaming
* ingest by deferring the construction of the AbstractFile object for this * ingest by deferring the construction of the AbstractFile object for this
* task to conserve heap memory. * task to conserve heap memory.
* *
* @param ingestJobPipeline The ingest job pipeline to use to execute the * @param ingestJobPipeline The ingest job executor to use to execute the
* task. * task.
* @param fileId The object ID of the file to be processed. * @param fileId The object ID of the file to be processed.
*/ */
FileIngestTask(IngestJobPipeline ingestJobPipeline, long fileId) { FileIngestTask(IngestJobExecutor ingestJobPipeline, long fileId) {
super(ingestJobPipeline); super(ingestJobPipeline);
this.fileId = fileId; this.fileId = fileId;
} }
@ -88,7 +88,7 @@ final class FileIngestTask extends IngestTask {
@Override @Override
void execute(long threadId) { void execute(long threadId) {
super.setThreadId(threadId); super.setThreadId(threadId);
getIngestJobPipeline().execute(this); getIngestJobExecutor().execute(this);
} }
@Override @Override
@ -100,19 +100,19 @@ final class FileIngestTask extends IngestTask {
return false; return false;
} }
FileIngestTask other = (FileIngestTask) obj; FileIngestTask other = (FileIngestTask) obj;
IngestJobPipeline thisPipeline = getIngestJobPipeline(); IngestJobExecutor thisPipeline = getIngestJobExecutor();
IngestJobPipeline otherPipeline = other.getIngestJobPipeline(); IngestJobExecutor otherPipeline = other.getIngestJobExecutor();
if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) { if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) {
return false; return false;
} }
return (this.fileId == other.fileId); return (getFileId() == other.getFileId());
} }
@Override @Override
public int hashCode() { public int hashCode() {
int hash = 5; int hash = 5;
hash = 47 * hash + Objects.hashCode(getIngestJobPipeline()); hash = 47 * hash + Objects.hashCode(getIngestJobExecutor());
hash = 47 * hash + Objects.hashCode(this.fileId); hash = 47 * hash + Objects.hashCode(getFileId());
return hash; return hash;
} }
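The rewritten equals() and hashCode() above go through the getIngestJobExecutor() and getFileId() accessors instead of reading fields directly, which keeps the two methods consistent with each other. A minimal sketch of the same identity pattern, using stand-in types since FileIngestTask and IngestJobExecutor are package-private:

    import java.util.Objects;

    // Minimal sketch of the identity pattern used by FileIngestTask; TaskKey and
    // its executor field are stand-ins, not Autopsy types.
    final class TaskKey {

        private final Object executor; // stands in for IngestJobExecutor
        private final long fileId;

        TaskKey(Object executor, long fileId) {
            this.executor = executor;
            this.fileId = fileId;
        }

        Object getExecutor() {
            return executor;
        }

        long getFileId() {
            return fileId;
        }

        @Override
        public boolean equals(Object obj) {
            if (!(obj instanceof TaskKey)) {
                return false;
            }
            TaskKey other = (TaskKey) obj;
            // equals() and hashCode() use the same accessors, preserving the contract.
            return Objects.equals(getExecutor(), other.getExecutor())
                    && getFileId() == other.getFileId();
        }

        @Override
        public int hashCode() {
            int hash = 5;
            hash = 47 * hash + Objects.hashCode(getExecutor());
            hash = 47 * hash + Objects.hashCode(getFileId());
            return hash;
        }
    }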

View File

@ -28,10 +28,11 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/** /**
* Analyzes one or more data sources using a set of ingest modules specified via * Analyzes a data source using a set of ingest modules specified via ingest
* ingest job settings. * job settings.
*/ */
public final class IngestJob { public final class IngestJob {
@ -73,17 +74,17 @@ public final class IngestJob {
private final List<AbstractFile> files = new ArrayList<>(); private final List<AbstractFile> files = new ArrayList<>();
private final Mode ingestMode; private final Mode ingestMode;
private final IngestJobSettings settings; private final IngestJobSettings settings;
private volatile IngestJobPipeline ingestJobPipeline; private volatile IngestJobExecutor ingestModuleExecutor;
private volatile CancellationReason cancellationReason; private volatile CancellationReason cancellationReason;
/** /**
* Constructs a batch mode ingest job that analyzes a data source using a * Constructs a batch mode ingest job that analyzes a data source using a
* set of ingest modules specified via ingest job settings. Either all of * set of ingest modules specified via ingest job settings.
* the files in the data source or a given subset of the files will be
* analyzed.
* *
* @param dataSource The data source to be analyzed. * @param dataSource The data source to be analyzed.
* @param files A subset of the files from the data source. * @param files A subset of the files from the data source to be
* analyzed, may be empty if all of the files should be
* analyzed.
* @param settings The ingest job settings. * @param settings The ingest job settings.
*/ */
IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) { IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
@ -91,13 +92,6 @@ public final class IngestJob {
this.files.addAll(files); this.files.addAll(files);
} }
/**
* Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an
* ingest stream.
*
* @param settings The ingest job settings.
*/
/** /**
* Constructs an ingest job that analyzes a data source using a set of * Constructs an ingest job that analyzes a data source using a set of
* ingest modules specified via ingest job settings, possibly using an * ingest modules specified via ingest job settings, possibly using an
@ -108,7 +102,7 @@ public final class IngestJob {
* @param settings The ingest job settings. * @param settings The ingest job settings.
*/ */
IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) { IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) {
this.id = IngestJob.nextId.getAndIncrement(); id = IngestJob.nextId.getAndIncrement();
this.dataSource = dataSource; this.dataSource = dataSource;
this.settings = settings; this.settings = settings;
this.ingestMode = ingestMode; this.ingestMode = ingestMode;
@ -125,6 +119,15 @@ public final class IngestJob {
return this.id; return this.id;
} }
/**
* Gets the data source to be analyzed by this job.
*
* @return The data source.
*/
Content getDataSource() {
return dataSource;
}
/** /**
* Checks to see if this ingest job has at least one non-empty ingest module * Checks to see if this ingest job has at least one non-empty ingest module
* pipeline. * pipeline.
@ -136,31 +139,41 @@ public final class IngestJob {
} }
/** /**
* Adds a set of files to this ingest job if it is running in streaming * Adds a set of files to this ingest job, if it is running in streaming
* ingest mode. * ingest mode.
* *
* @param fileObjIds The object IDs of the files. * @param fileObjIds The object IDs of the files.
*/ */
void addStreamingIngestFiles(List<Long> fileObjIds) { void addStreamedFiles(List<Long> fileObjIds) {
if (ingestMode == Mode.STREAMING) { if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
ingestJobPipeline.addStreamedFiles(fileObjIds); ingestModuleExecutor.addStreamedFiles(fileObjIds);
} else { } else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files with no ingest pipeline"); logger.log(Level.SEVERE, "Attempted to add streamed files with no ingest pipeline");
} }
} else { } else {
logger.log(Level.SEVERE, "Attempted to add streamed ingest files to batch ingest job"); logger.log(Level.SEVERE, "Attempted to add streamed files to batch ingest job");
} }
} }
/**
* Adds one or more data artifacts to this ingest job for processing by its
* data artifact ingest modules.
*
* @param dataArtifacts The data artifacts.
*/
void addDataArtifacts(List<DataArtifact> dataArtifacts) {
ingestModuleExecutor.addDataArtifacts(dataArtifacts);
}
/** /**
* Starts data source level analysis for this job if it is running in * Starts data source level analysis for this job if it is running in
* streaming ingest mode. * streaming ingest mode.
*/ */
void processStreamingIngestDataSource() { void processStreamingIngestDataSource() {
if (ingestMode == Mode.STREAMING) { if (ingestMode == Mode.STREAMING) {
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
ingestJobPipeline.addStreamedDataSource(); ingestModuleExecutor.startStreamingModeDataSourceAnalysis();
} else { } else {
logger.log(Level.SEVERE, "Attempted to start data source analysis with no ingest pipeline"); logger.log(Level.SEVERE, "Attempted to start data source analysis with no ingest pipeline");
} }
@ -176,16 +189,16 @@ public final class IngestJob {
* @return A collection of ingest module start up errors, empty on success. * @return A collection of ingest module start up errors, empty on success.
*/ */
synchronized List<IngestModuleError> start() throws InterruptedException { synchronized List<IngestModuleError> start() throws InterruptedException {
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
logger.log(Level.SEVERE, "Attempt to start ingest job that has already been started"); logger.log(Level.SEVERE, "Attempt to start ingest job that has already been started");
return Collections.emptyList(); return Collections.emptyList();
} }
ingestJobPipeline = new IngestJobPipeline(this, dataSource, files, settings); ingestModuleExecutor = new IngestJobExecutor(this, dataSource, files, settings);
List<IngestModuleError> errors = new ArrayList<>(); List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(ingestJobPipeline.startUp()); errors.addAll(ingestModuleExecutor.startUp());
if (errors.isEmpty()) { if (errors.isEmpty()) {
IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestJobPipeline.getDataSource()); IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestModuleExecutor.getDataSource());
} else { } else {
cancel(CancellationReason.INGEST_MODULES_STARTUP_FAILED); cancel(CancellationReason.INGEST_MODULES_STARTUP_FAILED);
} }
@ -220,7 +233,7 @@ public final class IngestJob {
*/ */
public ProgressSnapshot getSnapshot(boolean includeIngestTasksSnapshot) { public ProgressSnapshot getSnapshot(boolean includeIngestTasksSnapshot) {
ProgressSnapshot snapshot = null; ProgressSnapshot snapshot = null;
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
return new ProgressSnapshot(includeIngestTasksSnapshot); return new ProgressSnapshot(includeIngestTasksSnapshot);
} }
return snapshot; return snapshot;
@ -233,8 +246,8 @@ public final class IngestJob {
*/ */
Snapshot getDiagnosticStatsSnapshot() { Snapshot getDiagnosticStatsSnapshot() {
Snapshot snapshot = null; Snapshot snapshot = null;
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(true); snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(true);
} }
return snapshot; return snapshot;
} }
@ -272,8 +285,8 @@ public final class IngestJob {
* ingest manager's ingest jobs list lock. * ingest manager's ingest jobs list lock.
*/ */
new Thread(() -> { new Thread(() -> {
if (ingestJobPipeline != null) { if (ingestModuleExecutor != null) {
ingestJobPipeline.cancel(reason); ingestModuleExecutor.cancel(reason);
} }
}).start(); }).start();
} }
@ -284,7 +297,7 @@ public final class IngestJob {
* @return The cancellation reason, may be not cancelled. * @return The cancellation reason, may be not cancelled.
*/ */
public CancellationReason getCancellationReason() { public CancellationReason getCancellationReason() {
return this.cancellationReason; return cancellationReason;
} }
/** /**
@ -294,18 +307,16 @@ public final class IngestJob {
* @return True or false. * @return True or false.
*/ */
public boolean isCancelled() { public boolean isCancelled() {
return (CancellationReason.NOT_CANCELLED != this.cancellationReason); return (CancellationReason.NOT_CANCELLED != cancellationReason);
} }
/** /**
* Provides a callback for the ingest modules pipeline, allowing this ingest * Provides a callback for the ingest module executor, allowing this ingest
* job to notify the ingest manager when it is complete. * job to notify the ingest manager when it is complete.
*
* @param ingestJobPipeline A completed ingestJobPipeline.
*/ */
void notifyIngestPipelineShutDown() { void notifyIngestPipelinesShutDown() {
IngestManager ingestManager = IngestManager.getInstance(); IngestManager ingestManager = IngestManager.getInstance();
if (!ingestJobPipeline.isCancelled()) { if (!ingestModuleExecutor.isCancelled()) {
ingestManager.fireDataSourceAnalysisCompleted(id, dataSource); ingestManager.fireDataSourceAnalysisCompleted(id, dataSource);
} else { } else {
IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, dataSource); IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, dataSource);
@ -423,11 +434,7 @@ public final class IngestJob {
* stats part of the snapshot. * stats part of the snapshot.
*/ */
private ProgressSnapshot(boolean includeIngestTasksSnapshot) { private ProgressSnapshot(boolean includeIngestTasksSnapshot) {
/* Snapshot snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
* Note that the getSnapshot() will not construct a ProgressSnapshot
* if ingestJobPipeline is null.
*/
Snapshot snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot);
dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot); dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot);
jobCancellationRequested = IngestJob.this.isCancelled(); jobCancellationRequested = IngestJob.this.isCancelled();
jobCancellationReason = IngestJob.this.getCancellationReason(); jobCancellationReason = IngestJob.this.getCancellationReason();
@ -444,7 +451,7 @@ public final class IngestJob {
DataSourceIngestModuleHandle moduleHandle = null; DataSourceIngestModuleHandle moduleHandle = null;
DataSourceIngestPipeline.DataSourcePipelineModule module = dataSourceProcessingSnapshot.getDataSourceLevelIngestModule(); DataSourceIngestPipeline.DataSourcePipelineModule module = dataSourceProcessingSnapshot.getDataSourceLevelIngestModule();
if (module != null) { if (module != null) {
moduleHandle = new DataSourceIngestModuleHandle(ingestJobPipeline, module); moduleHandle = new DataSourceIngestModuleHandle(ingestModuleExecutor, module);
} }
return moduleHandle; return moduleHandle;
} }
@ -507,7 +514,7 @@ public final class IngestJob {
*/ */
public static class DataSourceIngestModuleHandle { public static class DataSourceIngestModuleHandle {
private final IngestJobPipeline ingestJobPipeline; private final IngestJobExecutor ingestJobExecutor;
private final DataSourceIngestPipeline.DataSourcePipelineModule module; private final DataSourceIngestPipeline.DataSourcePipelineModule module;
private final boolean cancelled; private final boolean cancelled;
@ -516,14 +523,14 @@ public final class IngestJob {
* used to get basic information about the module and to request * used to get basic information about the module and to request
* cancellation of the module. * cancellation of the module.
* *
* @param ingestJobPipeline The ingestJobPipeline that owns the data * @param ingestJobExecutor The ingest job executor that owns the data
* source level ingest module. * source level ingest module.
* @param module The data source level ingest module. * @param module The data source level ingest module.
*/ */
private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) { private DataSourceIngestModuleHandle(IngestJobExecutor ingestJobExecutor, DataSourceIngestPipeline.DataSourcePipelineModule module) {
this.ingestJobPipeline = ingestJobPipeline; this.ingestJobExecutor = ingestJobExecutor;
this.module = module; this.module = module;
this.cancelled = ingestJobPipeline.currentDataSourceIngestModuleIsCancelled(); this.cancelled = ingestJobExecutor.currentDataSourceIngestModuleIsCancelled();
} }
/** /**
@ -533,7 +540,7 @@ public final class IngestJob {
* @return The display name. * @return The display name.
*/ */
public String displayName() { public String displayName() {
return this.module.getDisplayName(); return module.getDisplayName();
} }
/** /**
@ -543,7 +550,7 @@ public final class IngestJob {
* @return The module processing start time. * @return The module processing start time.
*/ */
public Date startTime() { public Date startTime() {
return this.module.getProcessingStartTime(); return module.getProcessingStartTime();
} }
/** /**
@ -553,7 +560,7 @@ public final class IngestJob {
* @return True or false. * @return True or false.
*/ */
public boolean isCancelled() { public boolean isCancelled() {
return this.cancelled; return cancelled;
} }
/** /**
@ -567,8 +574,8 @@ public final class IngestJob {
* could perhaps be solved by adding a cancel() API to the * could perhaps be solved by adding a cancel() API to the
* IngestModule interface. * IngestModule interface.
*/ */
if (this.ingestJobPipeline.getCurrentDataSourceIngestModule() == this.module) { if (ingestJobExecutor.getCurrentDataSourceIngestModule() == module) {
this.ingestJobPipeline.cancelCurrentDataSourceIngestModule(); ingestJobExecutor.cancelCurrentDataSourceIngestModule();
} }
} }
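For client code such as a progress panel, the handle above is reached through a progress snapshot. A hedged usage sketch, assuming an in-scope IngestJob and that ProgressSnapshot exposes the handle through a runningDataSourceIngestModule() accessor (that accessor name is an assumption; its definition is not shown in this hunk):

    import java.util.Date;
    import org.sleuthkit.autopsy.ingest.IngestJob;

    // Hypothetical monitoring helper, not part of this commit.
    final class IngestJobMonitor {

        // Logs the running data source level module, if any, and asks it to cancel.
        static void cancelCurrentDataSourceModule(IngestJob job) {
            IngestJob.ProgressSnapshot snapshot = job.getSnapshot(false);
            if (snapshot == null) {
                return; // the job has not been started yet
            }
            IngestJob.DataSourceIngestModuleHandle handle = snapshot.runningDataSourceIngestModule();
            if (handle != null && !handle.isCancelled()) {
                Date started = handle.startTime();
                System.out.printf("Cancelling %s (running since %s)%n", handle.displayName(), started);
                handle.cancel(); // a no-op if another module has since taken over
            }
        }
    }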

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.ingest;
import java.util.List; import java.util.List;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
/** /**
* Provides an ingest module with services specific to the ingest job of which * Provides an ingest module with services specific to the ingest job of which
@ -29,16 +28,16 @@ import org.sleuthkit.datamodel.DataArtifact;
*/ */
public final class IngestJobContext { public final class IngestJobContext {
private final IngestJobPipeline ingestJobPipeline; private final IngestJobExecutor ingestJobExecutor;
/** /**
* Constructs an ingest job context object that provides an ingest module * Constructs an ingest job context object that provides an ingest module
* with services specific to the ingest job of which the module is a part. * with services specific to the ingest job of which the module is a part.
* *
* @param ingestJobPipeline The ingest pipeline for the job. * @param ingestJobExecutor The ingest executor for the job.
*/ */
IngestJobContext(IngestJobPipeline ingestJobPipeline) { IngestJobContext(IngestJobExecutor ingestJobExecutor) {
this.ingestJobPipeline = ingestJobPipeline; this.ingestJobExecutor = ingestJobExecutor;
} }
/** /**
@ -47,7 +46,7 @@ public final class IngestJobContext {
* @return The context string. * @return The context string.
*/ */
public String getExecutionContext() { public String getExecutionContext() {
return ingestJobPipeline.getExecutionContext(); return ingestJobExecutor.getExecutionContext();
} }
/** /**
@ -56,7 +55,7 @@ public final class IngestJobContext {
* @return The data source. * @return The data source.
*/ */
public Content getDataSource() { public Content getDataSource() {
return ingestJobPipeline.getDataSource(); return ingestJobExecutor.getDataSource();
} }
/** /**
@ -65,7 +64,7 @@ public final class IngestJobContext {
* @return The ID. * @return The ID.
*/ */
public long getJobId() { public long getJobId() {
return ingestJobPipeline.getIngestJobId(); return ingestJobExecutor.getIngestJobId();
} }
/** /**
@ -79,7 +78,7 @@ public final class IngestJobContext {
*/ */
@Deprecated @Deprecated
public boolean isJobCancelled() { public boolean isJobCancelled() {
return ingestJobPipeline.isCancelled(); return ingestJobExecutor.isCancelled();
} }
/** /**
@ -91,7 +90,7 @@ public final class IngestJobContext {
* @return True or false. * @return True or false.
*/ */
public boolean dataSourceIngestIsCancelled() { public boolean dataSourceIngestIsCancelled() {
return ingestJobPipeline.currentDataSourceIngestModuleIsCancelled() || ingestJobPipeline.isCancelled(); return ingestJobExecutor.currentDataSourceIngestModuleIsCancelled() || ingestJobExecutor.isCancelled();
} }
/** /**
@ -106,7 +105,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual file ingest * It is not currently possible to cancel individual file ingest
* modules. * modules.
*/ */
return ingestJobPipeline.isCancelled(); return ingestJobExecutor.isCancelled();
} }
/** /**
@ -122,7 +121,7 @@ public final class IngestJobContext {
* It is not currently possible to cancel individual data artifact * It is not currently possible to cancel individual data artifact
* ingest modules. * ingest modules.
*/ */
return ingestJobPipeline.isCancelled(); return ingestJobExecutor.isCancelled();
} }
/** /**
@ -132,7 +131,7 @@ public final class IngestJobContext {
* @return True or false. * @return True or false.
*/ */
public boolean processingUnallocatedSpace() { public boolean processingUnallocatedSpace() {
return ingestJobPipeline.shouldProcessUnallocatedSpace(); return ingestJobExecutor.shouldProcessUnallocatedSpace();
} }
/** /**
@ -155,17 +154,7 @@ public final class IngestJobContext {
* @param files The files. * @param files The files.
*/ */
public void addFilesToJob(List<AbstractFile> files) { public void addFilesToJob(List<AbstractFile> files) {
ingestJobPipeline.addFiles(files); ingestJobExecutor.addFiles(files);
}
/**
* Adds one or more data artifacts to the ingest job for processing by its
* data artifact ingest modules.
*
* @param artifacts The artifacts.
*/
public void addDataArtifactsToJob(List<DataArtifact> artifacts) {
ingestJobPipeline.addDataArtifacts(artifacts);
} }
} }
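A typical consumer of this context is a file ingest module that stores the context at startUp() and polls the cancellation flags while processing. A hedged sketch (the module class itself is hypothetical):

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.datamodel.AbstractFile;

    // Hypothetical file ingest module showing the context-based cancellation checks.
    final class ExampleFileIngestModule implements FileIngestModule {

        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) {
            this.context = context; // the context identifies the job this module serves
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            if (context.fileIngestIsCancelled()) {
                return ProcessResult.OK; // skip expensive work once the job is cancelled
            }
            // ... analyze the file, tagging any results with context.getJobId() ...
            return ProcessResult.OK;
        }

        @Override
        public void shutDown() {
        }
    }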

View File

@ -56,7 +56,7 @@ class IngestJobInputStream implements IngestStream {
if (closed) { if (closed) {
throw new IngestStreamClosedException("Can not add files - ingest stream is closed"); throw new IngestStreamClosedException("Can not add files - ingest stream is closed");
} }
ingestJob.addStreamingIngestFiles(fileObjectIds); ingestJob.addStreamedFiles(fileObjectIds);
} }
@Override @Override

View File

@ -34,6 +34,7 @@ import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -288,13 +290,103 @@ public class IngestManager implements IngestProgressSnapshotProvider {
/** /**
* Handles artifacts posted events published by the Sleuth Kit layer * Handles artifacts posted events published by the Sleuth Kit layer
* blackboard via the event bus for the case database. * blackboard via the Sleuth Kit event bus.
* *
* @param tskEvent A Sleuth Kit data model ArtifactsPostedEvent from the * @param tskEvent The event.
* case database event bus.
*/ */
@Subscribe @Subscribe
void handleArtifactsPosted(Blackboard.ArtifactsPostedEvent tskEvent) { void handleArtifactsPosted(Blackboard.ArtifactsPostedEvent tskEvent) {
/*
* Add any new data artifacts included in the event to the source ingest
* job for possible analysis.
*/
List<DataArtifact> newDataArtifacts = new ArrayList<>();
Collection<BlackboardArtifact> newArtifacts = tskEvent.getArtifacts();
for (BlackboardArtifact artifact : newArtifacts) {
if (artifact instanceof DataArtifact) {
newDataArtifacts.add((DataArtifact) artifact);
}
}
if (!newDataArtifacts.isEmpty()) {
IngestJob ingestJob = null;
Optional<Long> ingestJobId = tskEvent.getIngestJobId();
if (ingestJobId.isPresent()) {
synchronized (ingestJobsById) {
ingestJob = ingestJobsById.get(ingestJobId.get());
}
} else {
/*
* There are four use cases where the ingest job ID returned by
* the event is expected to be null:
*
* 1. The artifacts are being posted by a data source processor
* (DSP) module that runs before the ingest job is created,
* i.e., a DSP that does not support streaming ingest and has no
* notion of an ingest job ID. In this use case, the event is
* handled synchronously. The DSP calls
* Blackboard.postArtifacts(), which puts the event on the event
* bus to which this method subscribes, so the event will be
* handled here before the DSP completes and calls
* DataSourceProcessorCallback.done(). This means the code below
* will execute before the ingest job is created, so it will not
* find an ingest job to which to add the artifacts. However,
* the artifacts WILL be analyzed after the ingest job is
* started, when the ingest job executor, working in batch mode,
* schedules ingest tasks for all of the data artifacts in the
* case database. There is a slight risk that the wrong ingest
* job will be selected if multiple ingests of the same data
* source are in progress.
*
* 2. The artifacts were posted by an ingest module that either
* has not been updated to use the current
* Blackboard.postArtifacts() API, or is using it incorrectly.
* In this use case, the code below should be able to find the
* ingest job to which to add the artifacts via their data
* source. There is a slight risk that the wrong ingest job will
* be selected if multiple ingests of the same data source are
* in progress.
*
* 3. The portable case generator uses a
* CommunicationArtifactsHelper constructed with a null ingest
* job ID, and the CommunicationArtifactsHelper posts artifacts.
* Ingest of that data source might be running, in which case
* the data artifact will be analyzed. It also might be analyzed
* by a subsequent ingest job for the data source. This is an
* acceptable edge case.
*
* 4. The user can manually create timeline events with the
* timeline tool, which posts the TSK_TL_EVENT data artifacts.
* The user selects the data source for these artifacts. Ingest
* of that data source might be running, in which case the data
* artifact will be analyzed. It also might be analyzed by a
* subsequent ingest job for the data source. This is an
* acceptable edge case.
*/
DataArtifact dataArtifact = newDataArtifacts.get(0);
try {
Content artifactDataSource = dataArtifact.getDataSource();
synchronized (ingestJobsById) {
for (IngestJob job : ingestJobsById.values()) {
Content dataSource = job.getDataSource();
if (artifactDataSource.getId() == dataSource.getId()) {
ingestJob = job;
break;
}
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source for data artifact (object ID = %d)", dataArtifact.getId()), ex); //NON-NLS
}
}
if (ingestJob != null) {
ingestJob.addDataArtifacts(newDataArtifacts);
}
}
/*
* Publish Autopsy events for the new artifacts, one event per artifact
* type.
*/
for (BlackboardArtifact.Type artifactType : tskEvent.getArtifactTypes()) { for (BlackboardArtifact.Type artifactType : tskEvent.getArtifactTypes()) {
ModuleDataEvent legacyEvent = new ModuleDataEvent(tskEvent.getModuleName(), artifactType, tskEvent.getArtifacts(artifactType)); ModuleDataEvent legacyEvent = new ModuleDataEvent(tskEvent.getModuleName(), artifactType, tskEvent.getArtifacts(artifactType));
AutopsyEvent autopsyEvent = new BlackboardPostEvent(legacyEvent); AutopsyEvent autopsyEvent = new BlackboardPostEvent(legacyEvent);
@ -825,7 +917,7 @@ public class IngestManager implements IngestProgressSnapshotProvider {
*/ */
void setIngestTaskProgress(DataSourceIngestTask task, String currentModuleName) { void setIngestTaskProgress(DataSourceIngestTask task, String currentModuleName) {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId()); IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource()); IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap); ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);
/* /*
@ -847,10 +939,10 @@ public class IngestManager implements IngestProgressSnapshotProvider {
IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId()); IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId());
IngestThreadActivitySnapshot newSnap; IngestThreadActivitySnapshot newSnap;
try { try {
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile()); newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile());
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting file from file ingest task", ex); logger.log(Level.SEVERE, "Error getting file from file ingest task", ex);
newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource()); newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource());
} }
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap); ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap);
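The data source fallback in handleArtifactsPosted() above is only needed when an artifact arrives without an ingest job ID. A hedged sketch of the posting pattern that avoids the fallback, for module code that has its job ID at hand (the helper class is hypothetical; the three-argument postArtifact() overload matches the calls elsewhere in this commit):

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.DataArtifact;

    // Hypothetical helper: posting with the ingest job ID lets handleArtifactsPosted()
    // route the artifact straight to its job instead of searching by data source.
    final class ArtifactPostingExample {

        static void postWithJobId(DataArtifact artifact, String moduleName, long ingestJobId)
                throws Blackboard.BlackboardException {
            Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
            blackboard.postArtifact(artifact, moduleName, ingestJobId);
        }
    }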

View File

@ -33,21 +33,24 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
/** /**
* An abstract superclass for pipelines of ingest modules that execute ingest * An abstract superclass for pipelines of ingest modules that perform the
* tasks for an ingest job. Subclasses need to extend this class and to * ingest tasks that make up an ingest job. A pipeline performs a task by
* implement a specialization of the inner PipelineModule abstract superclass. * passing it sequentially to the process() method of each module in the
* pipeline.
* *
* NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use * @param <T> The type of ingest tasks the pipeline performs.
* by one thread at a time. There are a few status fields that are volatile to
* ensure visibility to threads making ingest progress snapshots, but methods
* such as startUp(), executeTask() and shutDown() are not synchronized.
*
* @param <T> The ingest task type.
*/ */
abstract class IngestTaskPipeline<T extends IngestTask> { abstract class IngestPipeline<T extends IngestTask> {
private static final Logger logger = Logger.getLogger(IngestTaskPipeline.class.getName()); /*
private final IngestJobPipeline ingestJobPipeline; * NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use
* by one thread at a time. There are a few status fields that are volatile
* to ensure visibility to threads making ingest progress snapshots, but
* methods such as startUp(), performTask() and shutDown() are not
* synchronized.
*/
private static final Logger logger = Logger.getLogger(IngestPipeline.class.getName());
private final IngestJobExecutor ingestJobExecutor;
private final List<IngestModuleTemplate> moduleTemplates; private final List<IngestModuleTemplate> moduleTemplates;
private final List<PipelineModule<T>> modules; private final List<PipelineModule<T>> modules;
private volatile Date startTime; private volatile Date startTime;
@ -56,38 +59,34 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/** /**
* Constructs the superclass part of a pipeline of ingest modules that * Constructs the superclass part of a pipeline of ingest modules that
* executes ingest tasks for an ingest job. * performs ingest tasks for an ingest job.
* *
* @param ingestPipeline The parent ingest job pipeline for this ingest * @param ingestJobExecutor The ingest job executor for this pipeline.
* task pipeline. * @param moduleTemplates The ingest module templates to be used to
* @param moduleTemplates The ingest module templates that define this * construct the ingest modules for this pipeline.
* ingest task pipeline. May be an empty list. * May be an empty list if this type of pipeline is
*/ * not needed for the ingest job.
IngestTaskPipeline(IngestJobPipeline ingestPipeline, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobPipeline = ingestPipeline;
/*
* The creation of ingest modules from the ingest module templates has
* been deliberately deferred to the startUp() method so that any and
* all errors in module construction or start up can be reported to the
* client code.
*/ */
IngestPipeline(IngestJobExecutor ingestJobExecutor, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJobExecutor = ingestJobExecutor;
this.moduleTemplates = moduleTemplates; this.moduleTemplates = moduleTemplates;
modules = new ArrayList<>(); modules = new ArrayList<>();
} }
/** /**
* Indicates whether or not there are any ingest modules in this ingest task * Indicates whether or not there are any ingest modules in this ingest
* pipeline. * pipeline.
* *
* @return True or false. * @return True or false; always true before startUp() is called.
*/ */
boolean isEmpty() { boolean isEmpty() {
return modules.isEmpty(); return modules.isEmpty();
} }
/** /**
* Queries whether or not this ingest task pipeline is running, i.e., the * Queries whether or not this ingest pipeline is running, i.e., the
* startUp() method has been called and the shutDown() has not been called. * startUp() method has been called and the shutDown() method has not been
* called yet.
* *
* @return True or false. * @return True or false.
*/ */
@ -96,8 +95,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* Starts up this ingest task pipeline by calling the startUp() methods of * Starts up this ingest pipeline by calling the startUp() methods of the
* the ingest modules in the pipeline. * ingest modules in the pipeline.
* *
* @return A list of ingest module start up errors, possibly empty. * @return A list of ingest module start up errors, possibly empty.
*/ */
@ -110,21 +109,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
* any and all errors in module construction or start up can be * any and all errors in module construction or start up can be
* reported to the client code. * reported to the client code.
*/ */
createIngestModules(moduleTemplates); createIngestModules();
errors.addAll(startUpIngestModules()); errors.addAll(startUpIngestModules());
} else { } else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline already started"))); //NON-NLS errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline already started"))); //NON-NLS
} }
return errors; return errors;
} }
/** /**
* Creates the ingest modules for this ingest task pipeline from the given * Creates the ingest modules for this ingest pipeline using its ingest
* ingest module templates. * module templates.
*
* @param moduleTemplates The ingest module templates.
*/ */
private void createIngestModules(List<IngestModuleTemplate> moduleTemplates) { private void createIngestModules() {
if (modules.isEmpty()) { if (modules.isEmpty()) {
for (IngestModuleTemplate template : moduleTemplates) { for (IngestModuleTemplate template : moduleTemplates) {
Optional<PipelineModule<T>> module = acceptModuleTemplate(template); Optional<PipelineModule<T>> module = acceptModuleTemplate(template);
@ -137,8 +134,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/** /**
* Determines if one of the types of ingest modules that can be created from * Determines if one of the types of ingest modules that can be created from
* a given ingest module template should be added to this ingest task * a given ingest module template should be added to this ingest pipeline.
* pipeline. If so, the ingest module is created and returned. * If so, the ingest module is created and returned.
* *
* @param template The ingest module template to be used or ignored, as * @param template The ingest module template to be used or ignored, as
* appropriate to the pipeline type. * appropriate to the pipeline type.
@ -149,7 +146,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
abstract Optional<PipelineModule<T>> acceptModuleTemplate(IngestModuleTemplate template); abstract Optional<PipelineModule<T>> acceptModuleTemplate(IngestModuleTemplate template);
/** /**
* Starts up the ingest modules in this ingest task pipeline. * Starts up the ingest modules in this ingest pipeline.
* *
* @return A list of ingest module start up errors, possibly empty. * @return A list of ingest module start up errors, possibly empty.
*/ */
@ -159,7 +156,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
running = true; running = true;
for (PipelineModule<T> module : modules) { for (PipelineModule<T> module : modules) {
try { try {
module.startUp(new IngestJobContext(ingestJobPipeline)); module.startUp(new IngestJobContext(ingestJobExecutor));
} catch (Throwable ex) { } catch (Throwable ex) {
/* /*
* A catch-all exception firewall. Start up errors for all of * A catch-all exception firewall. Start up errors for all of
@ -174,10 +171,10 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* Returns the start up time of this ingest task pipeline. * Returns the start up time of this ingest pipeline.
* *
* @return The file processing start time, may be null if this pipeline has * @return The start up time, may be null if this pipeline has not been
* not been started yet. * started yet.
*/ */
Date getStartTime() { Date getStartTime() {
Date reportedStartTime = null; Date reportedStartTime = null;
@ -188,65 +185,66 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* Executes an ingest task by calling the process() methods of the ingest * Performs an ingest task by sequentially calling the process() methods of
* modules in this ingest task pipeline. * the ingest modules in this ingest pipeline.
* *
* @param task The task. * @param task The task.
* *
* @return A list of ingest module task processing errors, possibly empty. * @return A list of ingest module processing errors, possibly empty.
*/ */
List<IngestModuleError> executeTask(T task) { List<IngestModuleError> performTask(T task) {
List<IngestModuleError> errors = new ArrayList<>(); List<IngestModuleError> errors = new ArrayList<>();
if (running) { if (running) {
if (!ingestJobPipeline.isCancelled()) { if (!ingestJobExecutor.isCancelled()) {
pauseIfScheduled(); pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) { if (ingestJobExecutor.isCancelled()) {
return errors; return errors;
} }
try { try {
prepareForTask(task); prepareForTask(task);
} catch (IngestTaskPipelineException ex) { } catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
return errors; return errors;
} }
for (PipelineModule<T> module : modules) { for (PipelineModule<T> module : modules) {
pauseIfScheduled(); pauseIfScheduled();
if (ingestJobPipeline.isCancelled()) { if (ingestJobExecutor.isCancelled()) {
break; break;
} }
try { try {
currentModule = module; currentModule = module;
currentModule.setProcessingStartTime(); currentModule.setProcessingStartTime();
module.executeTask(ingestJobPipeline, task); module.process(ingestJobExecutor, task);
} catch (Throwable ex) { } catch (Throwable ex) { // Catch-all exception firewall
/* /*
* A catch-all exception firewall. Note that a runtime * Note that an exception from a module does not stop
* exception from a single module does not stop
* processing of the task by the other modules in the * processing of the task by the other modules in the
* pipeline. * pipeline.
*/ */
errors.add(new IngestModuleError(module.getDisplayName(), ex)); errors.add(new IngestModuleError(module.getDisplayName(), ex));
} }
if (ingestJobPipeline.isCancelled()) { if (ingestJobExecutor.isCancelled()) {
break; break;
} }
} }
} }
try { try {
cleanUpAfterTask(task); cleanUpAfterTask(task);
} catch (IngestTaskPipelineException ex) { } catch (IngestPipelineException ex) {
errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS
} }
} else { } else {
errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline not started or shut down"))); //NON-NLS errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline not started or shut down"))); //NON-NLS
} }
currentModule = null; currentModule = null;
return errors; return errors;
} }
/** /**
* Pauses task execution if ingest has been configured to be paused weekly * Pauses this pipeline if ingest has been configured to be paused weekly at
* at a specified time for a specified duration. * a specified time, for a specified duration. A pipeline can only be paused
* between calls to module process() methods, i.e., the individual modules
* themselves cannot be paused in the middle of processing a task.
*/ */
private void pauseIfScheduled() { private void pauseIfScheduled() {
if (ScheduledIngestPauseSettings.getPauseEnabled() == true) { if (ScheduledIngestPauseSettings.getPauseEnabled() == true) {
@ -278,7 +276,7 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
*/ */
LocalDateTime timeNow = LocalDateTime.now(); LocalDateTime timeNow = LocalDateTime.now();
if ((timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart)) && timeNow.isBefore(pauseEnd)) { if ((timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart)) && timeNow.isBefore(pauseEnd)) {
ingestJobPipeline.registerPausedIngestThread(Thread.currentThread()); ingestJobExecutor.registerPausedIngestThread(Thread.currentThread());
try { try {
long timeRemainingMillis = ChronoUnit.MILLIS.between(timeNow, pauseEnd); long timeRemainingMillis = ChronoUnit.MILLIS.between(timeNow, pauseEnd);
logger.log(Level.INFO, String.format("%s pausing at %s for ~%d minutes", Thread.currentThread().getName(), LocalDateTime.now(), TimeUnit.MILLISECONDS.toMinutes(timeRemainingMillis))); logger.log(Level.INFO, String.format("%s pausing at %s for ~%d minutes", Thread.currentThread().getName(), LocalDateTime.now(), TimeUnit.MILLISECONDS.toMinutes(timeRemainingMillis)));
@ -287,27 +285,27 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} catch (InterruptedException notLogged) { } catch (InterruptedException notLogged) {
logger.log(Level.INFO, String.format("%s resuming at %s due to sleep interrupt (ingest job canceled)", Thread.currentThread().getName(), LocalDateTime.now())); logger.log(Level.INFO, String.format("%s resuming at %s due to sleep interrupt (ingest job canceled)", Thread.currentThread().getName(), LocalDateTime.now()));
} finally { } finally {
ingestJobPipeline.unregisterPausedIngestThread(Thread.currentThread()); ingestJobExecutor.unregisterPausedIngestThread(Thread.currentThread());
} }
} }
} }
} }
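The computation of pauseStart and pauseEnd is elided from this hunk. A self-contained sketch of the window arithmetic, with the day, hour, and duration hard-coded where the real code reads ScheduledIngestPauseSettings (those hard-coded values and the adjuster-based rounding are assumptions, not the shipped logic):

    import java.time.DayOfWeek;
    import java.time.LocalDateTime;
    import java.time.temporal.TemporalAdjusters;

    // Sketch: compute this week's pause window and test whether "now" falls inside it.
    final class PauseWindowSketch {

        public static void main(String[] args) {
            // Stand-ins for values the real code reads from ScheduledIngestPauseSettings.
            DayOfWeek pauseDay = DayOfWeek.SUNDAY;
            int pauseStartHour = 2;         // 02:00 local time
            int pauseDurationMinutes = 60;  // pause for one hour

            LocalDateTime timeNow = LocalDateTime.now();
            LocalDateTime pauseStart = timeNow
                    .with(TemporalAdjusters.previousOrSame(pauseDay))
                    .withHour(pauseStartHour).withMinute(0).withSecond(0).withNano(0);
            LocalDateTime pauseEnd = pauseStart.plusMinutes(pauseDurationMinutes);

            // Same membership test as in pauseIfScheduled(): start inclusive, end exclusive.
            boolean paused = (timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart))
                    && timeNow.isBefore(pauseEnd);
            System.out.println("Window " + pauseStart + " .. " + pauseEnd + ", paused now: " + paused);
        }
    }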
/** /**
* Does any task type specific preparation required before executing an * Does any task-type-specific preparation required before performing an
* ingest task. * ingest task.
* *
* @param task The task. * @param task The task.
* *
* @throws IngestTaskPipelineException Thrown if there is an error preparing * @throws IngestPipelineException Thrown if there is an error preparing to
* to execute the task. * perform the task.
*/ */
abstract void prepareForTask(T task) throws IngestTaskPipelineException; abstract void prepareForTask(T task) throws IngestPipelineException;
/** /**
* Gets the currently running ingest module. * Gets the currently running ingest module.
* *
* @return The module, possibly null if no module is currently running. * @return The module, possibly null, if no module is currently running.
*/ */
PipelineModule<T> getCurrentlyRunningModule() { PipelineModule<T> getCurrentlyRunningModule() {
return currentModule; return currentModule;
@ -345,22 +343,19 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* Does any task type specific clean up required after executing an ingest * Does any task-type-specific clean up required after performing an ingest
* task. * task.
* *
* @param task The task. * @param task The task.
* *
* @throws IngestTaskPipelineException Thrown if there is an error cleaning * @throws IngestPipelineException Thrown if there is an error cleaning up
* up after performing the task. * after performing the task.
*/ */
abstract void cleanUpAfterTask(T task) throws IngestTaskPipelineException; abstract void cleanUpAfterTask(T task) throws IngestPipelineException;
/** /**
* An abstract superclass for a decorator that adds ingest infrastructure * An abstract superclass for an ingest module decorator that adds ingest
* operations to an ingest module. * infrastructure operations to an ingest module.
*
* IMPORTANT: Subclasses of IngestTaskPipeline need to implement a
* specialization this class
*/ */
static abstract class PipelineModule<T extends IngestTask> implements IngestModule { static abstract class PipelineModule<T extends IngestTask> implements IngestModule {
@ -369,16 +364,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
private volatile Date processingStartTime; private volatile Date processingStartTime;
/** /**
* Constructs an instance of an abstract superclass for a decorator that * Constructs an instance of an abstract superclass for an ingest module
* adds ingest infrastructure operations to an ingest module. * decorator that adds ingest infrastructure operations to an ingest
* module.
* *
* @param module The ingest module to be wrapped. * @param module The ingest module to be decorated.
* @param displayName The display name for the module. * @param displayName The display name for the module.
*/ */
PipelineModule(IngestModule module, String displayName) { PipelineModule(IngestModule module, String displayName) {
this.module = module; this.module = module;
this.displayName = displayName; this.displayName = displayName;
this.processingStartTime = new Date(); processingStartTime = new Date();
} }
/** /**
@ -410,8 +406,8 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
/** /**
* Gets the processing start time for the decorated module. * Gets the processing start time for the decorated module.
* *
* @return The start time, will be null if the module has not started * @return The start time, not valid if setProcessingStartTime() has not
* processing the data source yet. * been called first.
*/ */
Date getProcessingStartTime() { Date getProcessingStartTime() {
return new Date(processingStartTime.getTime()); return new Date(processingStartTime.getTime());
@ -423,17 +419,17 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* Executes an ingest task using the process() method of the decorated * Performs an ingest task using the process() method of the decorated
* module. * module.
* *
* @param ingestJobPipeline The ingest job pipeline that owns the ingest * @param ingestJobExecutor The ingest job executor that owns the ingest
* task pipeline this module belongs to. * pipeline to which this module belongs.
* @param task The task to execute. * @param task The task to perform.
* *
* @throws IngestModuleException Exception thrown if there is an error * @throws IngestModuleException Exception thrown if there is an error
* performing the task. * performing the task.
*/ */
abstract void executeTask(IngestJobPipeline ingestJobPipeline, T task) throws IngestModuleException; abstract void process(IngestJobExecutor ingestJobExecutor, T task) throws IngestModuleException;
@Override @Override
public void shutDown() { public void shutDown() {
@ -443,28 +439,28 @@ abstract class IngestTaskPipeline<T extends IngestTask> {
} }
/** /**
* An exception thrown by an ingest task pipeline. * An exception thrown by an ingest pipeline.
*/ */
public static class IngestTaskPipelineException extends Exception { static class IngestPipelineException extends Exception {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
/** /**
* Constructs an exception to be thrown by an ingest task pipeline. * Constructs an exception to be thrown by an ingest pipeline.
* *
* @param message The exception message. * @param message The exception message.
*/ */
public IngestTaskPipelineException(String message) { IngestPipelineException(String message) {
super(message); super(message);
} }
/** /**
* Constructs an exception to be thrown by an ingest task pipeline. * Constructs an exception to be thrown by an ingest pipeline.
* *
* @param message The exception message. * @param message The exception message.
* @param cause The exception cause. * @param cause The exception cause.
*/ */
public IngestTaskPipelineException(String message, Throwable cause) { IngestPipelineException(String message, Throwable cause) {
super(message, cause); super(message, cause);
} }
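Concrete pipelines specialize this class by implementing acceptModuleTemplate(), prepareForTask(), and cleanUpAfterTask(). Since those types are package-private, the following self-contained miniature (all names hypothetical) shows only the template's core shape: modules run in order, and the catch-all firewall records a failure without stopping the remaining modules. Note that the real performTask() catches Throwable rather than Exception.

    import java.util.ArrayList;
    import java.util.List;

    // Miniature of the pipeline template; MiniPipeline and Module are illustrative names.
    final class MiniPipeline<T> {

        interface Module<T> {
            String name();
            void process(T task) throws Exception;
        }

        private final List<Module<T>> modules = new ArrayList<>();

        void add(Module<T> module) {
            modules.add(module);
        }

        // Returns the names of the modules that failed on this task.
        List<String> performTask(T task) {
            List<String> failures = new ArrayList<>();
            for (Module<T> module : modules) {
                try {
                    module.process(task);
                } catch (Exception ex) { // catch-all firewall, as in performTask() above
                    failures.add(module.name());
                }
            }
            return failures;
        }

        public static void main(String[] args) {
            MiniPipeline<String> pipeline = new MiniPipeline<>();
            pipeline.add(new Module<String>() {
                @Override
                public String name() {
                    return "ok-module";
                }

                @Override
                public void process(String task) {
                }
            });
            pipeline.add(new Module<String>() {
                @Override
                public String name() {
                    return "failing-module";
                }

                @Override
                public void process(String task) throws Exception {
                    throw new Exception("simulated module failure");
                }
            });
            System.out.println("Failed modules: " + pipeline.performTask("some task")); // [failing-module]
        }
    }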

View File

@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.SleuthkitCase;
*/ */
public final class IngestServices { public final class IngestServices {
private static Logger logger = Logger.getLogger(IngestServices.class.getName()); private final static Logger logger = Logger.getLogger(IngestServices.class.getName());
private static IngestServices instance = null; private static IngestServices instance = null;
/** /**
@ -115,7 +115,7 @@ public final class IngestServices {
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) { public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
try { try {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName()); blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName(), null);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Failed to post artifacts", ex); logger.log(Level.SEVERE, "Failed to post artifacts", ex);
} }

View File

@ -22,37 +22,35 @@ import org.sleuthkit.datamodel.Content;
/** /**
* An ingest task that will be executed by an ingest thread using a given ingest * An ingest task that will be executed by an ingest thread using a given ingest
* job pipeline. Three examples of concrete types of ingest tasks are tasks to * job executor. Three examples of concrete types of ingest tasks are tasks to
* analyze a data source, tasks to analyze the files in a data source, and tasks * analyze a data source, tasks to analyze the files in a data source, and tasks
* that analyze data artifacts. * to analyze data artifacts.
*/ */
abstract class IngestTask { abstract class IngestTask {
private final static long NOT_SET = Long.MIN_VALUE; private final static long NOT_SET = Long.MIN_VALUE;
private final IngestJobPipeline ingestJobPipeline; private final IngestJobExecutor ingestJobExecutor;
private long threadId; private long threadId;
/** /**
* Constructs an ingest task that will be executed by an ingest thread using * Constructs an ingest task that will be executed by an ingest thread using
* a given ingest job pipeline. Three examples of concrete types of ingest * a given ingest job executor.
* tasks are tasks to analyze a data source, tasks to analyze the files in a
* data source, and tasks that analyze data artifacts.
* *
* @param ingestJobPipeline The ingest job pipeline to use to execute the * @param ingestJobExecutor The ingest job executor to use to execute the
* task. * task.
*/ */
IngestTask(IngestJobPipeline ingestJobPipeline) { IngestTask(IngestJobExecutor ingestJobExecutor) {
this.ingestJobPipeline = ingestJobPipeline; this.ingestJobExecutor = ingestJobExecutor;
threadId = NOT_SET; threadId = NOT_SET;
} }
/** /**
* Gets the ingest job pipeline used to complete this task. * Gets the ingest job executor to use to execute this task.
* *
* @return The ingest job pipeline. * @return The ingest job executor.
*/ */
IngestJobPipeline getIngestJobPipeline() { IngestJobExecutor getIngestJobExecutor() {
return ingestJobPipeline; return ingestJobExecutor;
} }
/** /**
@ -61,7 +59,7 @@ abstract class IngestTask {
* @return The data source. * @return The data source.
*/ */
Content getDataSource() { Content getDataSource() {
return getIngestJobPipeline().getDataSource(); return getIngestJobExecutor().getDataSource();
} }
/** /**
@ -84,8 +82,8 @@ abstract class IngestTask {
/** /**
* Records the ingest thread ID of the calling thread and executes this task * Records the ingest thread ID of the calling thread and executes this task
* using the ingest job pipeline specified when the task was created. The * using the ingest job executor specified when the task was created. The
* implementation of the method should simple call * implementation of the method should simply call
* super.setThreadId(threadId) and getIngestJobPipeline().process(this). * super.setThreadId(threadId) and getIngestJobExecutor().execute(this).
* *
* @param threadId The numeric ID of the ingest thread executing this task. * @param threadId The numeric ID of the ingest thread executing this task.
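A schematic of that two-line contract, with stand-in types since IngestTask and IngestJobExecutor are package-private:

    // Stand-in types only; they illustrate the documented execute() contract.
    abstract class TaskSketch {

        private long threadId;

        void setThreadId(long threadId) {
            this.threadId = threadId;
        }

        abstract void execute(long threadId);
    }

    final class FileTaskSketch extends TaskSketch {

        private final Runnable executor; // stands in for getIngestJobExecutor().execute(this)

        FileTaskSketch(Runnable executor) {
            this.executor = executor;
        }

        @Override
        void execute(long threadId) {
            super.setThreadId(threadId); // record the ingest thread ID
            executor.run();              // then hand the task to the job's executor
        }
    }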

View File

@ -138,7 +138,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the * task to the pipeline for processing by the
* pipeline's ingest modules. * pipeline's ingest modules.
*/ */
synchronized void scheduleIngestTasks(IngestJobPipeline ingestPipeline) { synchronized void scheduleIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
if (ingestPipeline.hasDataSourceIngestModules()) { if (ingestPipeline.hasDataSourceIngestModules()) {
scheduleDataSourceIngestTask(ingestPipeline); scheduleDataSourceIngestTask(ingestPipeline);
@ -163,7 +163,7 @@ final class IngestTasksScheduler {
* task to the pipeline for processing by the * task to the pipeline for processing by the
* pipeline's ingest modules. * pipeline's ingest modules.
*/ */
synchronized void scheduleDataSourceIngestTask(IngestJobPipeline ingestPipeline) { synchronized void scheduleDataSourceIngestTask(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
DataSourceIngestTask task = new DataSourceIngestTask(ingestPipeline); DataSourceIngestTask task = new DataSourceIngestTask(ingestPipeline);
try { try {
@ -190,7 +190,7 @@ final class IngestTasksScheduler {
* empty, then all of the files from the data source * empty, then all of the files from the data source
* are candidates for scheduling. * are candidates for scheduling.
*/ */
synchronized void scheduleFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) { synchronized void scheduleFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
Collection<AbstractFile> candidateFiles; Collection<AbstractFile> candidateFiles;
if (files.isEmpty()) { if (files.isEmpty()) {
@ -220,7 +220,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules. * processing by the pipeline's ingest modules.
* @param files A list of file object IDs for the streamed files. * @param files A list of file object IDs for the streamed files.
*/ */
synchronized void scheduleStreamedFileIngestTasks(IngestJobPipeline ingestPipeline, List<Long> fileIds) { synchronized void scheduleStreamedFileIngestTasks(IngestJobExecutor ingestPipeline, List<Long> fileIds) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
for (long id : fileIds) { for (long id : fileIds) {
/* /*
@ -252,7 +252,7 @@ final class IngestTasksScheduler {
* processing by the pipeline's ingest modules. * processing by the pipeline's ingest modules.
* @param files The files. * @param files The files.
*/ */
synchronized void fastTrackFileIngestTasks(IngestJobPipeline ingestPipeline, Collection<AbstractFile> files) { synchronized void fastTrackFileIngestTasks(IngestJobExecutor ingestPipeline, Collection<AbstractFile> files) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
/* /*
* Put the files directly into the queue for the file ingest * Put the files directly into the queue for the file ingest
@ -290,7 +290,7 @@ final class IngestTasksScheduler {
* target Content of the task to the pipeline for * target Content of the task to the pipeline for
* processing by the pipeline's ingest modules. * processing by the pipeline's ingest modules.
*/ */
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline) { synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard(); Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
try { try {
@ -318,7 +318,7 @@ final class IngestTasksScheduler {
* source; if empty, then all of the data artifacts * source; if empty, then all of the data artifacts
* from the data source will be scheduled. * from the data source will be scheduled.
*/ */
synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline, List<DataArtifact> artifacts) { synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline, List<DataArtifact> artifacts) {
if (!ingestPipeline.isCancelled()) { if (!ingestPipeline.isCancelled()) {
for (DataArtifact artifact : artifacts) { for (DataArtifact artifact : artifacts) {
DataArtifactIngestTask task = new DataArtifactIngestTask(ingestPipeline, artifact); DataArtifactIngestTask task = new DataArtifactIngestTask(ingestPipeline, artifact);
@ -373,7 +373,7 @@ final class IngestTasksScheduler {
* *
* @return True or false. * @return True or false.
*/ */
synchronized boolean currentTasksAreCompleted(IngestJobPipeline ingestPipeline) { synchronized boolean currentTasksAreCompleted(IngestJobExecutor ingestPipeline) {
long pipelineId = ingestPipeline.getIngestJobId(); long pipelineId = ingestPipeline.getIngestJobId();
return !(dataSourceIngestTasksQueue.hasTasksForJob(pipelineId) return !(dataSourceIngestTasksQueue.hasTasksForJob(pipelineId)
|| hasTasksForJob(topLevelFileIngestTasksQueue, pipelineId) || hasTasksForJob(topLevelFileIngestTasksQueue, pipelineId)
@ -402,7 +402,7 @@ final class IngestTasksScheduler {
* *
* @param ingestJobPipeline The ingest pipeline for the job. * @param ingestJobPipeline The ingest pipeline for the job.
*/ */
synchronized void cancelPendingFileTasksForIngestJob(IngestJobPipeline ingestJobPipeline) { synchronized void cancelPendingFileTasksForIngestJob(IngestJobExecutor ingestJobPipeline) {
long jobId = ingestJobPipeline.getIngestJobId(); long jobId = ingestJobPipeline.getIngestJobId();
removeTasksForJob(topLevelFileIngestTasksQueue, jobId); removeTasksForJob(topLevelFileIngestTasksQueue, jobId);
removeTasksForJob(batchedFileIngestTasksQueue, jobId); removeTasksForJob(batchedFileIngestTasksQueue, jobId);
@ -549,7 +549,7 @@ final class IngestTasksScheduler {
for (Content child : file.getChildren()) { for (Content child : file.getChildren()) {
if (child instanceof AbstractFile) { if (child instanceof AbstractFile) {
AbstractFile childFile = (AbstractFile) child; AbstractFile childFile = (AbstractFile) child;
FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobPipeline(), childFile); FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobExecutor(), childFile);
if (childFile.hasChildren()) { if (childFile.hasChildren()) {
batchedFileIngestTasksQueue.add(childTask); batchedFileIngestTasksQueue.add(childTask);
} else if (shouldEnqueueFileTask(childTask)) { } else if (shouldEnqueueFileTask(childTask)) {
@ -668,7 +668,7 @@ final class IngestTasksScheduler {
private static boolean shouldBeCarved(final FileIngestTask task) { private static boolean shouldBeCarved(final FileIngestTask task) {
try { try {
AbstractFile file = task.getFile(); AbstractFile file = task.getFile();
return task.getIngestJobPipeline().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS); return task.getIngestJobExecutor().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
return false; return false;
} }
@ -685,7 +685,7 @@ final class IngestTasksScheduler {
private static boolean fileAcceptedByFilter(final FileIngestTask task) { private static boolean fileAcceptedByFilter(final FileIngestTask task) {
try { try {
AbstractFile file = task.getFile(); AbstractFile file = task.getFile();
return !(task.getIngestJobPipeline().getFileIngestFilter().fileIsMemberOf(file) == null); return !(task.getIngestJobExecutor().getFileIngestFilter().fileIsMemberOf(file) == null);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
return false; return false;
} }
@ -702,7 +702,7 @@ final class IngestTasksScheduler {
*/ */
synchronized private static boolean hasTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) { synchronized private static boolean hasTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
for (IngestTask task : tasks) { for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
return true; return true;
} }
} }
@ -720,7 +720,7 @@ final class IngestTasksScheduler {
Iterator<? extends IngestTask> iterator = tasks.iterator(); Iterator<? extends IngestTask> iterator = tasks.iterator();
while (iterator.hasNext()) { while (iterator.hasNext()) {
IngestTask task = iterator.next(); IngestTask task = iterator.next();
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
iterator.remove(); iterator.remove();
} }
} }
@ -738,7 +738,7 @@ final class IngestTasksScheduler {
private static int countTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) { private static int countTasksForJob(Collection<? extends IngestTask> tasks, long pipelineId) {
int count = 0; int count = 0;
for (IngestTask task : tasks) { for (IngestTask task : tasks) {
if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) {
count++; count++;
} }
} }
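Taken together, the hunks above rename the scheduler's collaborator type from IngestJobPipeline to IngestJobExecutor without changing behavior. A minimal sketch of the caller-side effect, assuming the scheduler's usual singleton accessor and a polling loop that are not part of this commit:

// Sketch only, not code from this commit: illustrates the renamed parameter type.
// IngestTasksScheduler.getInstance() and the drain loop are assumptions.
void scheduleAndDrain(IngestJobExecutor executor, List<AbstractFile> files) throws InterruptedException {
    IngestTasksScheduler scheduler = IngestTasksScheduler.getInstance();
    scheduler.scheduleFileIngestTasks(executor, files); // parameter was IngestJobPipeline before this change
    while (!scheduler.currentTasksAreCompleted(executor)) {
        Thread.sleep(1000); // wait until the job's queues are empty
    }
}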

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2015-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -35,8 +35,9 @@ import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
/** /**
* Event published when new data is posted to the blackboard of a case. The * An event published when a new artifact (data artifact or analysis result) is
* "old" value is a legacy ModuleDataEvent object. The "new" value is null. * posted to the blackboard. The "old" value is a legacy ModuleDataEvent object.
* The "new" value is null.
*/ */
public final class BlackboardPostEvent extends AutopsyEvent implements Serializable { public final class BlackboardPostEvent extends AutopsyEvent implements Serializable {
@ -45,14 +46,15 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
private transient ModuleDataEvent eventData; private transient ModuleDataEvent eventData;
/** /**
* Constructs an event to be published when new content is added to a case * Constructs an event published when a new artifact (data artifact or
* or there is a change a recorded attribute of existing content. * analysis result) is posted to the blackboard. The "old" value is a legacy
* ModuleDataEvent object. The "new" value is null.
* *
* @param eventData A ModuleDataEvent object containing the data associated * @param eventData A ModuleDataEvent object containing the data associated
* with the blackboard post. * with the blackboard post.
*/ */
public BlackboardPostEvent(ModuleDataEvent eventData) { public BlackboardPostEvent(ModuleDataEvent eventData) {
/** /*
* Putting a serializable data holding object into oldValue to allow for * Putting a serializable data holding object into oldValue to allow for
* lazy loading of the ModuleDataEvent object for remote events. This * lazy loading of the ModuleDataEvent object for remote events. This
* bypasses the issues related to the serialization and de-serialization * bypasses the issues related to the serialization and de-serialization
@ -78,13 +80,13 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa
*/ */
@Override @Override
public Object getOldValue() { public Object getOldValue() {
/** /*
* The eventData field is set in the constructor, but it is transient so * The eventData field is set in the constructor, but it is transient,
* it will become null when the event is serialized for publication over * so it will become null when the event is serialized for publication
* a network. Doing a lazy load of the ModuleDataEvent object bypasses * over a network. Doing a lazy load of the ModuleDataEvent object
* the issues related to the serialization and de-serialization of * bypasses the issues related to the serialization and de-serialization
* BlackboardArtifact objects and may also save database round trips * of BlackboardArtifact objects and may also save database round trips
* from other nodes since subscribers to this event are often not * from other hosts since subscribers to this event are often not
* interested in the event data. * interested in the event data.
*/ */
if (null != eventData) { if (null != eventData) {
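For subscribers, the lazy loading described above means the payload should always be fetched through getOldValue() rather than from cached fields. A hedged sketch of a consumer (the listener class is hypothetical; ModuleDataEvent is imported from the ingest package as in the surrounding code):

// Sketch only: a subscriber that tolerates the transient eventData field
// being null after the event crosses the network.
class BlackboardPostListener implements java.beans.PropertyChangeListener {
    @Override
    public void propertyChange(java.beans.PropertyChangeEvent evt) {
        if (evt instanceof BlackboardPostEvent) {
            // getOldValue() lazily rebuilds the ModuleDataEvent for remote events.
            Object data = evt.getOldValue();
            if (data instanceof ModuleDataEvent) {
                System.out.println("Blackboard post: " + data);
            }
        }
    }
}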

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -513,8 +513,7 @@ final class AddLogicalImageTask implements Runnable {
private void postArtifacts(List<BlackboardArtifact> artifacts) { private void postArtifacts(List<BlackboardArtifact> artifacts) {
try { try {
// index the artifact for keyword search blackboard.postArtifacts(artifacts, MODULE_NAME, null);
blackboard.postArtifacts(artifacts, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
} }
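Note the new third argument to postArtifacts: an ingest job ID, passed as null here because AddLogicalImageTask does not run inside an ingest job. A minimal sketch of the two call shapes, assuming the surrounding class's blackboard, MODULE_NAME, and LOGGER:

// Sketch only: ingestJobId is context.getJobId() inside an ingest module,
// or null when no ingest job is running (as in this task).
private void postWithOptionalJob(List<BlackboardArtifact> artifacts, Long ingestJobId) {
    try {
        blackboard.postArtifacts(artifacts, MODULE_NAME, ingestJobId);
    } catch (Blackboard.BlackboardException ex) {
        LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS
    }
}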

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2013-2019 Basis Technology Corp. * Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -303,7 +303,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
.getAnalysisResult(); .getAnalysisResult();
Case.getCurrentCase().getServices().getArtifactsBlackboard() Case.getCurrentCase().getServices().getArtifactsBlackboard()
.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName()); .postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName(), context.getJobId());
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex); logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -114,7 +114,7 @@ final class DATExtractor extends DroneExtractor {
GeoTrackPoints trackPoints = processCSVFile(context, DATFile, csvFilePath); GeoTrackPoints trackPoints = processCSVFile(context, DATFile, csvFilePath);
if (trackPoints != null && !trackPoints.isEmpty()) { if (trackPoints != null && !trackPoints.isEmpty()) {
(new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile)).addTrack(DATFile.getName(), trackPoints, null); (new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile, context.getJobId())).addTrack(DATFile.getName(), trackPoints, null);
} else { } else {
logger.log(Level.INFO, String.format("No trackpoints with valid longitude or latitude found in %s", DATFile.getName())); //NON-NLS logger.log(Level.INFO, String.format("No trackpoints with valid longitude or latitude found in %s", DATFile.getName())); //NON-NLS
} }
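GeoArtifactsHelper likewise gains an ingest job ID parameter. The same construction as the hunk above, reformatted with one argument per line for readability (all variables come from the DATExtractor code):

// Sketch only: the updated helper construction, argument by argument.
GeoArtifactsHelper helper = new GeoArtifactsHelper(
        getSleuthkitCase(),   // case database
        getName(),            // module name
        "DatCon",             // name of the program that produced the data
        DATFile,              // source file for the track
        context.getJobId());  // new in this commit: the ingest job id
helper.addTrack(DATFile.getName(), trackPoints, null);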

View File

@ -341,7 +341,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this * keyword search, and fire an event to notify UI of this
* new artifact * new artifact
*/ */
blackboard.postArtifact(artifact, MODULE_NAME); blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
String msg = NbBundle.getMessage(SevenZipExtractor.class, String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
@ -870,7 +870,7 @@ class SevenZipExtractor {
* keyword search, and fire an event to notify UI of this * keyword search, and fire an event to notify UI of this
* new artifact * new artifact
*/ */
blackboard.postArtifact(artifact, MODULE_NAME); blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error( MessageNotifyUtil.Notify.error(

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2018 Basis Technology Corp. * Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -172,7 +172,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
* post the artifact which will index the artifact for keyword * post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact * search, and fire an event to notify UI of this new artifact
*/ */
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2017-2018 Basis Technology Corp. * Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -212,7 +212,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* post the artifact which will index the artifact for keyword * post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact * search, and fire an event to notify UI of this new artifact
*/ */
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
} }
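The same two-argument-to-three-argument migration repeats across the archive extractor, encryption detection, extension mismatch, file type, hash database, and interesting files modules in this commit. A consolidated, hedged sketch of the pattern as it would look in a generic file ingest module (the artifact type, score, and empty attribute list are placeholders, not code from this commit):

// Sketch only: the recurring postArtifact migration in this commit.
// 'context' is the IngestJobContext saved in the module's startUp().
@Override
public ProcessResult process(AbstractFile file) {
    try {
        Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        BlackboardArtifact artifact = file.newAnalysisResult(
                BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_UNKNOWN,
                null, null, null, new ArrayList<>()).getAnalysisResult();
        // Before: blackboard.postArtifact(artifact, moduleName);
        // After: the job id ties the posted artifact to this ingest job.
        blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId());
    } catch (NoCurrentCaseException | TskCoreException | Blackboard.BlackboardException ex) {
        logger.log(Level.SEVERE, "Unable to post blackboard artifact", ex); //NON-NLS
        return ProcessResult.ERROR;
    }
    return ProcessResult.OK;
}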

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -155,7 +155,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this * keyword search, and fire an event to notify UI of this
* new artifact * new artifact
*/ */
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName()); blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message()); MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2013-2018 Basis Technology Corp. * Copyright 2013-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -206,7 +206,7 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* keyword search, and fire an event to notify UI of this * keyword search, and fire an event to notify UI of this
* new artifact * new artifact
*/ */
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName()); tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName(), jobId);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@ -43,13 +42,11 @@ import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.HashHitInfo; import org.sleuthkit.datamodel.HashHitInfo;
import org.sleuthkit.datamodel.HashUtility; import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.Score.Significance;
import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
@ -552,7 +549,7 @@ public class HashDbIngestModule implements FileIngestModule {
* post the artifact which will index the artifact for keyword * post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact * search, and fire an event to notify UI of this new artifact
*/ */
blackboard.postArtifact(badFile, moduleName); blackboard.postArtifact(badFile, moduleName, jobId);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error( MessageNotifyUtil.Notify.error(

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2014-2018 Basis Technology Corp. * Copyright 2014-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -151,7 +151,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
try { try {
// Post thet artifact to the blackboard. // Post thet artifact to the blackboard.
blackboard.postArtifact(artifact, MODULE_NAME); blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName()); MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule {
} }
try { try {
aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName()); aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) { } catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex); throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -101,7 +101,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
} }
try { try {
iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName()); iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName(), context);
} catch (IOException | IngestModuleException | NoCurrentCaseException ex) { } catch (IOException | IngestModuleException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex); throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex);
} }
@ -333,7 +333,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
* added to reports * added to reports
*/ */
private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) { private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) {
List<String> allIndexFiles = new ArrayList<>(); List<String> allIndexFiles;
try (Stream<Path> walk = Files.walk(iLeappOutputDir)) { try (Stream<Path> walk = Files.walk(iLeappOutputDir)) {
@ -402,7 +402,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule {
String fileName = FilenameUtils.getName(ffp); String fileName = FilenameUtils.getName(ffp);
String filePath = FilenameUtils.getPath(ffp); String filePath = FilenameUtils.getPath(ffp);
List<AbstractFile> iLeappFiles = new ArrayList<>(); List<AbstractFile> iLeappFiles;
try { try {
if (filePath.isEmpty()) { if (filePath.isEmpty()) {
iLeappFiles = fileManager.findFiles(dataSource, fileName); //NON-NLS iLeappFiles = fileManager.findFiles(dataSource, fileName); //NON-NLS
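The lookup above uses the FileManager name search; the empty-path branch is shown, and the scoped branch presumably follows. A hedged sketch of the two lookup shapes, assuming this codebase's findFiles also accepts a parent-path substring:

// Sketch only: name-only lookup vs. lookup scoped by parent path.
List<AbstractFile> hits = filePath.isEmpty()
        ? fileManager.findFiles(dataSource, fileName)
        : fileManager.findFiles(dataSource, fileName, filePath);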

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -61,6 +61,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.NetworkUtils;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
@ -144,6 +145,7 @@ public final class LeappFileProcessor {
private static final Logger logger = Logger.getLogger(LeappFileProcessor.class.getName()); private static final Logger logger = Logger.getLogger(LeappFileProcessor.class.getName());
private final String xmlFile; //NON-NLS private final String xmlFile; //NON-NLS
private final String moduleName; private final String moduleName;
private final IngestJobContext context;
private final Map<String, String> tsvFiles; private final Map<String, String> tsvFiles;
private final Map<String, BlackboardArtifact.Type> tsvFileArtifacts; private final Map<String, BlackboardArtifact.Type> tsvFileArtifacts;
@ -192,15 +194,16 @@ public final class LeappFileProcessor {
.put("call history.tsv", "calllog") .put("call history.tsv", "calllog")
.build(); .build();
Blackboard blkBoard; private final Blackboard blkBoard;
public LeappFileProcessor(String xmlFile, String moduleName) throws IOException, IngestModuleException, NoCurrentCaseException { public LeappFileProcessor(String xmlFile, String moduleName, IngestJobContext context) throws IOException, IngestModuleException, NoCurrentCaseException {
this.tsvFiles = new HashMap<>(); this.tsvFiles = new HashMap<>();
this.tsvFileArtifacts = new HashMap<>(); this.tsvFileArtifacts = new HashMap<>();
this.tsvFileArtifactComments = new HashMap<>(); this.tsvFileArtifactComments = new HashMap<>();
this.tsvFileAttributes = new HashMap<>(); this.tsvFileAttributes = new HashMap<>();
this.xmlFile = xmlFile; this.xmlFile = xmlFile;
this.moduleName = moduleName; this.moduleName = moduleName;
this.context = context;
blkBoard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); blkBoard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
@ -218,7 +221,8 @@ public final class LeappFileProcessor {
"LeappFileProcessor.has.run=Leapp", "LeappFileProcessor.has.run=Leapp",
"LeappFileProcessor.Leapp.cancelled=Leapp run was canceled", "LeappFileProcessor.Leapp.cancelled=Leapp run was canceled",
"LeappFileProcessor.completed=Leapp Processing Completed", "LeappFileProcessor.completed=Leapp Processing Completed",
"LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"}) "LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"
})
public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile LeappFile) { public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile LeappFile) {
try { try {
List<String> LeappTsvOutputFiles = findTsvFiles(moduleOutputPath); List<String> LeappTsvOutputFiles = findTsvFiles(moduleOutputPath);
@ -249,7 +253,7 @@ public final class LeappFileProcessor {
* we know we want to process and return the list to process those files. * we know we want to process and return the list to process those files.
*/ */
private List<String> findTsvFiles(Path LeappOutputDir) throws IngestModuleException { private List<String> findTsvFiles(Path LeappOutputDir) throws IngestModuleException {
List<String> allTsvFiles = new ArrayList<>(); List<String> allTsvFiles;
List<String> foundTsvFiles = new ArrayList<>(); List<String> foundTsvFiles = new ArrayList<>();
try (Stream<Path> walk = Files.walk(LeappOutputDir)) { try (Stream<Path> walk = Files.walk(LeappOutputDir)) {
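The hunk ends inside findTsvFiles, but the intent is visible: walk the Leapp output directory and keep only the .tsv files. A hedged sketch of how such a walk typically completes (the filtering details are assumptions, not shown in this diff; imports for Collectors and IOException are assumed):

// Sketch only: collecting *.tsv paths from the output directory.
try (Stream<Path> walk = Files.walk(LeappOutputDir)) {
    allTsvFiles = walk
            .filter(Files::isRegularFile)
            .map(Path::toString)
            .filter(name -> name.toLowerCase().endsWith(".tsv"))
            .collect(Collectors.toList());
} catch (IOException ex) {
    throw new IngestModuleException(Bundle.LeappFileProcessor_error_reading_Leapp_directory(), ex);
}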
@ -347,10 +351,10 @@ public final class LeappFileProcessor {
AbstractFile geoAbstractFile = null; AbstractFile geoAbstractFile = null;
if (LeappFile == null || !LeappFile.exists() || fileName == null) { if (LeappFile == null || !LeappFile.exists() || fileName == null) {
logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile == null ? LeappFile.toString() : "<null>")); logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile != null ? LeappFile.toString() : "<null>"));
return; return;
} else if (attrList == null || artifactType == null || dataSource == null) { } else if (attrList == null || artifactType == null || dataSource == null) {
logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile == null ? LeappFile.toString() : "<null>")); logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile.toString()));
return; return;
} }
@ -407,9 +411,8 @@ public final class LeappFileProcessor {
} }
try { try {
if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase() == "trackpoint") { if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase().equals("trackpoint")) {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile)).addTrack(trackpointSegmentName, pointList, new ArrayList<>()); (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile, context.getJobId())).addTrack(trackpointSegmentName, pointList, new ArrayList<>());
} }
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
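This hunk, and the segment-name check in createTrackpoint further down, fix the same Java pitfall: == on strings compares references, not contents. A self-contained illustration:

// Sketch only: why the comparisons above were changed to equals().
String a = "trackpoint";
String b = new String("trackpoint"); // same characters, distinct object
System.out.println(a == b);          // false: different references
System.out.println(a.equals(b));     // true: same contents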
@ -418,9 +421,8 @@ public final class LeappFileProcessor {
} }
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact.", "LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact."
}) })
private void createRoute(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException { private void createRoute(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
Double startLatitude = Double.valueOf(0); Double startLatitude = Double.valueOf(0);
@ -433,7 +435,7 @@ public final class LeappFileProcessor {
Long dateTime = Long.valueOf(0); Long dateTime = Long.valueOf(0);
Collection<BlackboardAttribute> otherAttributes = new ArrayList<>(); Collection<BlackboardAttribute> otherAttributes = new ArrayList<>();
String sourceFile = null; String sourceFile = null;
AbstractFile absFile = null; AbstractFile absFile;
String comment = ""; String comment = "";
try { try {
@ -478,19 +480,17 @@ public final class LeappFileProcessor {
GeoWaypoints waypointList = new GeoWaypoints(); GeoWaypoints waypointList = new GeoWaypoints();
waypointList.addPoint(new Waypoint(startLatitude, startLongitude, zeroValue, "")); waypointList.addPoint(new Waypoint(startLatitude, startLongitude, zeroValue, ""));
waypointList.addPoint(new Waypoint(endLatitude, endLongitude, zeroValue, locationName)); waypointList.addPoint(new Waypoint(endLatitude, endLongitude, zeroValue, locationName));
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addRoute(destinationName, dateTime, waypointList, new ArrayList<>()); (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addRoute(destinationName, dateTime, waypointList, new ArrayList<>());
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_waypoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_waypoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
} }
} }
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact.", "LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact."
}) })
private AbstractFile createTrackpoint(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName, String trackpointSegmentName, GeoTrackPoints pointList) throws IngestModuleException { private AbstractFile createTrackpoint(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName, String trackpointSegmentName, GeoTrackPoints pointList) throws IngestModuleException {
Double latitude = Double.valueOf(0); Double latitude = Double.valueOf(0);
@ -538,13 +538,10 @@ public final class LeappFileProcessor {
if (absFile == null) { if (absFile == null) {
absFile = (AbstractFile) dataSource; absFile = (AbstractFile) dataSource;
} }
if ((trackpointSegmentName == null) || (trackpointSegmentName == segmentName)) { if ((trackpointSegmentName == null) || (trackpointSegmentName.equals(segmentName))) {
trackpointSegmentName = segmentName;
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
} else { } else {
(new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addTrack(segmentName, pointList, new ArrayList<>()); (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addTrack(segmentName, pointList, new ArrayList<>());
trackpointSegmentName = segmentName;
pointList = new GeoTrackPoints();
pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime));
} }
@ -556,11 +553,9 @@ public final class LeappFileProcessor {
} }
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship.", "LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship."
}) })
private void createMessageRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException { private void createMessageRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
String messageType = null; String messageType = null;
@ -577,7 +572,7 @@ public final class LeappFileProcessor {
List<BlackboardAttribute> otherAttributes = new ArrayList<>(); List<BlackboardAttribute> otherAttributes = new ArrayList<>();
List<FileAttachment> fileAttachments = new ArrayList<>(); List<FileAttachment> fileAttachments = new ArrayList<>();
String sourceFile = null; String sourceFile = null;
MessageAttachments messageAttachments = null; MessageAttachments messageAttachments;
try { try {
for (BlackboardAttribute bba : bbattributes) { for (BlackboardAttribute bba : bbattributes) {
@ -639,21 +634,21 @@ public final class LeappFileProcessor {
if (absFile == null) { if (absFile == null) {
absFile = (AbstractFile) dataSource; absFile = (AbstractFile) dataSource;
} }
CommunicationArtifactsHelper accountArtifact; CommunicationArtifactsHelper accountHelper;
Account.Type accountType = getAccountType(fileName); Account.Type accountType = getAccountType(fileName);
if (alternateId == null) { if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType); moduleName, absFile, accountType, context.getJobId());
} else { } else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId); moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
} }
BlackboardArtifact messageArtifact = accountArtifact.addMessage(messageType, communicationDirection, senderId, BlackboardArtifact messageArtifact = accountHelper.addMessage(messageType, communicationDirection, senderId,
receipentId, dateTime, messageStatus, subject, receipentId, dateTime, messageStatus, subject,
messageText, threadId, otherAttributes); messageText, threadId, otherAttributes);
if (!fileAttachments.isEmpty()) { if (!fileAttachments.isEmpty()) {
messageAttachments = new MessageAttachments(fileAttachments, new ArrayList<>()); messageAttachments = new MessageAttachments(fileAttachments, new ArrayList<>());
accountArtifact.addAttachments(messageArtifact, messageAttachments); accountHelper.addAttachments(messageArtifact, messageAttachments);
} }
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -662,7 +657,7 @@ public final class LeappFileProcessor {
} }
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship.", "LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship."
}) })
private void createContactRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException { private void createContactRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
@ -716,15 +711,15 @@ public final class LeappFileProcessor {
Account.Type accountType = getAccountType(fileName); Account.Type accountType = getAccountType(fileName);
if (accountType != null) { if (accountType != null) {
CommunicationArtifactsHelper accountArtifact; CommunicationArtifactsHelper accountHelper;
if (alternateId == null) { if (alternateId == null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType); moduleName, absFile, accountType, context.getJobId());
} else { } else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId); moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
} }
BlackboardArtifact messageArtifact = accountArtifact.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes); BlackboardArtifact messageArtifact = accountHelper.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes);
} }
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_contact_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_contact_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
@ -732,9 +727,8 @@ public final class LeappFileProcessor {
} }
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship.", "LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship."
}) })
private void createCalllogRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException { private void createCalllogRelationship(Collection<BlackboardAttribute> bbattributes, Content dataSource, String fileName) throws IngestModuleException {
String callerId = null; String callerId = null;
@ -787,7 +781,7 @@ public final class LeappFileProcessor {
} }
} }
if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING) { if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING && callerId != null) {
String[] calleeTempList = callerId.split(",", 0); String[] calleeTempList = callerId.split(",", 0);
calleeId = Arrays.asList(calleeTempList); calleeId = Arrays.asList(calleeTempList);
callerId = null; callerId = null;
@ -797,15 +791,15 @@ public final class LeappFileProcessor {
absFile = (AbstractFile) dataSource; absFile = (AbstractFile) dataSource;
} }
Account.Type accountType = getAccountType(fileName); Account.Type accountType = getAccountType(fileName);
CommunicationArtifactsHelper accountArtifact; CommunicationArtifactsHelper accountHelper;
if (accountType != null) { if (accountType != null) {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType); moduleName, absFile, accountType, context.getJobId());
} else { } else {
accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(),
moduleName, absFile, accountType, accountType, alternateId); moduleName, absFile, accountType, accountType, alternateId, context.getJobId());
} }
BlackboardArtifact callLogArtifact = accountArtifact.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes); accountHelper.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes);
} catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) {
throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_calllog_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_calllog_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS
} }
@ -886,14 +880,16 @@ public final class LeappFileProcessor {
* *
* @param lineValues List of column values. * @param lineValues List of column values.
* @param columnIndexes Mapping of column headers (trimmed; to lower case) * @param columnIndexes Mapping of column headers (trimmed; to lower case)
* to column index. All header columns and only all header columns should be * to column index. All header columns and only all
* present. * header columns should be present.
* @param attrList The list of attributes as specified for the schema of * @param attrList The list of attributes as specified for the schema
* this file. * of this file.
* @param fileName The name of the file being processed. * @param fileName The name of the file being processed.
* @param lineNum The line number in the file. * @param lineNum The line number in the file.
*
* @return The collection of blackboard attributes for the artifact created * @return The collection of blackboard attributes for the artifact created
* from this line. * from this line.
*
* @throws IngestModuleException * @throws IngestModuleException
*/ */
private Collection<BlackboardAttribute> processReadLine(List<String> lineValues, Map<String, Integer> columnIndexes, private Collection<BlackboardAttribute> processReadLine(List<String> lineValues, Map<String, Integer> columnIndexes,
@ -930,7 +926,7 @@ public final class LeappFileProcessor {
String formattedValue = formatValueBasedOnAttrType(colAttr, value); String formattedValue = formatValueBasedOnAttrType(colAttr, value);
BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName); BlackboardAttribute attr = getAttribute(colAttr.getAttributeType(), formattedValue, fileName);
if (attr == null) { if (attr == null) {
logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName)); logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName));
return Collections.emptyList(); return Collections.emptyList();
@ -950,6 +946,7 @@ public final class LeappFileProcessor {
* *
* @param colAttr Column Attribute information * @param colAttr Column Attribute information
* @param value string to be formatted * @param value string to be formatted
*
* @return formatted string based on attribute type if no attribute type * @return formatted string based on attribute type if no attribute type
* found then return original string * found then return original string
*/ */
@ -974,6 +971,7 @@ public final class LeappFileProcessor {
* @param value The string value to be converted to the appropriate data * @param value The string value to be converted to the appropriate data
* type for the attribute type. * type for the attribute type.
* @param fileName The file name that the value comes from. * @param fileName The file name that the value comes from.
*
* @return The generated blackboard attribute. * @return The generated blackboard attribute.
*/ */
private BlackboardAttribute getAttribute(BlackboardAttribute.Type attrType, String value, String fileName) { private BlackboardAttribute getAttribute(BlackboardAttribute.Type attrType, String value, String fileName) {
@ -998,7 +996,7 @@ public final class LeappFileProcessor {
(v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v).longValue())); (v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v).longValue()));
case DOUBLE: case DOUBLE:
return parseAttrValue(value.trim(), attrType, fileName, true, false, return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, (double) Double.valueOf(v))); (v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v)));
case BYTE: case BYTE:
return parseAttrValue(value.trim(), attrType, fileName, true, false, return parseAttrValue(value.trim(), attrType, fileName, true, false,
(v) -> new BlackboardAttribute(attrType, moduleName, new byte[]{Byte.valueOf(v)})); (v) -> new BlackboardAttribute(attrType, moduleName, new byte[]{Byte.valueOf(v)}));
@ -1022,7 +1020,9 @@ public final class LeappFileProcessor {
* Handles converting a string value to a blackboard attribute. * Handles converting a string value to a blackboard attribute.
* *
* @param orig The original string value. * @param orig The original string value.
*
* @return The generated blackboard attribute. * @return The generated blackboard attribute.
*
* @throws ParseException * @throws ParseException
* @throws NumberFormatException * @throws NumberFormatException
*/ */
@ -1037,32 +1037,37 @@ public final class LeappFileProcessor {
* @param attrType The blackboard attribute type. * @param attrType The blackboard attribute type.
* @param fileName The name of the file from which the value comes. * @param fileName The name of the file from which the value comes.
* @param blankIsNull If string is blank return null attribute. * @param blankIsNull If string is blank return null attribute.
* @param zeroIsNull If string is some version of 0, return null attribute. * @param zeroIsNull If string is some version of 0, return null
* attribute.
* @param valueConverter The means of converting the string value to an * @param valueConverter The means of converting the string value to an
* appropriate blackboard attribute. * appropriate blackboard attribute.
*
* @return The generated blackboard attribute or null if not determined. * @return The generated blackboard attribute or null if not determined.
*/ */
private BlackboardAttribute parseAttrValue(String value, BlackboardAttribute.Type attrType, String fileName, boolean blankIsNull, boolean zeroIsNull, ParseExceptionFunction valueConverter) { private BlackboardAttribute parseAttrValue(String value, BlackboardAttribute.Type attrType, String fileName, boolean blankIsNull, boolean zeroIsNull, ParseExceptionFunction valueConverter) {
// remove non-printable characters from tsv input // remove non-printable characters from tsv input
// https://stackoverflow.com/a/6199346 // https://stackoverflow.com/a/6199346
value = value.replaceAll("\\p{C}", ""); String sanitizedValue = value.replaceAll("\\p{C}", "");
if (blankIsNull && StringUtils.isBlank(value)) { if (blankIsNull && StringUtils.isBlank(sanitizedValue)) {
return null; return null;
} }
if (zeroIsNull && value.matches("^\\s*[0\\.]*\\s*$")) { if (zeroIsNull && sanitizedValue.matches("^\\s*[0\\.]*\\s*$")) {
return null; return null;
} }
try { try {
return valueConverter.apply(value); return valueConverter.apply(sanitizedValue);
} catch (NumberFormatException | ParseException ex) { } catch (NumberFormatException | ParseException ex) {
logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", value, attrType.getValueType().getLabel(), fileName), ex); logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", sanitizedValue, attrType.getValueType().getLabel(), fileName), ex);
return null; return null;
} }
} }
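The sanitization step above removes Unicode control and format characters before any numeric parsing. A tiny demonstration of the regex in isolation:

// Sketch only: \p{C} matches control/format code points, so embedded NULs
// and zero-width characters are stripped from TSV values before parsing.
String raw = "1\u00002\u200B3";              // contains a NUL and a zero-width space
String sanitized = raw.replaceAll("\\p{C}", "");
System.out.println(sanitized);               // prints 123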
/**
* Read the XML config file and load the mappings into maps
*/
@NbBundle.Messages({ @NbBundle.Messages({
"LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.", "LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.",
"LeappFileProcessor.cannotBuildXmlParser=Cannot buld an XML parser.", "LeappFileProcessor.cannotBuildXmlParser=Cannot buld an XML parser.",
@ -1070,10 +1075,6 @@ public final class LeappFileProcessor {
"LeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact", "LeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact",
"LeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts." "LeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts."
}) })
/**
* Read the XML config file and load the mappings into maps
*/
private void loadConfigFile() throws IngestModuleException { private void loadConfigFile() throws IngestModuleException {
Document xmlinput; Document xmlinput;
try { try {
@ -1120,7 +1121,7 @@ public final class LeappFileProcessor {
BlackboardArtifact.Type foundArtifactType = null; BlackboardArtifact.Type foundArtifactType = null;
try { try {
foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getArtifactType(artifactName); foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getArtifactType(artifactName);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch artifact type for %s.", artifactName), ex); logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch artifact type for %s.", artifactName), ex);
} }
@ -1165,7 +1166,7 @@ public final class LeappFileProcessor {
BlackboardAttribute.Type foundAttrType = null; BlackboardAttribute.Type foundAttrType = null;
try { try {
foundAttrType = Case.getCurrentCase().getSleuthkitCase().getAttributeType(attributeName.toUpperCase()); foundAttrType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getAttributeType(attributeName.toUpperCase());
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch attribute type for %s.", attributeName), ex); logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch attribute type for %s.", attributeName), ex);
} }
@ -1181,10 +1182,13 @@ public final class LeappFileProcessor {
if (columnName == null) { if (columnName == null) {
logger.log(Level.SEVERE, String.format("No column name provided for [%s]", getXmlAttrIdentifier(parentName, attributeName))); logger.log(Level.SEVERE, String.format("No column name provided for [%s]", getXmlAttrIdentifier(parentName, attributeName)));
continue;
} else if (columnName.trim().length() != columnName.length()) { } else if (columnName.trim().length() != columnName.length()) {
logger.log(Level.SEVERE, String.format("Column name '%s' starts or ends with whitespace for [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName))); logger.log(Level.SEVERE, String.format("Column name '%s' starts or ends with whitespace for [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
continue;
} else if (columnName.matches("[^ \\S]")) { } else if (columnName.matches("[^ \\S]")) {
logger.log(Level.SEVERE, String.format("Column name '%s' contains invalid characters [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName))); logger.log(Level.SEVERE, String.format("Column name '%s' contains invalid characters [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName)));
continue;
} }
TsvColumn thisCol = new TsvColumn( TsvColumn thisCol = new TsvColumn(
@ -1213,7 +1217,8 @@ public final class LeappFileProcessor {
* @param dataSource is the Content object that needs to have the artifact * @param dataSource is the Content object that needs to have the artifact
* added for it * added for it
* @param bbattributes is the collection of blackboard attributes that need * @param bbattributes is the collection of blackboard attributes that need
* to be added to the artifact after the artifact has been created * to be added to the artifact after the artifact has
* been created
* *
* @return The newly-created artifact, or null on error * @return The newly-created artifact, or null on error
*/ */
@ -1225,7 +1230,7 @@ public final class LeappFileProcessor {
case ANALYSIS_RESULT: case ANALYSIS_RESULT:
return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult(); return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult();
default: default:
logger.log(Level.SEVERE, "Unknown category type: " + artType.getCategory().getDisplayName()); logger.log(Level.SEVERE, String.format("Unknown category type: %s", artType.getCategory().getDisplayName()));
return null; return null;
} }
} catch (TskException ex) { } catch (TskException ex) {
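The switch above dispatches on the artifact type's category. A condensed sketch of the whole method, assuming the DATA_ARTIFACT branch mirrors the newDataArtifact(type, attributes, osAccountId) call used elsewhere in this commit; the ANALYSIS_RESULT branch and its null score arguments are taken directly from the hunk:

    // Sketch: create a data artifact or analysis result depending on the type's category.
    private BlackboardArtifact createArtifact(BlackboardArtifact.Type artType, Content dataSource,
            Collection<BlackboardAttribute> bbattributes) throws TskException {
        switch (artType.getCategory()) {
            case DATA_ARTIFACT:
                return dataSource.newDataArtifact(artType, bbattributes, null); // assumed branch
            case ANALYSIS_RESULT:
                return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN,
                        null, null, null, bbattributes).getAnalysisResult();
            default:
                logger.log(Level.SEVERE, String.format("Unknown category type: %s",
                        artType.getCategory().getDisplayName()));
                return null;
        }
    }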
@ -1246,7 +1251,7 @@ public final class LeappFileProcessor {
} }
try { try {
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName); Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName, context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, Bundle.LeappFileProcessor_postartifacts_error(), ex); //NON-NLS logger.log(Level.SEVERE, Bundle.LeappFileProcessor_postartifacts_error(), ex); //NON-NLS
} }
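Every posting site in this commit gains a third argument, the ingest job ID, so downstream listeners can associate artifacts with the job that produced them. A minimal sketch of the batched form, assuming a cached IngestJobContext field named context as in the sample modules later in this commit:

    // Sketch: batch-post artifacts, tagged with the current ingest job ID.
    private void postArtifacts(Collection<BlackboardArtifact> artifacts, String moduleName) {
        if (artifacts.isEmpty()) {
            return; // nothing to post
        }
        try {
            Case.getCurrentCase().getSleuthkitCase().getBlackboard()
                    .postArtifacts(artifacts, moduleName, context.getJobId());
        } catch (Blackboard.BlackboardException ex) {
            logger.log(Level.SEVERE, "Error posting artifacts.", ex); //NON-NLS
        }
    }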
View File
@ -172,7 +172,7 @@ public class EXIFProcessor implements PictureProcessor {
artifacts.add(userSuspectedArtifact); artifacts.add(userSuspectedArtifact);
try { try {
blackboard.postArtifacts(artifacts, MODULE_NAME); blackboard.postArtifacts(artifacts, MODULE_NAME, context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Error posting TSK_METADATA_EXIF and TSK_USER_CONTENT_SUSPECTED artifacts for %s (object ID = %d)", file.getName(), file.getId()), ex); //NON-NLS logger.log(Level.SEVERE, String.format("Error posting TSK_METADATA_EXIF and TSK_USER_CONTENT_SUSPECTED artifacts for %s (object ID = %d)", file.getName(), file.getId()), ex); //NON-NLS
} }
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2018-2019 Basis Technology Corp. * Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -352,7 +352,7 @@ public class PlasoIngestModule implements DataSourceIngestModule {
* keyword search, and fire an event to notify UI of * keyword search, and fire an event to notify UI of
* this new artifact * this new artifact
*/ */
blackboard.postArtifact(bbart, MODULE_NAME); blackboard.postArtifact(bbart, MODULE_NAME, context.getJobId());
} catch (BlackboardException ex) { } catch (BlackboardException ex) {
logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
} }
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2020 Basis Technology Corp. * Copyright 2020-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -159,7 +159,7 @@ public class YaraIngestModule extends FileIngestModuleAdapter {
if(!artifacts.isEmpty()) { if(!artifacts.isEmpty()) {
Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName()); blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName(), context.getJobId());
} }
} catch (BlackboardException | NoCurrentCaseException | IngestModuleException | TskCoreException | YaraWrapperException ex) { } catch (BlackboardException | NoCurrentCaseException | IngestModuleException | TskCoreException | YaraWrapperException ex) {
View File
@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.casemodule.services.TagsManager;
import org.sleuthkit.autopsy.coreutils.ImageUtils; import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.report.ReportProgressPanel; import org.sleuthkit.autopsy.report.ReportProgressPanel;
import static org.sleuthkit.autopsy.casemodule.services.TagsManager.getNotableTagLabel; import static org.sleuthkit.autopsy.casemodule.services.TagsManager.getNotableTagLabel;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
@ -1217,7 +1216,7 @@ class TableReportGenerator {
private List<ArtifactData> getFilteredArtifacts(BlackboardArtifact.Type type, HashSet<String> tagNamesFilter) { private List<ArtifactData> getFilteredArtifacts(BlackboardArtifact.Type type, HashSet<String> tagNamesFilter) {
List<ArtifactData> artifacts = new ArrayList<>(); List<ArtifactData> artifacts = new ArrayList<>();
try { try {
for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifacts(type.getTypeID())) { for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getArtifacts(Collections.singletonList(type), settings.getSelectedDataSources())) {
if (shouldFilterFromReport(artifact)) { if (shouldFilterFromReport(artifact)) {
continue; continue;
} }
@ -1232,7 +1231,7 @@ class TableReportGenerator {
continue; continue;
} }
try { try {
artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardAttributes(artifact), uniqueTagNames)); artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getBlackboardAttributes(artifact), uniqueTagNames));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBAttribs")); errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBAttribs"));
logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); //NON-NLS logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); //NON-NLS
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019-2020 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -354,7 +354,7 @@ public class PortableCaseReportModule implements ReportModule {
} }
for (BlackboardAttribute.ATTRIBUTE_TYPE type : BlackboardAttribute.ATTRIBUTE_TYPE.values()) { for (BlackboardAttribute.ATTRIBUTE_TYPE type : BlackboardAttribute.ATTRIBUTE_TYPE.values()) {
try { try {
oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel())); oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getBlackboard().getAttributeType(type.getLabel()));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
handleError("Error looking up attribute name " + type.getLabel(), handleError("Error looking up attribute name " + type.getLabel(),
Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()), Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()),
@ -1084,7 +1084,7 @@ public class PortableCaseReportModule implements ReportModule {
return oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID()); return oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID());
} }
BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName()); BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getBlackboard().getArtifactType(oldArtifact.getArtifactTypeName());
try { try {
BlackboardArtifact.Type newCustomType = portableSkCase.getBlackboard().getOrAddArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName()); BlackboardArtifact.Type newCustomType = portableSkCase.getBlackboard().getOrAddArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName());
oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID()); oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID());
@ -1424,7 +1424,7 @@ public class PortableCaseReportModule implements ReportModule {
// Add the attachment. The account type specified in the constructor will not be used. // Add the attachment. The account type specified in the constructor will not be used.
CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(), CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(),
newSourceStr, newFile, Account.Type.EMAIL); newSourceStr, newFile, Account.Type.EMAIL, null);
communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments())); communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments()));
} }
catch (BlackboardJsonAttrUtil.InvalidJsonException ex) { catch (BlackboardJsonAttrUtil.InvalidJsonException ex) {
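CommunicationArtifactsHelper now takes the ingest job ID as a trailing argument; report modules run outside any ingest job, so this call site passes null. A sketch mirroring the arguments above:

    // Sketch: construct the helper outside ingest, so no job ID is available.
    CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(
            currentCase.getSleuthkitCase(), newSourceStr, newFile, Account.Type.EMAIL,
            null /* no ingest job ID outside an ingest job */);
    communicationArtifactsHelper.addAttachments(newArtifact,
            new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments()));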
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2017-2020 Basis Technology Corp. * Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -23,7 +23,6 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter; import javax.xml.bind.DatatypeConverter;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
@ -87,6 +86,7 @@ final class CustomArtifactType {
* blackboard. * blackboard.
* *
* @param source The artifact source content. * @param source The artifact source content.
* @param ingestJobId The ingest job ID.
* *
* @return A BlackboardArtifact object. * @return A BlackboardArtifact object.
* *
@ -95,7 +95,7 @@ final class CustomArtifactType {
* @throws Blackboard.BlackboardException If there is an error posting the * @throws Blackboard.BlackboardException If there is an error posting the
* artifact to the blackboard. * artifact to the blackboard.
*/ */
static BlackboardArtifact createAndPostInstance(Content source) throws TskCoreException, Blackboard.BlackboardException { static BlackboardArtifact createAndPostInstance(Content source, long ingestJobId) throws TskCoreException, Blackboard.BlackboardException {
List<BlackboardAttribute> attributes = new ArrayList<>(); List<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0)); attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0));
attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0)); attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0));
@ -131,7 +131,7 @@ final class CustomArtifactType {
} }
Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard(); Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard();
blackboard.postArtifact(artifact, MODULE_NAME); blackboard.postArtifact(artifact, MODULE_NAME, ingestJobId);
return artifact; return artifact;
} }
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2017-2020 Basis Technology Corp. * Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -38,6 +38,7 @@ import org.sleuthkit.datamodel.TskCoreException;
public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceIngestModuleAdapter { public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceIngestModuleAdapter {
private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorDataSourceIngestModule.class.getName()); private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorDataSourceIngestModule.class.getName());
private IngestJobContext context;
/** /**
* Adds the custom artifact type this module uses to the case database of * Adds the custom artifact type this module uses to the case database of
@ -51,6 +52,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
*/ */
@Override @Override
public void startUp(IngestJobContext context) throws IngestModuleException { public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
try { try {
CustomArtifactType.addToCaseDatabase(); CustomArtifactType.addToCaseDatabase();
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
@ -70,7 +72,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge
@Override @Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) { public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
try { try {
CustomArtifactType.createAndPostInstance(dataSource); CustomArtifactType.createAndPostInstance(dataSource, context.getJobId());
} catch (TskCoreException | Blackboard.BlackboardException ex) { } catch (TskCoreException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Failed to process data source (obj_id = %d)", dataSource.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to process data source (obj_id = %d)", dataSource.getId()), ex);
return ProcessResult.ERROR; return ProcessResult.ERROR;
View File
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2017-2020 Basis Technology Corp. * Copyright 2017-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -37,6 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException;
final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapter { final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapter {
private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorFileIngestModule.class.getName()); private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorFileIngestModule.class.getName());
private IngestJobContext context;
/** /**
* Adds the custom artifact type this module uses to the case database of * Adds the custom artifact type this module uses to the case database of
@ -50,6 +51,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
*/ */
@Override @Override
public void startUp(IngestJobContext context) throws IngestModuleException { public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
try { try {
CustomArtifactType.addToCaseDatabase(); CustomArtifactType.addToCaseDatabase();
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
@ -71,7 +73,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt
return ProcessResult.OK; return ProcessResult.OK;
} }
try { try {
CustomArtifactType.createAndPostInstance(file); CustomArtifactType.createAndPostInstance(file, context.getJobId());
} catch (TskCoreException | Blackboard.BlackboardException ex) { } catch (TskCoreException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex);
return ProcessResult.ERROR; return ProcessResult.ERROR;
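Both sample modules follow the same two-step pattern: cache the IngestJobContext in startUp, then pass context.getJobId() whenever an artifact is created and posted. A trimmed sketch of that pattern (the class name is illustrative; CustomArtifactType is the helper defined earlier in this commit):

    // Sketch: cache the ingest job context at startup, supply its job ID per file.
    final class ExampleFileIngestModule extends FileIngestModuleAdapter {

        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) throws IngestModuleException {
            this.context = context; // cached so process() can supply the job ID
        }

        @Override
        public ProcessResult process(AbstractFile file) {
            try {
                CustomArtifactType.createAndPostInstance(file, context.getJobId());
            } catch (TskCoreException | Blackboard.BlackboardException ex) {
                return ProcessResult.ERROR;
            }
            return ProcessResult.OK;
        }
    }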
View File
@ -25,6 +25,7 @@ import java.util.Collections;
import java.util.EnumSet; import java.util.EnumSet;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
import javax.annotation.concurrent.GuardedBy;
import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestJobStartResult; import org.sleuthkit.autopsy.ingest.IngestJobStartResult;
@ -53,7 +54,7 @@ public final class IngestJobRunner {
*/ */
public static List<IngestModuleError> runIngestJob(Collection<Content> dataSources, IngestJobSettings settings) throws InterruptedException { public static List<IngestModuleError> runIngestJob(Collection<Content> dataSources, IngestJobSettings settings) throws InterruptedException {
Object ingestMonitor = new Object(); Object ingestMonitor = new Object();
IngestJobCompletiontListener completiontListener = new IngestJobCompletiontListener(ingestMonitor); IngestJobCompletionListener completiontListener = new IngestJobCompletionListener(ingestMonitor, dataSources.size());
IngestManager ingestManager = IngestManager.getInstance(); IngestManager ingestManager = IngestManager.getInstance();
ingestManager.addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, completiontListener); ingestManager.addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, completiontListener);
try { try {
@ -81,10 +82,13 @@ public final class IngestJobRunner {
* An ingest job event listener that allows IngestRunner.runIngestJob to * An ingest job event listener that allows IngestRunner.runIngestJob to
* block until the specified ingest job is completed. * block until the specified ingest job is completed.
*/ */
private static final class IngestJobCompletiontListener implements PropertyChangeListener { private static final class IngestJobCompletionListener implements PropertyChangeListener {
private final Object ingestMonitor; private final Object ingestMonitor;
@GuardedBy("ingestMonitor")
private int remainingJobsCount;
/** /**
* Constructs an ingest job event listener that allows * Constructs an ingest job event listener that allows
* IngestRunner.runIngestJob to block until the specified ingest job is * IngestRunner.runIngestJob to block until the specified ingest job is
@ -92,9 +96,11 @@ public final class IngestJobRunner {
* *
* @param ingestMonitor A Java object to notify when the ingest job is * @param ingestMonitor A Java object to notify when the ingest job is
* completed. * completed.
* @param jobsCount The number of jobs to listen for before notifying the monitor.
*/ */
IngestJobCompletiontListener(Object ingestMonitor) { IngestJobCompletionListener(Object ingestMonitor, int jobsCount) {
this.ingestMonitor = ingestMonitor; this.ingestMonitor = ingestMonitor;
this.remainingJobsCount = jobsCount;
} }
/** /**
@ -109,11 +115,14 @@ public final class IngestJobRunner {
String eventType = event.getPropertyName(); String eventType = event.getPropertyName();
if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
synchronized (ingestMonitor) { synchronized (ingestMonitor) {
this.remainingJobsCount--;
if (this.remainingJobsCount <= 0) {
ingestMonitor.notify(); ingestMonitor.notify();
} }
} }
} }
} }
} }
}
} }
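The listener now decrements a counter under the monitor lock and only notifies once every job has finished, which is why runIngestJob passes dataSources.size(). The same barrier could be written with a java.util.concurrent.CountDownLatch; a sketch of that alternative, under the assumption that exactly one COMPLETED or CANCELLED event fires per data source (await() throws the InterruptedException that runIngestJob already declares):

    // Sketch: the same completion barrier expressed with a CountDownLatch.
    CountDownLatch latch = new CountDownLatch(dataSources.size());
    PropertyChangeListener listener = event -> {
        String eventType = event.getPropertyName();
        if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
                || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
            latch.countDown(); // one job finished
        }
    };
    // ... register the listener and start the jobs, then:
    latch.await(); // blocks until every job has completed or been cancelled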
View File
@ -157,7 +157,7 @@ public class AddManualEvent extends Action {
BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null); BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null);
try { try {
sleuthkitCase.getBlackboard().postArtifact(artifact, source); sleuthkitCase.getBlackboard().postArtifact(artifact, source, null);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait(); new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();
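Manual timeline events are created from a UI action rather than an ingest module, so there is no ingest job to attribute them to; the post call passes null for the job ID. The essential calls, as shown above:

    // Sketch: post a manually created timeline event with no associated ingest job.
    BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(
            new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null);
    sleuthkitCase.getBlackboard().postArtifact(artifact, source, null /* no ingest job */);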
View File
@ -20,6 +20,7 @@
package org.sleuthkit.autopsy.commonpropertiessearch; package org.sleuthkit.autopsy.commonpropertiessearch;
import java.sql.SQLException; import java.sql.SQLException;
import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -88,6 +89,26 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
this.utils.tearDown(); this.utils.tearDown();
} }
/**
* Assert that the given file appears a precise number of times in the given
* data source.
*
* @param searchDomain search domain
* @param objectIdToDataSourceMap mapping of file ids to data source names
* @param fileName name of file to search for
* @param dataSource              name of data source where file should appear
* @param instanceCount           expected number of appearances of the given file
*/
static void assertInstanceExistenceAndCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource, int instanceCount) {
int foundCount = IntraCaseTestUtils.getInstanceCount(searchDomain, objectIdToDataSourceMap, fileName, dataSource);
String errorMessage = MessageFormat.format("Expected to find {0} matches for {1} in {2} but found {3}.", instanceCount, fileName, dataSource, foundCount);
assertEquals(errorMessage, instanceCount, foundCount);
}
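For illustration, with instanceCount = 2 and a found count of 1 for a hypothetical file.jpg in a data source named set1, the MessageFormat pattern above produces: Expected to find 2 matches for file.jpg in set1 but found 1.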
/** /**
* Find all matches & all file types. Confirm file.jpg is found on all three * Find all matches & all file types. Confirm file.jpg is found on all three
* and file.docx is found on two. * and file.docx is found on two.
@ -103,25 +124,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = IntraCaseTestUtils.getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = IntraCaseTestUtils.getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -144,25 +165,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -185,25 +206,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -227,25 +248,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -269,25 +290,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -311,25 +332,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -353,25 +374,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -394,25 +415,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
@ -435,25 +456,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase {
List<AbstractFile> files = getFiles(objectIdToDataSource.keySet()); List<AbstractFile> files = getFiles(objectIdToDataSource.keySet());
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0);
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0);
} catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) {
Exceptions.printStackTrace(ex); Exceptions.printStackTrace(ex);
View File
@ -179,6 +179,37 @@ class IntraCaseTestUtils {
return tally == instanceCount; return tally == instanceCount;
} }
/**
* Count the number of times the given file appears in the given data
* source.
*
* @param searchDomain search domain
* @param objectIdToDataSourceMap mapping of file ids to data source names
* @param fileName name of file to search for
* @param dataSource name of data source where file should appear
* @return The count of items found.
*/
static int getInstanceCount(List<AbstractFile> searchDomain, Map<Long, String> objectIdToDataSourceMap, String fileName, String dataSource) {
int tally = 0;
for (AbstractFile file : searchDomain) {
Long objectId = file.getId();
String name = file.getName();
String dataSourceName = objectIdToDataSourceMap.get(objectId);
if (name.equalsIgnoreCase(fileName) && dataSourceName.equalsIgnoreCase(dataSource)) {
tally++;
}
}
return tally;
}
/** /**
* Convenience method which verifies that a file exists within a given data * Convenience method which verifies that a file exists within a given data
* source exactly once. * source exactly once.
View File
@ -177,7 +177,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage", "# {0} - unitSeparator",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder",
"AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?",
@ -193,7 +194,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer {
STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")), STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")),
COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")), COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")),
STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")), STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")),
STAGE_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime")), STAGE_TIME(Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(DurationCellRenderer.getUnitSeperator())),
STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")), STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")),
CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")), CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")),
IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")), IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")),
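The StageTime header is now parameterized: the "# {0} - unitSeparator" line tells the NetBeans message generator that the bundle entry takes one argument, and the generated Bundle method substitutes it into every {0}. A sketch of the mechanism with an illustrative key (the key and method names are assumptions, not the module's real ones):

    // Sketch: a parameterized NetBeans bundle message.
    @NbBundle.Messages({
        "# {0} - unitSeparator",
        "Example.stageTime=Time in Stage (dd{0}hh{0}mm{0}ss)"
    })
    String stageTimeHeader() {
        // With ":" as the separator this yields "Time in Stage (dd:hh:mm:ss)".
        return Bundle.Example_stageTime(":");
    }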
View File
@ -53,7 +53,8 @@ final class AutoIngestJobsNode extends AbstractNode {
"AutoIngestJobsNode.dataSource.text=Data Source", "AutoIngestJobsNode.dataSource.text=Data Source",
"AutoIngestJobsNode.hostName.text=Host Name", "AutoIngestJobsNode.hostName.text=Host Name",
"AutoIngestJobsNode.stage.text=Stage", "AutoIngestJobsNode.stage.text=Stage",
"AutoIngestJobsNode.stageTime.text=Time in Stage", "# {0} - unitSeparator",
"AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)",
"AutoIngestJobsNode.jobCreated.text=Job Created", "AutoIngestJobsNode.jobCreated.text=Job Created",
"AutoIngestJobsNode.jobCompleted.text=Job Completed", "AutoIngestJobsNode.jobCompleted.text=Job Completed",
"AutoIngestJobsNode.priority.text=Prioritized", "AutoIngestJobsNode.priority.text=Prioritized",
@ -345,8 +346,10 @@ final class AutoIngestJobsNode extends AbstractNode {
jobWrapper.getProcessingHostName())); jobWrapper.getProcessingHostName()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
status.getDescription())); status.getDescription()));
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
DurationCellRenderer.longToDurationString((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())))); Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
DurationCellRenderer.longToDurationString(Date.from(Instant.now()).getTime() - status.getStartDate().getTime())));
break; break;
case COMPLETED_JOB: case COMPLETED_JOB:
ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(),
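Aside: the stage-time cell is the wall-clock delta between now and the stage start, rendered by DurationCellRenderer.longToDurationString(). A rough Java sketch of such a dd:hh:mm:ss formatter; the real renderer's padding and clamping rules are assumptions here, not reproduced from its source:

import java.time.Duration;

public class DurationFormatDemo {
    // Illustrative stand-in for DurationCellRenderer.longToDurationString().
    static String toDurationString(long millis, char sep) {
        Duration d = Duration.ofMillis(Math.max(0, millis)); // clamp negatives
        return String.format("%02d%c%02d%c%02d%c%02d",
                d.toDays(), sep, d.toHoursPart(), sep,
                d.toMinutesPart(), sep, d.toSecondsPart());
    }

    public static void main(String[] args) {
        long stageStartMillis = System.currentTimeMillis() - 93_784_000L; // ~1d 2h 3m 4s ago
        System.out.println(toDurationString(
                System.currentTimeMillis() - stageStartMillis, ':')); // 01:02:03:04
    }
}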

View File

@ -31,6 +31,7 @@ import org.sleuthkit.autopsy.datamodel.EmptyNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.AutoIngestJobStatus; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.AutoIngestJobStatus;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.JobNode; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.JobNode;
import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestNodeRefreshEvents.AutoIngestRefreshEvent; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestNodeRefreshEvents.AutoIngestRefreshEvent;
import org.sleuthkit.autopsy.guiutils.DurationCellRenderer;
import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer; import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer;
/** /**
@ -64,6 +65,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
customize(); customize();
} }
/** /**
* Set up the AutoIngestJobsPanel's so that its outlineView is displaying * Set up the AutoIngestJobsPanel's so that its outlineView is displaying
* the correct columns for the specified AutoIngestJobStatus * the correct columns for the specified AutoIngestJobStatus
@ -99,7 +102,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
outlineView.setPropertyColumns(Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_dataSource_text(), outlineView.setPropertyColumns(Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_dataSource_text(),
Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(),
Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(),
Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text()); Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()),
Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()));
indexOfColumn = getColumnIndexByName(Bundle.AutoIngestJobsNode_caseName_text()); indexOfColumn = getColumnIndexByName(Bundle.AutoIngestJobsNode_caseName_text());
if (indexOfColumn != INVALID_INDEX) { if (indexOfColumn != INVALID_INDEX) {
outline.setColumnSorted(indexOfColumn, true, 1); outline.setColumnSorted(indexOfColumn, true, 1);
@ -177,8 +181,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
* Update the contents of this AutoIngestJobsPanel while retaining currently * Update the contents of this AutoIngestJobsPanel while retaining currently
* selected node. * selected node.
* *
* @param refreshEvent - the AutoIngestRefreshEvent which will provide the new * @param refreshEvent - the AutoIngestRefreshEvent which will provide the
* contents * new contents
*/ */
void refresh(AutoIngestRefreshEvent refreshEvent) { void refresh(AutoIngestRefreshEvent refreshEvent) {
synchronized (this) { synchronized (this) {
@ -191,7 +195,6 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa
} }
outline.setRowSelectionAllowed(true); outline.setRowSelectionAllowed(true);
outline.setFocusable(true); outline.setFocusable(true);
} }
} }

View File

@ -78,7 +78,8 @@ AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath=\ Manifest F
AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage # {0} - unitSeparator
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started
AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status
AutoIngestControlPanel.OK=OK AutoIngestControlPanel.OK=OK
@ -140,7 +141,8 @@ AutoIngestJobsNode.prioritized.false=No
AutoIngestJobsNode.prioritized.true=Yes AutoIngestJobsNode.prioritized.true=Yes
AutoIngestJobsNode.priority.text=Prioritized AutoIngestJobsNode.priority.text=Prioritized
AutoIngestJobsNode.stage.text=Stage AutoIngestJobsNode.stage.text=Stage
AutoIngestJobsNode.stageTime.text=Time in Stage # {0} - unitSeparator
AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)
AutoIngestJobsNode.status.text=Status AutoIngestJobsNode.status.text=Status
AutoIngestJobsPanel.waitNode.text=Please Wait... AutoIngestJobsPanel.waitNode.text=Please Wait...
AutoIngestMetricsDialog.initReportText=Select a date above and click the 'Generate Metrics Report' button to generate\na metrics report. AutoIngestMetricsDialog.initReportText=Select a date above and click the 'Generate Metrics Report' button to generate\na metrics report.

View File

@ -1084,13 +1084,13 @@ final class FileExportRuleSet implements Serializable, Comparable<FileExportRule
SleuthkitCase caseDb = currentCase.getSleuthkitCase(); SleuthkitCase caseDb = currentCase.getSleuthkitCase();
BlackboardArtifact.Type artifactType; BlackboardArtifact.Type artifactType;
try { try {
artifactType = caseDb.getArtifactType(artifactTypeName); artifactType = caseDb.getBlackboard().getArtifactType(artifactTypeName);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s artifact type does not exist in case database for %s", artifactTypeName, currentCase.getCaseDirectory()), ex); throw new ExportRulesException(String.format("The specified %s artifact type does not exist in case database for %s", artifactTypeName, currentCase.getCaseDirectory()), ex);
} }
BlackboardAttribute.Type attributeType; BlackboardAttribute.Type attributeType;
try { try {
attributeType = caseDb.getAttributeType(attributeTypeName); attributeType = caseDb.getBlackboard().getAttributeType(attributeTypeName);
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
throw new ExportRulesException(String.format("The specified %s attribute type does not exist in case database for %s", attributeTypeName, currentCase.getCaseDirectory()), ex); throw new ExportRulesException(String.format("The specified %s attribute type does not exist in case database for %s", attributeTypeName, currentCase.getCaseDirectory()), ex);
} }
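Aside: this hunk moves type lookups from SleuthkitCase onto its Blackboard. A compact sketch of the migrated calls, using only what the diff itself shows (both lookups are caught as TskCoreException here):

import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

class TypeLookup {
    // Old: caseDb.getArtifactType(name) / caseDb.getAttributeType(name)
    // New: route both lookups through the case's Blackboard.
    static BlackboardArtifact.Type artifactType(SleuthkitCase caseDb, String name)
            throws TskCoreException {
        Blackboard blackboard = caseDb.getBlackboard();
        return blackboard.getArtifactType(name);
    }

    static BlackboardAttribute.Type attributeType(SleuthkitCase caseDb, String name)
            throws TskCoreException {
        return caseDb.getBlackboard().getAttributeType(name);
    }
}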

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2018 Basis Technology Corp. * Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -177,7 +177,7 @@ public class ObjectDetectectionFileIngestModule extends FileIngestModuleAdapter
/* /*
* Index the artifact for keyword search. * Index the artifact for keyword search.
*/ */
blackboard.postArtifact(artifact, MODULE_NAME); blackboard.postArtifact(artifact, MODULE_NAME, jobId);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy * Autopsy
* *
* Copyright 2018 Basis Technology Corp. * Copyright 2018-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -390,7 +390,7 @@ class VolatilityProcessor {
try { try {
// index the artifact for keyword search // index the artifact for keyword search
blackboard.postArtifact(volArtifact, VOLATILITY); blackboard.postArtifact(volArtifact, VOLATILITY, null);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName)); errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName));
/* /*

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule):
# Create a GeoArtifactsHelper for this file. # Create a GeoArtifactsHelper for this file.
geoArtifactHelper = GeoArtifactsHelper( geoArtifactHelper = GeoArtifactsHelper(
self.skCase, self.moduleName, None, file) self.skCase, self.moduleName, None, file, context.getJobId())
if self.writeDebugMsgs: if self.writeDebugMsgs:
self.log(Level.INFO, "Processing " + file.getUniquePath() + self.log(Level.INFO, "Processing " + file.getUniquePath() +
@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule):
art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
self.blackboard.postArtifact(art, self.moduleName) self.blackboard.postArtifact(art, self.moduleName, context.getJobId())
except Blackboard.BlackboardException as e: except Blackboard.BlackboardException as e:
self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " + self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " +

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -104,9 +104,8 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer):
# NOTE: originally commented out # NOTE: originally commented out
try: try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME) blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex: except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex) self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -102,9 +102,8 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer):
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy)) # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy))
# artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence)) # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence))
try: try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, general.MODULE_NAME) blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex: except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex) self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -83,12 +83,12 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer):
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
callLogDb.getDBFile(), callLogDb.getDBFile(),
Account.Type.PHONE, Account.Type.PHONE, selfAccountId ) Account.Type.PHONE, Account.Type.PHONE, selfAccountId, context.getJobId())
else: else:
callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
callLogDb.getDBFile(), callLogDb.getDBFile(),
Account.Type.PHONE ) Account.Type.PHONE, context.getJobId())
for tableName in CallLogAnalyzer._tableNames: for tableName in CallLogAnalyzer._tableNames:
try: try:
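Aside: both CommunicationArtifactsHelper constructor forms now end with the job id — the long form when the device owner's account is known, the short form otherwise. A Java sketch of the branching the CallLog hunk above performs; names are illustrative:

import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Account;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper;

class CallLogHelperFactory {
    // selfAccountId may be null when the device owner could not be resolved.
    static CommunicationArtifactsHelper create(SleuthkitCase caseDb, String parserName,
            AbstractFile callLogDbFile, String selfAccountId, Long ingestJobId)
            throws TskCoreException {
        if (selfAccountId != null) {
            // Owner known: self-account type and id, then the job id last.
            return new CommunicationArtifactsHelper(caseDb, parserName, callLogDbFile,
                    Account.Type.PHONE, Account.Type.PHONE, selfAccountId, ingestJobId);
        }
        // Owner unknown: the shorter constructor, still carrying the job id.
        return new CommunicationArtifactsHelper(caseDb, parserName, callLogDbFile,
                Account.Type.PHONE, ingestJobId);
    }
}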

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
return return
for contactDb in contactsDbs: for contactDb in contactsDbs:
try: try:
self.__findContactsInDB(contactDb, dataSource) self.__findContactsInDB(contactDb, dataSource, context)
except Exception as ex: except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Contacts", ex) self._logger.log(Level.SEVERE, "Error parsing Contacts", ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())
@ -86,7 +86,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
""" """
Queries the given contact database and adds Contacts to the case. Queries the given contact database and adds Contacts to the case.
""" """
def __findContactsInDB(self, contactDb, dataSource): def __findContactsInDB(self, contactDb, dataSource, context):
if not contactDb: if not contactDb:
return return
@ -97,7 +97,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
contactDb.getDBFile(), contactDb.getDBFile(),
Account.Type.PHONE ) Account.Type.PHONE, context.getJobId())
# get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype) # get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype)
# sorted by name, so phonenumber/email would be consecutive for a person if they exist. # sorted by name, so phonenumber/email would be consecutive for a person if they exist.
@ -158,7 +158,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer):
phoneNumber, # phoneNumber, phoneNumber, # phoneNumber,
None, # homePhoneNumber, None, # homePhoneNumber,
None, # mobilePhoneNumber, None, # mobilePhoneNumber,
emailAddr) # emailAddr emailAddr, context.getJobId()) # emailAddr
except SQLException as ex: except SQLException as ex:
self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex) self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex)

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -148,11 +148,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None: if self.selfAccountId is not None:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(), self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId ) Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else: else:
contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, contactsDb.getDBFile(), self._MODULE_NAME, contactsDb.getDBFile(),
Account.Type.FACEBOOK) Account.Type.FACEBOOK, context.getJobId())
## get the other contacts/friends ## get the other contacts/friends
contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0") contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0")
@ -492,11 +492,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer):
if self.selfAccountId is not None: if self.selfAccountId is not None:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(), self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId ) Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId())
else: else:
threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, threadsDb.getDBFile(), self._MODULE_NAME, threadsDb.getDBFile(),
Account.Type.FACEBOOK) Account.Type.FACEBOOK, context.getJobId())
self.analyzeMessages(threadsDb, threadsDBHelper) self.analyzeMessages(threadsDb, threadsDBHelper)
self.analyzeCallLogs(threadsDb, threadsDBHelper) self.analyzeCallLogs(threadsDb, threadsDBHelper)

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -76,7 +76,7 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
try: try:
jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName()) jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName())
ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled) ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled)
self.__findGeoLocationsInDB(jFile.toString(), abstractFile) self.__findGeoLocationsInDB(jFile.toString(), abstractFile, context)
except Exception as ex: except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex) self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())
@ -84,13 +84,13 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer):
# Error finding Google map locations. # Error finding Google map locations.
pass pass
def __findGeoLocationsInDB(self, databasePath, abstractFile): def __findGeoLocationsInDB(self, databasePath, abstractFile, context):
if not databasePath: if not databasePath:
return return
try: try:
artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(), artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(),
general.MODULE_NAME, self.PROGRAM_NAME, abstractFile) general.MODULE_NAME, self.PROGRAM_NAME, abstractFile, context.getJobId())
Class.forName("org.sqlite.JDBC") # load JDBC driver Class.forName("org.sqlite.JDBC") # load JDBC driver
connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath) connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath)
statement = connection.createStatement() statement = connection.createStatement()

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -109,12 +109,12 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer):
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
friendsDb.getDBFile(), friendsDb.getDBFile(),
Account.Type.IMO, Account.Type.IMO, selfAccountId ) Account.Type.IMO, Account.Type.IMO, selfAccountId, context.getJobId())
else: else:
friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
friendsDb.getDBFile(), friendsDb.getDBFile(),
Account.Type.IMO ) Account.Type.IMO, context.getJobId())
contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends") contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends")
if contactsResultSet is not None: if contactsResultSet is not None:
while contactsResultSet.next(): while contactsResultSet.next():

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -75,7 +75,7 @@ class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer):
try: try:
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(), libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, libraryDb.getDBFile()) self._MODULE_NAME, libraryDb.getDBFile(), context.getJobId())
queryString = "SELECT doc_id, purchase_time FROM ownership" queryString = "SELECT doc_id, purchase_time FROM ownership"
ownershipResultSet = libraryDb.runQuery(queryString) ownershipResultSet = libraryDb.runQuery(queryString)
if ownershipResultSet is not None: if ownershipResultSet is not None:

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -117,7 +117,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_and_message_db.getDBFile(), Account.Type.LINE) contact_and_message_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_contacts(contact_and_message_db, helper) self.parse_contacts(contact_and_message_db, helper)
self.parse_messages(contact_and_message_db, helper, current_case) self.parse_messages(contact_and_message_db, helper, current_case)
@ -125,7 +125,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
calllog_db.getDBFile(), Account.Type.LINE) calllog_db.getDBFile(), Account.Type.LINE, context.getJobId())
self.parse_calllogs(dataSource, calllog_db, helper) self.parse_calllogs(dataSource, calllog_db, helper)
except NoCurrentCaseException as ex: except NoCurrentCaseException as ex:

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -89,7 +89,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for cookiesDb in cookiesDbs: for cookiesDb in cookiesDbs:
try: try:
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, cookiesDb.getDBFile()) self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId())
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies") cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
if cookiesResultSet is not None: if cookiesResultSet is not None:
while cookiesResultSet.next(): while cookiesResultSet.next():
@ -119,7 +119,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for historyDb in historyDbs: for historyDb in historyDbs:
try: try:
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile()) self._MODULE_NAME, historyDb.getDBFile(), context.getJobId())
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls") historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
if historyResultSet is not None: if historyResultSet is not None:
while historyResultSet.next(): while historyResultSet.next():
@ -148,7 +148,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for downloadsDb in downloadsDbs: for downloadsDb in downloadsDbs:
try: try:
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, downloadsDb.getDBFile()) self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId())
queryString = "SELECT target_path, start_time, url FROM downloads"\ queryString = "SELECT target_path, start_time, url FROM downloads"\
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id" " INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
downloadsResultSet = downloadsDb.runQuery(queryString) downloadsResultSet = downloadsDb.runQuery(queryString)
@ -177,7 +177,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for autofillDb in autofillDbs: for autofillDb in autofillDbs:
try: try:
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, autofillDb.getDBFile()) self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId())
autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill") autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill")
if autofillsResultSet is not None: if autofillsResultSet is not None:
while autofillsResultSet.next(): while autofillsResultSet.next():
@ -205,7 +205,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer):
for webFormAddressDb in webFormAddressDbs: for webFormAddressDb in webFormAddressDbs:
try: try:
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, webFormAddressDb.getDBFile()) self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId())
queryString = """ queryString = """
SELECT street_address, city, state, zipcode, country_code, SELECT street_address, city, state, zipcode, country_code,
date_modified, first_name, last_name, number, email date_modified, first_name, last_name, number, email
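Aside: WebBrowserArtifactsHelper follows suit; each per-database helper in the Opera and SBrowser analyzers is built with the ingest job id appended. A one-method Java sketch of the construction these hunks perform:

import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper;

class BrowserHelperFactory {
    // Mirrors WebBrowserArtifactsHelper(skCase, moduleName, db.getDBFile(),
    // context.getJobId()) above; the throws clause is declared defensively.
    static WebBrowserArtifactsHelper create(SleuthkitCase caseDb, String moduleName,
            AbstractFile browserDbFile, Long ingestJobId) throws TskCoreException {
        return new WebBrowserArtifactsHelper(caseDb, moduleName, browserDbFile, ingestJobId);
    }
}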

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2018 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -72,7 +72,7 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
skCase = Case.getCurrentCase().getSleuthkitCase() skCase = Case.getCurrentCase().getSleuthkitCase()
geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile()) geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile(), context.getJobId())
poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois" poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois"
poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString) poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString)
@ -96,9 +96,8 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer):
artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes)
try: try:
# index the artifact for keyword search
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard()
blackboard.postArtifact(artifact, self._MODULE_NAME) blackboard.postArtifact(artifact, self._MODULE_NAME, context.getJobId())
except Blackboard.BlackboardException as ex: except Blackboard.BlackboardException as ex:
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex) self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -87,7 +87,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for sbrowserDb in sbrowserDbs: for sbrowserDb in sbrowserDbs:
try: try:
sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, sbrowserDb.getDBFile()) self._MODULE_NAME, sbrowserDb.getDBFile(), context.getJobId())
bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL") bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL")
if bookmarkResultSet is not None: if bookmarkResultSet is not None:
while bookmarkResultSet.next(): while bookmarkResultSet.next():
@ -115,7 +115,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for cookiesDb in cookiesDbs: for cookiesDb in cookiesDbs:
try: try:
cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, cookiesDb.getDBFile()) self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId())
cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies") cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies")
if cookiesResultSet is not None: if cookiesResultSet is not None:
while cookiesResultSet.next(): while cookiesResultSet.next():
@ -145,7 +145,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for historyDb in historyDbs: for historyDb in historyDbs:
try: try:
historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile()) self._MODULE_NAME, historyDb.getDBFile(), context.getJobId())
historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls") historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls")
if historyResultSet is not None: if historyResultSet is not None:
while historyResultSet.next(): while historyResultSet.next():
@ -174,7 +174,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for downloadsDb in downloadsDbs: for downloadsDb in downloadsDbs:
try: try:
downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, downloadsDb.getDBFile()) self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId())
queryString = "SELECT target_path, start_time, url FROM downloads"\ queryString = "SELECT target_path, start_time, url FROM downloads"\
" INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id" " INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id"
downloadsResultSet = downloadsDb.runQuery(queryString) downloadsResultSet = downloadsDb.runQuery(queryString)
@ -203,7 +203,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for autofillDb in autofillDbs: for autofillDb in autofillDbs:
try: try:
autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, autofillDb.getDBFile()) self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId())
queryString = """ queryString = """
SELECT name, value, count, date_created SELECT name, value, count, date_created
FROM autofill FROM autofill
@ -236,7 +236,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer):
for webFormAddressDb in webFormAddressDbs: for webFormAddressDb in webFormAddressDbs:
try: try:
webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(),
self._MODULE_NAME, webFormAddressDb.getDBFile()) self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId())
""" """
Autofill form data is split across multiple tables. The query below joins the various tables. Autofill form data is split across multiple tables. The query below joins the various tables.
""" """

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -85,7 +85,7 @@ class ShareItAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, historyDb.getDBFile(), self._MODULE_NAME, historyDb.getDBFile(),
Account.Type.SHAREIT) Account.Type.SHAREIT, context.getJobId())
queryString = """ queryString = """
SELECT history_type, device_id, device_name, description, timestamp, file_path SELECT history_type, device_id, device_name, description, timestamp, file_path

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -129,13 +129,13 @@ class SkypeAnalyzer(general.AndroidComponentAnalyzer):
if user_account_instance is None: if user_account_instance is None:
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
skype_db.getDBFile(), Account.Type.SKYPE skype_db.getDBFile(), Account.Type.SKYPE, context.getJobId()
) )
else: else:
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
skype_db.getDBFile(), Account.Type.SKYPE, skype_db.getDBFile(), Account.Type.SKYPE,
Account.Type.SKYPE, user_account_instance Account.Type.SKYPE, user_account_instance, context.getJobId()
) )
self.parse_contacts(skype_db, helper) self.parse_contacts(skype_db, helper)
self.parse_calllogs(skype_db, helper) self.parse_calllogs(skype_db, helper)

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -72,7 +72,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
tangoDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "tc.db", True, self._PACKAGE_NAME) tangoDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "tc.db", True, self._PACKAGE_NAME)
for tangoDbFile in tangoDbFiles: for tangoDbFile in tangoDbFiles:
try: try:
self.__findTangoMessagesInDB(tangoDbFile, dataSource) self.__findTangoMessagesInDB(tangoDbFile, dataSource, context)
except Exception as ex: except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex) self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())
@ -80,7 +80,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
# Error finding Tango messages. # Error finding Tango messages.
pass pass
def __findTangoMessagesInDB(self, tangoDb, dataSource): def __findTangoMessagesInDB(self, tangoDb, dataSource, context):
if not tangoDb: if not tangoDb:
return return
@ -91,7 +91,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer):
tangoDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), tangoDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
tangoDb.getDBFile(), tangoDb.getDBFile(),
Account.Type.TANGO ) Account.Type.TANGO, context.getJobId())
resultSet = tangoDb.runQuery( resultSet = tangoDb.runQuery(
"SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;") "SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;")

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -80,12 +80,12 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer):
messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
messageDb.getDBFile(), messageDb.getDBFile(),
Account.Type.PHONE, Account.Type.IMO, selfAccountId ) Account.Type.PHONE, Account.Type.IMO, selfAccountId, context.getJobId())
else: else:
messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
messageDb.getDBFile(), messageDb.getDBFile(),
Account.Type.PHONE ) Account.Type.PHONE, context.getJobId())
uuid = UUID.randomUUID().toString() uuid = UUID.randomUUID().toString()
messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;") messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;")

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -91,7 +91,7 @@ class TextNowAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
textnow_db.getDBFile(), Account.Type.TEXTNOW textnow_db.getDBFile(), Account.Type.TEXTNOW, context.getJobId()
) )
self.parse_contacts(textnow_db, helper) self.parse_contacts(textnow_db, helper)
self.parse_calllogs(textnow_db, helper) self.parse_calllogs(textnow_db, helper)

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -91,7 +91,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_and_calllog_db.getDBFile(), Account.Type.VIBER) contact_and_calllog_db.getDBFile(), Account.Type.VIBER, context.getJobId())
self.parse_contacts(contact_and_calllog_db, helper) self.parse_contacts(contact_and_calllog_db, helper)
self.parse_calllogs(contact_and_calllog_db, helper) self.parse_calllogs(contact_and_calllog_db, helper)
@ -100,7 +100,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
message_db.getDBFile(), Account.Type.VIBER) message_db.getDBFile(), Account.Type.VIBER, context.getJobId())
self.parse_messages(message_db, helper, current_case) self.parse_messages(message_db, helper, current_case)
except NoCurrentCaseException as ex: except NoCurrentCaseException as ex:
@ -131,9 +131,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer):
attributes = ArrayList() attributes = ArrayList()
attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name())) attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name()))
artifact = contacts_db.getDBFile().newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes) artifact = contacts_db.getDBFile().newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes)
current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME, context.getJobId())
# Post the artifact to blackboard
current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME)
contacts_parser.close() contacts_parser.close()
except SQLException as ex: except SQLException as ex:

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -145,14 +145,14 @@ class WhatsAppAnalyzer(general.AndroidComponentAnalyzer):
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
contact_db.getDBFile(), Account.Type.WHATSAPP) contact_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId())
self.parse_contacts(contact_db, helper) self.parse_contacts(contact_db, helper)
for calllog_and_message_db in calllog_and_message_dbs: for calllog_and_message_db in calllog_and_message_dbs:
current_case = Case.getCurrentCaseThrows() current_case = Case.getCurrentCaseThrows()
helper = CommunicationArtifactsHelper( helper = CommunicationArtifactsHelper(
current_case.getSleuthkitCase(), self._PARSER_NAME, current_case.getSleuthkitCase(), self._PARSER_NAME,
calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP) calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId())
self.parse_calllogs(calllog_and_message_db, helper) self.parse_calllogs(calllog_and_message_db, helper)
self.parse_messages(dataSource, calllog_and_message_db, helper, current_case) self.parse_messages(dataSource, calllog_and_message_db, helper, current_case)

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2016-2020 Basis Technology Corp. Copyright 2016-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -78,7 +78,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
wwfDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "WordsFramework", True, self._PACKAGE_NAME) wwfDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "WordsFramework", True, self._PACKAGE_NAME)
for wwfDbFile in wwfDbFiles: for wwfDbFile in wwfDbFiles:
try: try:
self.__findWWFMessagesInDB(wwfDbFile, dataSource) self.__findWWFMessagesInDB(wwfDbFile, dataSource, context)
except Exception as ex: except Exception as ex:
self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex) self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex)
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())
@ -88,7 +88,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
self._logger.log(Level.SEVERE, traceback.format_exc()) self._logger.log(Level.SEVERE, traceback.format_exc())
pass pass
def __findWWFMessagesInDB(self, wwfDb, dataSource): def __findWWFMessagesInDB(self, wwfDb, dataSource, context):
if not wwfDb: if not wwfDb:
return return
@ -98,7 +98,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer):
wwfDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), wwfDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._PARSER_NAME, self._PARSER_NAME,
wwfDb.getDBFile(), wwfDb.getDBFile(),
wwfAccountType ) wwfAccountType, context.getJobId())
uuid = UUID.randomUUID().toString() uuid = UUID.randomUUID().toString()

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -91,11 +91,11 @@ class XenderAnalyzer(general.AndroidComponentAnalyzer):
if selfAccountId is not None: if selfAccountId is not None:
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transactionDb.getDBFile(), self._MODULE_NAME, transactionDb.getDBFile(),
Account.Type.XENDER, Account.Type.XENDER, selfAccountId ) Account.Type.XENDER, Account.Type.XENDER, selfAccountId, context.getJobId())
else: else:
transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transactionDb.getDBFile(), self._MODULE_NAME, transactionDb.getDBFile(),
Account.Type.XENDER) Account.Type.XENDER, context.getJobId())
queryString = """ queryString = """
SELECT f_path, f_display_name, f_size_str, c_start_time, c_direction, c_session_id, SELECT f_path, f_display_name, f_size_str, c_start_time, c_direction, c_session_id,

View File

@ -1,7 +1,7 @@
""" """
Autopsy Forensic Browser Autopsy Forensic Browser
Copyright 2019-2020 Basis Technology Corp. Copyright 2019-2021 Basis Technology Corp.
Contact: carrier <at> sleuthkit <dot> org Contact: carrier <at> sleuthkit <dot> org
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
@ -81,7 +81,7 @@ class ZapyaAnalyzer(general.AndroidComponentAnalyzer):
# #
transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(),
self._MODULE_NAME, transferDb.getDBFile(), self._MODULE_NAME, transferDb.getDBFile(),
Account.Type.ZAPYA) Account.Type.ZAPYA, context.getJobId())
queryString = "SELECT device, name, direction, createtime, path, title FROM transfer" queryString = "SELECT device, name, direction, createtime, path, title FROM transfer"
transfersResultSet = transferDb.runQuery(queryString) transfersResultSet = transferDb.runQuery(queryString)

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -426,7 +426,7 @@ class AdHocSearchChildFactory extends ChildFactory<KeyValue> {
final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." : queryStr; final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." : queryStr;
try { try {
progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true)); progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true));
hits.process(progress, null, this, false, saveResults); hits.process(progress, null, this, false, saveResults, null);
} finally { } finally {
finalizeWorker(); finalizeWorker();
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2014 - 2017 Basis Technology Corp. * Copyright 2014 - 2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -564,7 +564,7 @@ final class IngestSearchRunner {
subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress); subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress);
// Create blackboard artifacts // Create blackboard artifacts
newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true); newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId());
} //if has results } //if has results

View File

@ -648,7 +648,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
} }
if (!bbartifacts.isEmpty()) { if (!bbartifacts.isEmpty()) {
try { try {
Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName); Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName, jobId);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
// Log error and return to continue processing // Log error and return to continue processing
logger.log(Level.WARNING, String.format("Unable to post blackboard artifacts for file $s.", aFile.getParentPath() + aFile.getName()), ex); //NON-NLS logger.log(Level.WARNING, String.format("Unable to post blackboard artifacts for file $s.", aFile.getParentPath() + aFile.getName()), ex); //NON-NLS

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -35,7 +35,8 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;; import org.sleuthkit.autopsy.ingest.IngestServices;
;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
@ -141,9 +142,10 @@ class QueryResults {
* messages inbox if there is a keyword hit in the text * messages inbox if there is a keyword hit in the text
* extracted from the text source object. * extracted from the text source object.
* @param saveResults Flag whether to save search results as KWS artifacts. * @param saveResults Flag whether to save search results as KWS artifacts.
* * @param ingestJobId The numeric identifier of the ingest job within which
* the artifacts are being created, may be null.
*/ */
void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<?, ?> worker, boolean notifyInbox, boolean saveResults) { void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker<?, ?> worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) {
/* /*
* Initialize the progress indicator to the number of keywords that will * Initialize the progress indicator to the number of keywords that will
* be processed. * be processed.
@ -253,7 +255,7 @@ class QueryResults {
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
Blackboard blackboard = tskCase.getBlackboard(); Blackboard blackboard = tskCase.getBlackboard();
blackboard.postArtifacts(hitArtifacts, MODULE_NAME); blackboard.postArtifacts(hitArtifacts, MODULE_NAME, ingestJobId);
} catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS
} }
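
Putting this commit's two process() call sites side by side shows the convention for the new parameter (both lines are taken from the hunks above):

// Ingest-time search: the job id is known, so hits are associated with the job.
newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId());

// Ad-hoc search from the UI: no ingest job exists, so null flows through
// to postArtifacts(hitArtifacts, MODULE_NAME, null).
hits.process(progress, null, this, false, saveResults, null);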

View File

@ -2,7 +2,7 @@ OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Name=RecentActivity
OpenIDE-Module-Short-Description=Recent Activity finder ingest module OpenIDE-Module-Short-Description=Recent Activity finder ingest module
Chrome.moduleName=Chromium Chrome.moduleName=Chromium Analyzer
Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files. Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files.
Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files. Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files.
Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1} Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1}
@ -19,7 +19,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f
Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files.
Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
ExtractIE.moduleName.text=Internet Explorer ExtractIE.moduleName.text=Internet Explorer Analyzer
ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks. ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks.
ExtractIE.parentModuleName.noSpace=RecentActivity ExtractIE.parentModuleName.noSpace=RecentActivity
ExtractIE.parentModuleName=Recent Activity ExtractIE.parentModuleName=Recent Activity
@ -35,7 +35,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer
ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1} ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1}
ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1} ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1}
ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry. ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry.
ExtractRegistry.moduleName.text=Registry ExtractRegistry.moduleName.text=Windows Registry Analyzer
ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0} ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0}
ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1} ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1}
ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1} ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1}
@ -43,7 +43,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity
ExtractRegistry.programName=RegRipper ExtractRegistry.programName=RegRipper
ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1}
ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1}
Firefox.moduleName=FireFox Firefox.moduleName=FireFox Analyzer
Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox.
Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found.
Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1} Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1}
@ -85,12 +85,12 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File
RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1} RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1}
RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity RecentDocumentsByLnk.parentModuleName=Recent Activity
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer
SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.engineName.none=NONE
SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE
SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3}
SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity
SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity
ExtractWebAccountType.moduleName.text=Web Account Type ExtractWebAccountType.moduleName.text=Web Account Type Analyzer
ExtractWebAccountType.parentModuleName=Recent Activity ExtractWebAccountType.parentModuleName=Recent Activity
UsbDeviceIdMapper.parseAndLookup.text=Product: {0} UsbDeviceIdMapper.parseAndLookup.text=Product: {0}

View File

@ -4,7 +4,6 @@ cannotParseXml=Unable to parse XML file:
ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis. ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis.
ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis. ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis.
ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s. ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s.
ChromeCacheExtractor.moduleName=ChromeCacheExtractor
# {0} - module name # {0} - module name
# {1} - row number # {1} - row number
# {2} - table length # {2} - table length
@ -13,25 +12,26 @@ ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries
DataSourceUsage_AndroidMedia=Android Media Card DataSourceUsage_AndroidMedia=Android Media Card
DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card
DataSourceUsage_FlashDrive=Flash Drive DataSourceUsage_FlashDrive=Flash Drive
# {0} - OS name
DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})
DataSourceUsageAnalyzer.parentModuleName=Recent Activity DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer
DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine
DomainCategoryRunner_moduleName_text=DomainCategoryRunner DomainCategoryRunner_moduleName_text=Domain Category Analyzer
DomainCategoryRunner_parentModuleName=Recent Activity DomainCategoryRunner_parentModuleName=Recent Activity
DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types
Extract.indexError.message=Failed to index artifact for keyword search.
Extract.noOpenCase.errMsg=No open case available.
ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history
ExtractEdge_Module_Name=Microsoft Edge ExtractEdge_Module_Name=Microsoft Edge Analyzer
ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieve Edge WebCacheV01 file ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieve Edge WebCacheV01 file
ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file
ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer
ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file
# {0} - sub module name
ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history
ExtractOs.androidOs.label=Android ExtractOs.androidOs.label=Android
ExtractOs.androidVolume.label=OS Drive (Android) ExtractOs.androidVolume.label=OS Drive (Android)
ExtractOs.debianLinuxOs.label=Linux (Debian) ExtractOs.debianLinuxOs.label=Linux (Debian)
ExtractOs.debianLinuxVolume.label=OS Drive (Linux Debian) ExtractOs.debianLinuxVolume.label=OS Drive (Linux Debian)
ExtractOs.displayName=OS Info Analyzer
ExtractOs.fedoraLinuxOs.label=Linux (Fedora) ExtractOs.fedoraLinuxOs.label=Linux (Fedora)
ExtractOs.fedoraLinuxVolume.label=OS Drive (Linux Fedora) ExtractOs.fedoraLinuxVolume.label=OS Drive (Linux Fedora)
ExtractOs.gentooLinuxOs.label=Linux (Gentoo) ExtractOs.gentooLinuxOs.label=Linux (Gentoo)
@ -42,7 +42,6 @@ ExtractOs.novellSUSEOs.label=Linux (Novell SUSE)
ExtractOs.novellSUSEVolume.label=OS Drive (Linux Novell SUSE) ExtractOs.novellSUSEVolume.label=OS Drive (Linux Novell SUSE)
ExtractOs.osx.label=Mac OS X ExtractOs.osx.label=Mac OS X
ExtractOs.osxVolume.label=OS Drive (OS X) ExtractOs.osxVolume.label=OS Drive (OS X)
ExtractOs.parentModuleName=Recent Activity
ExtractOs.redhatLinuxOs.label=Linux (Redhat) ExtractOs.redhatLinuxOs.label=Linux (Redhat)
ExtractOs.redhatLinuxVolume.label=OS Drive (Linux Redhat) ExtractOs.redhatLinuxVolume.label=OS Drive (Linux Redhat)
ExtractOs.slackwareLinuxOs.label=Linux (Slackware) ExtractOs.slackwareLinuxOs.label=Linux (Slackware)
@ -59,16 +58,17 @@ ExtractOs.windowsVolume.label=OS Drive (Windows)
ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog)
ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog)
ExtractOS_progressMessage=Checking for OS ExtractOS_progressMessage=Checking for OS
# {0} - sub module name
ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files
ExtractPrefetch_module_name=Windows Prefetch Extractor ExtractPrefetch_module_name=Windows Prefetch Analyzer
ExtractRecycleBin_module_name=Recycle Bin ExtractRecycleBin_module_name=Recycle Bin Analyzer
ExtractRecycleBin_Recyle_Bin_Display_Name=Recycle Bin ExtractRecycleBin_Recyle_Bin_Display_Name=Recycle Bin
ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files. ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.
ExtractSafari_Error_Parsing_Bookmark=An error occurred while processing Safari Bookmark files ExtractSafari_Error_Parsing_Bookmark=An error occurred while processing Safari Bookmark files
ExtractSafari_Error_Parsing_Cookies=An error occurred while processing Safari Cookies files ExtractSafari_Error_Parsing_Cookies=An error occurred while processing Safari Cookies files
ExtractSafari_Module_Name=Safari ExtractSafari_Module_Name=Safari Analyzer
ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program
ExtractSru_module_name=System Resource Usage Extractor ExtractSru_module_name=System Resource Usage Analyzer
ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program
ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file
ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file
@ -77,6 +77,7 @@ ExtractSru_process_errormsg_write_srudb_dat=Unable to write srudb.dat file
ExtractWebAccountType.role.admin=Administrator role ExtractWebAccountType.role.admin=Administrator role
ExtractWebAccountType.role.moderator=Moderator role ExtractWebAccountType.role.moderator=Moderator role
ExtractWebAccountType.role.user=User role ExtractWebAccountType.role.user=User role
ExtractZone_displayName=\ Zone Identifier Analyzer
ExtractZone_Internet=Internet Zone ExtractZone_Internet=Internet Zone
ExtractZone_Local_Intranet=Local Intranet Zone ExtractZone_Local_Intranet=Local Intranet Zone
ExtractZone_Local_Machine=Local Machine Zone ExtractZone_Local_Machine=Local Machine Zone
ExtractZone_progress_Msg=Extracting :Zone.Identifier files ExtractZone_progress_Msg=Extracting :Zone.Identifier files
ExtractZone_Restricted=Restricted Sites Zone ExtractZone_Restricted=Restricted Sites Zone
ExtractZone_Trusted=Trusted Sites Zone ExtractZone_Trusted=Trusted Sites Zone
Jumplist_adding_extracted_files_msg=Windows Jumplist: Adding %d extracted files for analysis. Jumplist_adding_extracted_files_msg=Windows Jumplist: Adding %d extracted files for analysis.
Jumplist_module_name=Windows Jumplist Extractor Jumplist_module_name=Windows Jumplist Analyzer
OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy.
OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Name=RecentActivity
OpenIDE-Module-Short-Description=Recent Activity finder ingest module OpenIDE-Module-Short-Description=Recent Activity finder ingest module
Chrome.moduleName=Chromium Chrome.moduleName=Chromium Analyzer
Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files. Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files.
Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files. Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files.
Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1} Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1}
@ -108,7 +109,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f
Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files.
Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1}
ExtractIE.moduleName.text=Internet Explorer ExtractIE.moduleName.text=Internet Explorer Analyzer
ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks. ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks.
ExtractIE.parentModuleName.noSpace=RecentActivity ExtractIE.parentModuleName.noSpace=RecentActivity
ExtractIE.parentModuleName=Recent Activity ExtractIE.parentModuleName=Recent Activity
@ -124,7 +125,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer
ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1} ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1}
ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1} ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1}
ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry. ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry.
ExtractRegistry.moduleName.text=Registry ExtractRegistry.moduleName.text=Windows Registry Analyzer
ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0} ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0}
ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1} ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1}
ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1} ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1}
@ -132,7 +133,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity
ExtractRegistry.programName=RegRipper ExtractRegistry.programName=RegRipper
ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1}
ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1}
Firefox.moduleName=FireFox Firefox.moduleName=FireFox Analyzer
Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox.
Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found.
Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1} Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1}
@ -212,6 +213,7 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File
RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1} RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1}
RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity
RecentDocumentsByLnk.parentModuleName=Recent Activity RecentDocumentsByLnk.parentModuleName=Recent Activity
RecentDocumentsByLnk_displayName=Recent Documents by Link Analyzer
Recently_Used_Artifacts_Adobe=Recently opened according to Adobe MRU Recently_Used_Artifacts_Adobe=Recently opened according to Adobe MRU
Recently_Used_Artifacts_Applets=Recently opened according to Applets registry key Recently_Used_Artifacts_Applets=Recently opened according to Applets registry key
Recently_Used_Artifacts_ArcHistory=Recently opened by 7Zip Recently_Used_Artifacts_ArcHistory=Recently opened by 7Zip
@ -223,14 +225,15 @@ Recently_Used_Artifacts_Winrar=Recently opened according to WinRAR MRU
Registry_System_Bam=Recently Executed according to Background Activity Moderator (BAM) Registry_System_Bam=Recently Executed according to Background Activity Moderator (BAM)
RegRipperFullNotFound=Full version RegRipper executable not found. RegRipperFullNotFound=Full version RegRipper executable not found.
RegRipperNotFound=Autopsy RegRipper executable not found. RegRipperNotFound=Autopsy RegRipper executable not found.
# {0} - file name
SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}.
SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer
SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.engineName.none=NONE
SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE
SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3}
SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity
SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity
ExtractWebAccountType.moduleName.text=Web Account Type ExtractWebAccountType.moduleName.text=Web Account Type Analyzer
ExtractWebAccountType.parentModuleName=Recent Activity ExtractWebAccountType.parentModuleName=Recent Activity
Shellbag_Artifact_Display_Name=Shell Bags Shellbag_Artifact_Display_Name=Shell Bags
Shellbag_Key_Attribute_Display_Name=Key Shellbag_Key_Attribute_Display_Name=Key

View File

@ -2,7 +2,7 @@
* *
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* *
* Project Contact/Architect: carrier <at> sleuthkit <dot> org * Project Contact/Architect: carrier <at> sleuthkit <dot> org
* *
@ -151,7 +151,6 @@ final class ChromeCacheExtractor {
} }
@NbBundle.Messages({ @NbBundle.Messages({
"ChromeCacheExtractor.moduleName=ChromeCacheExtractor",
"# {0} - module name", "# {0} - module name",
"# {1} - row number", "# {1} - row number",
"# {2} - table length", "# {2} - table length",
@ -159,7 +158,7 @@ final class ChromeCacheExtractor {
"ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}" "ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}"
}) })
ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) {
moduleName = Bundle.ChromeCacheExtractor_moduleName(); moduleName = NbBundle.getMessage(Chromium.class, "Chrome.moduleName");
this.dataSource = dataSource; this.dataSource = dataSource;
this.context = context; this.context = context;
this.progressBar = progressBar; this.progressBar = progressBar;
@ -415,7 +414,7 @@ final class ChromeCacheExtractor {
progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_artifacts_msg(), artifactsAdded.size())); progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_artifacts_msg(), artifactsAdded.size()));
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard(); Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
try { try {
blackboard.postArtifacts(artifactsAdded, moduleName); blackboard.postArtifacts(artifactsAdded, moduleName, context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.WARNING, String.format("Failed to post cacheIndex artifacts "), ex); //NON-NLS logger.log(Level.WARNING, String.format("Failed to post cacheIndex artifacts "), ex); //NON-NLS
} }

View File

@ -54,8 +54,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
@ -100,7 +98,7 @@ class Chromium extends Extract {
private final Logger logger = Logger.getLogger(this.getClass().getName()); private final Logger logger = Logger.getLogger(this.getClass().getName());
private Content dataSource; private Content dataSource;
private IngestJobContext context; private final IngestJobContext context;
private static final Map<String, String> BROWSERS_MAP = ImmutableMap.<String, String>builder() private static final Map<String, String> BROWSERS_MAP = ImmutableMap.<String, String>builder()
.put("Microsoft Edge", "Microsoft/Edge/User Data/Default") .put("Microsoft Edge", "Microsoft/Edge/User Data/Default")
@ -127,20 +125,19 @@ class Chromium extends Extract {
"Progress_Message_Chrome_Logins=Chrome Logins Browser {0}", "Progress_Message_Chrome_Logins=Chrome Logins Browser {0}",
"Progress_Message_Chrome_Cache=Chrome Cache",}) "Progress_Message_Chrome_Cache=Chrome Cache",})
Chromium() { Chromium(IngestJobContext context) {
super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName")); super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName"), context);
this.context = context;
} }
@Override @Override
public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource; this.dataSource = dataSource;
this.context = context;
dataFound = false; dataFound = false;
long ingestJobId = context.getJobId(); long ingestJobId = context.getJobId();
for (Map.Entry<String, String> browser : BROWSERS_MAP.entrySet()) { for (Map.Entry<String, String> browser : BROWSERS_MAP.entrySet()) {
String browserName = browser.getKey(); String browserName = browser.getKey();
String browserLocation = browser.getValue();
progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_History", browserName)); progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_History", browserName));
this.getHistory(browser.getKey(), browser.getValue(), ingestJobId); this.getHistory(browser.getKey(), browser.getValue(), ingestJobId);
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
@ -181,11 +178,11 @@ class Chromium extends Extract {
progressBar.progress(Bundle.Progress_Message_Chrome_Cache()); progressBar.progress(Bundle.Progress_Message_Chrome_Cache());
ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context, progressBar); ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context, progressBar);
chromeCacheExtractor.processCaches(); chromeCacheExtractor.processCaches();
} }
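
The shape above (context injected through the constructor, process() losing its context parameter) recurs across the Recent Activity extractors in this commit. A minimal sketch of a subclass under the new pattern; the class name and body are illustrative, and the assumption is that Extract accepts the context in super():

// Hypothetical extractor illustrating the constructor-injection refactor.
class ExampleExtractor extends Extract {

    private final IngestJobContext context;

    ExampleExtractor(IngestJobContext context) {
        // Display name and job context both go to the base class now.
        super("Example Analyzer", context);
        this.context = context; // kept locally for cancellation checks and getJobId()
    }

    @Override
    public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        if (context.dataSourceIngestIsCancelled()) {
            return; // the context no longer arrives as a process() parameter
        }
    }
}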
/** /**
* Query for history databases and add artifacts * Query for history databases and add artifacts
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -202,7 +199,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -238,13 +235,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome web history artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome web history artifacts file '%s' (id=%d).",
historyFile.getName(), historyFile.getId()), ex); //NON-NLS historyFile.getName(), historyFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile",
this.getName(), historyFile.getName())); this.getDisplayName(), historyFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome web history artifacts file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome web history artifacts file '%s' (id=%d).",
temps, historyFile.getName(), historyFile.getId()), ex); //NON-NLS temps, historyFile.getName(), historyFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile",
this.getName(), historyFile.getName())); this.getDisplayName(), historyFile.getName()));
continue; continue;
} }
File dbFile = new File(temps); File dbFile = new File(temps);
@ -253,8 +250,8 @@ class Chromium extends Extract {
break; break;
} }
List<HashMap<String, Object>> tempList; List<HashMap<String, Object>> tempList;
tempList = this.dbConnect(temps, HISTORY_QUERY); tempList = this.querySQLiteDb(temps, HISTORY_QUERY);
logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) { for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
@ -276,7 +273,7 @@ class Chromium extends Extract {
(NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS (NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, historyFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create history artifact for file (%d)", historyFile.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create history artifact for file (%d)", historyFile.getId()), ex);
} }
@ -291,6 +288,7 @@ class Chromium extends Extract {
/** /**
* Search for bookmark files and make artifacts. * Search for bookmark files and make artifacts.
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -307,7 +305,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -319,7 +317,6 @@ class Chromium extends Extract {
dataFound = true; dataFound = true;
Collection<BlackboardArtifact> bbartifacts = new ArrayList<>(); Collection<BlackboardArtifact> bbartifacts = new ArrayList<>();
int j = 0; int j = 0;
while (j < bookmarkFiles.size()) { while (j < bookmarkFiles.size()) {
AbstractFile bookmarkFile = bookmarkFiles.get(j++); AbstractFile bookmarkFile = bookmarkFiles.get(j++);
if ((bookmarkFile.getSize() == 0) || (bookmarkFile.getName().toLowerCase().contains("-slack")) if ((bookmarkFile.getSize() == 0) || (bookmarkFile.getName().toLowerCase().contains("-slack"))
@ -335,17 +332,17 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome bookmark artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome bookmark artifacts file '%s' (id=%d).",
bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile",
this.getName(), bookmarkFile.getName())); this.getDisplayName(), bookmarkFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome bookmark artifacts file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome bookmark artifacts file '%s' (id=%d).",
temps, bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS temps, bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile",
this.getName(), bookmarkFile.getName())); this.getDisplayName(), bookmarkFile.getName()));
continue; continue;
} }
logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getDisplayName(), temps}); //NON-NLS
File dbFile = new File(temps); File dbFile = new File(temps);
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
dbFile.delete(); dbFile.delete();
@ -374,7 +371,7 @@ class Chromium extends Extract {
} catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) { } catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) {
logger.log(Level.WARNING, "Error parsing Json from Chrome Bookmark.", ex); //NON-NLS logger.log(Level.WARNING, "Error parsing Json from Chrome Bookmark.", ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile3", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile3",
this.getName(), bookmarkFile.getName())); this.getDisplayName(), bookmarkFile.getName()));
continue; continue;
} }
@ -419,7 +416,7 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), domain)); RecentActivityExtracterModuleFactory.getModuleName(), domain));
try { try {
bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create bookmark artifact for file (%d)", bookmarkFile.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create bookmark artifact for file (%d)", bookmarkFile.getId()), ex);
} }
@ -436,6 +433,7 @@ class Chromium extends Extract {
/** /**
* Queries for cookie files and adds artifacts * Queries for cookie files and adds artifacts
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -455,7 +453,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -479,13 +477,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome cookie artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome cookie artifacts file '%s' (id=%d).",
cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile",
this.getName(), cookiesFile.getName())); this.getDisplayName(), cookiesFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome cookie artifacts file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome cookie artifacts file '%s' (id=%d).",
temps, cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS temps, cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile",
this.getName(), cookiesFile.getName())); this.getDisplayName(), cookiesFile.getName()));
continue; continue;
} }
File dbFile = new File(temps); File dbFile = new File(temps);
@ -494,8 +492,8 @@ class Chromium extends Extract {
break; break;
} }
List<HashMap<String, Object>> tempList = this.dbConnect(temps, COOKIE_QUERY); List<HashMap<String, Object>> tempList = this.querySQLiteDb(temps, COOKIE_QUERY);
logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) { for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL,
@ -519,7 +517,7 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), domain)); RecentActivityExtracterModuleFactory.getModuleName(), domain));
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create cookie artifact for file (%d)", cookiesFile.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create cookie artifact for file (%d)", cookiesFile.getId()), ex);
} }
@ -535,6 +533,7 @@ class Chromium extends Extract {
/** /**
* Queries for download files and adds artifacts * Queries for download files and adds artifacts
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -551,7 +550,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -577,13 +576,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome download artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome download artifacts file '%s' (id=%d).",
downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1",
this.getName(), downloadFile.getName())); this.getDisplayName(), downloadFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome download artifacts file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome download artifacts file '%s' (id=%d).",
temps, downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS temps, downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1",
this.getName(), downloadFile.getName())); this.getDisplayName(), downloadFile.getName()));
continue; continue;
} }
File dbFile = new File(temps); File dbFile = new File(temps);
@ -595,12 +594,12 @@ class Chromium extends Extract {
List<HashMap<String, Object>> tempList; List<HashMap<String, Object>> tempList;
if (isChromePreVersion30(temps)) { if (isChromePreVersion30(temps)) {
tempList = this.dbConnect(temps, DOWNLOAD_QUERY); tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY);
} else { } else {
tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V30); tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY_V30);
} }
logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) { for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
String fullPath = result.get("full_path").toString(); //NON-NLS String fullPath = result.get("full_path").toString(); //NON-NLS
@ -630,7 +629,7 @@ class Chromium extends Extract {
// find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact.
try { try {
BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, downloadFile, bbattributes);
bbartifacts.add(webDownloadArtifact); bbartifacts.add(webDownloadArtifact);
String normalizedFullPath = FilenameUtils.normalize(fullPath, true); String normalizedFullPath = FilenameUtils.normalize(fullPath, true);
for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) { for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) {
@ -652,6 +651,7 @@ class Chromium extends Extract {
/** /**
* Gets user logins from Login Data sqlite database * Gets user logins from Login Data sqlite database
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -670,7 +670,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -694,13 +694,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome login artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome login artifacts file '%s' (id=%d).",
loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), loginDataFile.getName())); this.getDisplayName(), loginDataFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome login artifacts file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome login artifacts file '%s' (id=%d).",
temps, loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS temps, loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), loginDataFile.getName())); this.getDisplayName(), loginDataFile.getName()));
continue; continue;
} }
File dbFile = new File(temps); File dbFile = new File(temps);
@ -708,8 +708,8 @@ class Chromium extends Extract {
dbFile.delete(); dbFile.delete();
break; break;
} }
List<HashMap<String, Object>> tempList = this.dbConnect(temps, LOGIN_QUERY); List<HashMap<String, Object>> tempList = this.querySQLiteDb(temps, LOGIN_QUERY);
logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) { for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@ -741,7 +741,7 @@ class Chromium extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), browser)); RecentActivityExtracterModuleFactory.getModuleName(), browser));
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create service account artifact for file (%d)", loginDataFile.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create service account artifact for file (%d)", loginDataFile.getId()), ex);
} }
@ -758,6 +758,7 @@ class Chromium extends Extract {
/** /**
* Gets and parses Autofill data from 'Web Data' database, and creates * Gets and parses Autofill data from 'Web Data' database, and creates
* TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS artifacts * TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS artifacts
*
* @param browser * @param browser
* @param browserLocation * @param browserLocation
* @param ingestJobId The ingest job id. * @param ingestJobId The ingest job id.
@ -776,7 +777,7 @@ class Chromium extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
String msg = NbBundle.getMessage(this.getClass(), "Chrome.getAutofills.errMsg.errGettingFiles"); String msg = NbBundle.getMessage(this.getClass(), "Chrome.getAutofills.errMsg.errGettingFiles");
logger.log(Level.SEVERE, msg, ex); logger.log(Level.SEVERE, msg, ex);
this.addErrorMessage(this.getName() + ": " + msg); this.addErrorMessage(this.getDisplayName() + ": " + msg);
return; return;
} }
@ -801,13 +802,13 @@ class Chromium extends Extract {
logger.log(Level.WARNING, String.format("Error reading Chrome Autofill artifacts file '%s' (id=%d).", logger.log(Level.WARNING, String.format("Error reading Chrome Autofill artifacts file '%s' (id=%d).",
webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getAutofill.errMsg.errAnalyzingFiles", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getAutofill.errMsg.errAnalyzingFiles",
this.getName(), webDataFile.getName())); this.getDisplayName(), webDataFile.getName()));
continue; continue;
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome Web data file '%s' (id=%d).", logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome Web data file '%s' (id=%d).",
tempFilePath, webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS tempFilePath, webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles",
this.getName(), webDataFile.getName())); this.getDisplayName(), webDataFile.getName()));
continue; continue;
} }
File dbFile = new File(tempFilePath); File dbFile = new File(tempFilePath);
@ -866,8 +867,8 @@ class Chromium extends Extract {
String autoFillquery = (isSchemaV8X) ? AUTOFILL_QUERY_V8X String autoFillquery = (isSchemaV8X) ? AUTOFILL_QUERY_V8X
: AUTOFILL_QUERY; : AUTOFILL_QUERY;
List<HashMap<String, Object>> autofills = this.dbConnect(dbFilePath, autoFillquery); List<HashMap<String, Object>> autofills = this.querySQLiteDb(dbFilePath, autoFillquery);
logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, autofills.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, autofills.size()}); //NON-NLS
for (HashMap<String, Object> result : autofills) { for (HashMap<String, Object> result : autofills) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@ -905,7 +906,7 @@ class Chromium extends Extract {
// Add an artifact // Add an artifact
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create web form autopfill artifact for file (%d)", webDataFile.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create web form autopfill artifact for file (%d)", webDataFile.getId()), ex);
} }
@ -936,12 +937,12 @@ class Chromium extends Extract {
WebBrowserArtifactsHelper helper = new WebBrowserArtifactsHelper( WebBrowserArtifactsHelper helper = new WebBrowserArtifactsHelper(
Case.getCurrentCaseThrows().getSleuthkitCase(), Case.getCurrentCaseThrows().getSleuthkitCase(),
NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
webDataFile webDataFile, context.getJobId()
); );
// Get Web form addresses // Get Web form addresses
List<HashMap<String, Object>> addresses = this.dbConnect(dbFilePath, webformAddressQuery); List<HashMap<String, Object>> addresses = this.querySQLiteDb(dbFilePath, webformAddressQuery);
logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, addresses.size()}); //NON-NLS logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, addresses.size()}); //NON-NLS
for (HashMap<String, Object> result : addresses) { for (HashMap<String, Object> result : addresses) {
fieldEncrypted = false; fieldEncrypted = false;
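The WebBrowserArtifactsHelper constructor now takes the ingest job id as its fourth argument. A hedged usage sketch; the addWebFormAddress call and its argument values are illustrative assumptions rather than part of this hunk, so consult the org.sleuthkit.datamodel.blackboardutils API for the exact signature:

    WebBrowserArtifactsHelper helper = new WebBrowserArtifactsHelper(
            Case.getCurrentCaseThrows().getSleuthkitCase(),
            NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
            webDataFile, context.getJobId());
    // Assumed helper method: builds and posts a TSK_WEB_FORM_ADDRESS artifact.
    helper.addWebFormAddress(name, email, phone, mailingAddress,
            creationTime, accessTime, 0);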
@ -1007,9 +1008,12 @@ class Chromium extends Extract {
} }
/** /**
* Check the type of the object and if it is bytes then it is encrypted and return the string and * Check the type of the object and if it is bytes then it is encrypted and
* set flag that field and file are encrypted * return the string and set flag that field and file are encrypted
* @param dataValue Object to be checked, the object is from a database result set *
* @param dataValue Object to be checked, the object is from a database
* result set
*
* @return the actual string or an empty string * @return the actual string or an empty string
*/ */
private String processFields(Object dataValue) { private String processFields(Object dataValue) {
@ -1025,7 +1029,7 @@ class Chromium extends Extract {
private boolean isChromePreVersion30(String temps) { private boolean isChromePreVersion30(String temps) {
String query = "PRAGMA table_info(downloads)"; //NON-NLS String query = "PRAGMA table_info(downloads)"; //NON-NLS
List<HashMap<String, Object>> columns = this.dbConnect(temps, query); List<HashMap<String, Object>> columns = this.querySQLiteDb(temps, query);
for (HashMap<String, Object> col : columns) { for (HashMap<String, Object> col : columns) {
if (col.get("name").equals("url")) { //NON-NLS if (col.get("name").equals("url")) { //NON-NLS
return true; return true;
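isChromePreVersion30 shows the schema-probing idiom used across these extractors: run PRAGMA table_info through the renamed querySQLiteDb helper and look for a marker column. A condensed sketch of the idiom; the method name hasUrlColumn is hypothetical:

    private boolean hasUrlColumn(String dbPath) {
        // One map per column row, keyed by the PRAGMA's column names.
        List<HashMap<String, Object>> columns =
                querySQLiteDb(dbPath, "PRAGMA table_info(downloads)"); //NON-NLS
        for (HashMap<String, Object> col : columns) {
            if ("url".equals(col.get("name"))) { //NON-NLS
                return true; // pre-v30 schema keeps the URL in the downloads table
            }
        }
        return false;
    }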

View File

@ -42,7 +42,7 @@ import org.sleuthkit.datamodel.TskData;
* systems the images may have been used by. * systems the images may have been used by.
* *
*/ */
@Messages({"DataSourceUsageAnalyzer.parentModuleName=Recent Activity"}) @Messages({"DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer"})
class DataSourceUsageAnalyzer extends Extract { class DataSourceUsageAnalyzer extends Extract {
private static final Logger logger = Logger.getLogger(DataSourceUsageAnalyzer.class.getName()); private static final Logger logger = Logger.getLogger(DataSourceUsageAnalyzer.class.getName());
@ -56,37 +56,38 @@ class DataSourceUsageAnalyzer extends Extract {
{".android_secure", "android", "audio", {".android_secure", "android", "audio",
"photos", "dcim", "music", "pictures", "videos"}; //NON-NLS "photos", "dcim", "music", "pictures", "videos"}; //NON-NLS
private Content dataSource; private Content dataSource;
private final IngestJobContext context;
DataSourceUsageAnalyzer(IngestJobContext context) {
super(Bundle.DataSourceUsageAnalyzer_displayName(), context);
this.context = context;
}
@Messages({ @Messages({
"# {0} - OS name", "# {0} - OS name",
"DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})", "DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})",
"Progress_Message_Analyze_Usage=Data Sources Usage Analysis",}) "Progress_Message_Analyze_Usage=Data Sources Usage Analysis",})
@Override @Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource; this.dataSource = dataSource;
try { try {
progressBar.progress(Bundle.Progress_Message_Analyze_Usage()); progressBar.progress(Bundle.Progress_Message_Analyze_Usage());
createDataSourceUsageArtifacts(context); createDataSourceUsageArtifacts();
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to check if datasource contained a volume with operating system specific files", ex); logger.log(Level.WARNING, "Failed to check if datasource contained a volume with operating system specific files", ex);
} }
} }
private void createDataSourceUsageArtifacts(IngestJobContext context) throws TskCoreException { private void createDataSourceUsageArtifacts() throws TskCoreException {
createOSInfoDataSourceUsageArtifacts(); createOSInfoDataSourceUsageArtifacts();
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
return; return;
} }
createAndroidMediaCardArtifacts(); createAndroidMediaCardArtifacts();
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
return; return;
} }
createDJIDroneDATArtitifacts(); createDJIDroneDATArtitifacts();
} }
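The constructor added here is the template every extractor in this commit follows: the IngestJobContext arrives once, at construction, and is passed up to Extract, so process() loses its context parameter. A minimal sketch of a conforming subclass; the class name and bundle key are hypothetical:

    @Messages({"ExampleAnalyzer.displayName=Example Analyzer"})
    class ExampleAnalyzer extends Extract {

        private final IngestJobContext context;

        ExampleAnalyzer(IngestJobContext context) {
            super(Bundle.ExampleAnalyzer_displayName(), context);
            this.context = context; // kept locally for cancellation checks
        }

        @Override
        void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            if (context.dataSourceIngestIsCancelled()) {
                return;
            }
            // ... analysis ...
        }
    }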
@ -146,9 +147,9 @@ class DataSourceUsageAnalyzer extends Extract {
} }
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION, bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
Bundle.DataSourceUsageAnalyzer_parentModuleName(), getRAModuleName(),
dataSourceUsageDescription)); //NON-NLS dataSourceUsageDescription)); //NON-NLS
postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes)); postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes));
} }
/** /**

View File

@ -44,7 +44,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
@ -59,7 +58,7 @@ import org.sleuthkit.autopsy.url.analytics.DomainCategory;
* is created. * is created.
*/ */
@Messages({ @Messages({
"DomainCategoryRunner_moduleName_text=DomainCategoryRunner", "DomainCategoryRunner_moduleName_text=Domain Category Analyzer",
"DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types", "DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types",
"DomainCategoryRunner_parentModuleName=Recent Activity" "DomainCategoryRunner_parentModuleName=Recent Activity"
}) })
@ -98,13 +97,15 @@ class DomainCategoryRunner extends Extract {
BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY) BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)
.map(BlackboardArtifact.Type::new) .map(BlackboardArtifact.Type::new)
.collect(Collectors.toList()); .collect(Collectors.toList());
private final IngestJobContext context;
/** /**
* Get seconds from epoch from the mapping for the attribute type id. * Get seconds from epoch from the mapping for the attribute type id.
* *
* @param attrMap A mapping of attribute type id to BlackboardAttribute for * @param attrMap A mapping of attribute type id to BlackboardAttribute
* an artifact. * for an artifact.
* @param attrTypeId The attribute type id to fetch. * @param attrTypeId The attribute type id to fetch.
*
* @return The time in seconds from epoch or 0 if cannot be found. * @return The time in seconds from epoch or 0 if cannot be found.
*/ */
private static long getTimeOrZero(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) { private static long getTimeOrZero(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) {
@ -119,9 +120,10 @@ class DomainCategoryRunner extends Extract {
/** /**
* Get string for attribute type id or "" if cannot be determined. * Get string for attribute type id or "" if cannot be determined.
* *
* @param attrMap A mapping of attribute type id to BlackboardAttribute for * @param attrMap A mapping of attribute type id to BlackboardAttribute
* an artifact. * for an artifact.
* @param attrTypeId The attribute type id to fetch. * @param attrTypeId The attribute type id to fetch.
*
* @return The string value or "" if cannot be determined or null. * @return The string value or "" if cannot be determined or null.
*/ */
private static String getStringOrEmpty(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) { private static String getStringOrEmpty(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) {
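Both accessors wrap the same defaulting pattern over the artifact's attribute map. A sketch of a body consistent with the javadoc; only the signatures appear in this hunk, so the implementation shown is an assumption:

    private static String getStringOrEmpty(Map<Integer, BlackboardAttribute> attrMap, int attrTypeId) {
        BlackboardAttribute attr = attrMap.get(attrTypeId);
        String value = (attr == null) ? null : attr.getValueString();
        return (value == null) ? "" : value;
    }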
@ -174,14 +176,14 @@ class DomainCategoryRunner extends Extract {
}; };
private Content dataSource; private Content dataSource;
private IngestJobContext context;
private List<DomainCategorizer> domainProviders = Collections.emptyList(); private List<DomainCategorizer> domainProviders = Collections.emptyList();
/** /**
* Main constructor. * Main constructor.
*/ */
DomainCategoryRunner() { DomainCategoryRunner(IngestJobContext context) {
super(Bundle.DomainCategoryRunner_moduleName_text(), context);
this.context = context;
} }
/** /**
@ -189,6 +191,7 @@ class DomainCategoryRunner extends Extract {
* determined, returns null. * determined, returns null.
* *
* @param urlString The url string. * @param urlString The url string.
*
* @return The host or null if cannot be determined. * @return The host or null if cannot be determined.
*/ */
private String getHost(String urlString) { private String getHost(String urlString) {
@ -219,6 +222,7 @@ class DomainCategoryRunner extends Extract {
* *
* @param domain The domain for the item. * @param domain The domain for the item.
* @param host The host for the item. * @param host The host for the item.
*
* @return The domain category result or null if none can be determined. * @return The domain category result or null if none can be determined.
*/ */
private DomainCategory findCategory(String domain, String host) { private DomainCategory findCategory(String domain, String host) {
@ -252,8 +256,10 @@ class DomainCategoryRunner extends Extract {
* Main constructor. * Main constructor.
* *
* @param abstractFile The parent file of the artifact. * @param abstractFile The parent file of the artifact.
* @param host The host of the artifact found in the url attribute. * @param host The host of the artifact found in the url
* @param domain The domain of the artifact in the TSK_DOMAIN attribute. * attribute.
* @param domain The domain of the artifact in the TSK_DOMAIN
* attribute.
*/ */
ArtifactHost(AbstractFile abstractFile, String host, String domain) { ArtifactHost(AbstractFile abstractFile, String host, String domain) {
this.abstractFile = abstractFile; this.abstractFile = abstractFile;
@ -288,8 +294,10 @@ class DomainCategoryRunner extends Extract {
* parent file. * parent file.
* *
* @param artifact The web artifact to parse. * @param artifact The web artifact to parse.
*
* @return The pertinent information or null if important information cannot * @return The pertinent information or null if important information cannot
* be determined. * be determined.
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private ArtifactHost getDomainAndHost(BlackboardArtifact artifact) throws TskCoreException { private ArtifactHost getDomainAndHost(BlackboardArtifact artifact) throws TskCoreException {
@ -338,6 +346,7 @@ class DomainCategoryRunner extends Extract {
* *
* @param items The set of items. * @param items The set of items.
* @param item The item whose existence will be checked in the set. * @param item The item whose existence will be checked in the set.
*
* @return True if item is already contained in 'items'. False if the item is * @return True if item is already contained in 'items'. False if the item is
* null or if not contained in 'items'. * null or if not contained in 'items'.
*/ */
@ -428,8 +437,8 @@ class DomainCategoryRunner extends Extract {
/** /**
* Adds a TSK_WEB_CATEGORIZATION artifact for the given information. * Adds a TSK_WEB_CATEGORIZATION artifact for the given information.
* *
* @param artHost Pertinent details for the artifact (i.e. host, domain, * @param artHost Pertinent details for the artifact (i.e. host,
* parent file). * domain, parent file).
* @param domainCategory The category for this host/domain. * @param domainCategory The category for this host/domain.
*/ */
private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) throws TskCoreException { private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) throws TskCoreException {
@ -439,25 +448,23 @@ class DomainCategoryRunner extends Extract {
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HOST, moduleName, artHost.getHost()), new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HOST, moduleName, artHost.getHost()),
new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, moduleName, domainCategory) new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, moduleName, domainCategory)
); );
postArtifact(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes)); postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes));
} }
@Override @Override
public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource; this.dataSource = dataSource;
this.context = context;
progressBar.progress(Bundle.DomainCategoryRunner_Progress_Message_Domain_Types()); progressBar.progress(Bundle.DomainCategoryRunner_Progress_Message_Domain_Types());
this.findDomainTypes(); this.findDomainTypes();
} }
@Override @Override
void configExtractor() throws IngestModule.IngestModuleException { void startUp() throws IngestModule.IngestModuleException {
// lookup all providers, filter null providers, and sort providers // lookup all providers, filter null providers, and sort providers
Collection<? extends DomainCategorizer> lookupCollection = Lookup.getDefault().lookupAll(DomainCategorizer.class); Collection<? extends DomainCategorizer> lookupCollection = Lookup.getDefault().lookupAll(DomainCategorizer.class);
Collection<? extends DomainCategorizer> lookupList = (lookupCollection == null) ? Collection<? extends DomainCategorizer> lookupList = (lookupCollection == null)
Collections.emptyList() : ? Collections.emptyList()
lookupCollection; : lookupCollection;
// this will be the class instance of the foundProviders // this will be the class instance of the foundProviders
List<DomainCategorizer> foundProviders = new ArrayList<>(); List<DomainCategorizer> foundProviders = new ArrayList<>();
@ -477,9 +484,9 @@ class DomainCategoryRunner extends Extract {
.filter(categorizer -> categorizer != null) .filter(categorizer -> categorizer != null)
.filter(categorizer -> { .filter(categorizer -> {
String className = categorizer.getClass().getName(); String className = categorizer.getClass().getName();
return !className.contains(CUSTOM_CATEGORIZER_PATH) && return !className.contains(CUSTOM_CATEGORIZER_PATH)
!className.equals(DefaultPriorityDomainCategorizer.class.getName()) && && !className.equals(DefaultPriorityDomainCategorizer.class.getName())
!className.equals(DefaultDomainCategorizer.class.getName()); && !className.equals(DefaultDomainCategorizer.class.getName());
}) })
.sorted((a, b) -> a.getClass().getName().compareToIgnoreCase(b.getClass().getName())) .sorted((a, b) -> a.getClass().getName().compareToIgnoreCase(b.getClass().getName()))
.forEach(foundProviders::add); .forEach(foundProviders::add);
@ -491,8 +498,8 @@ class DomainCategoryRunner extends Extract {
try { try {
provider.initialize(); provider.initialize();
} catch (DomainCategorizerException ex) { } catch (DomainCategorizerException ex) {
throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " + throw new IngestModule.IngestModuleException("There was an error instantiating the provider: "
provider.getClass().getSimpleName(), ex); + provider.getClass().getSimpleName(), ex);
} }
} }
@ -500,7 +507,7 @@ class DomainCategoryRunner extends Extract {
} }
@Override @Override
public void complete() { public void shutDown() {
if (this.domainProviders != null) { if (this.domainProviders != null) {
for (DomainCategorizer provider : this.domainProviders) { for (DomainCategorizer provider : this.domainProviders) {
try { try {
@ -510,7 +517,6 @@ class DomainCategoryRunner extends Extract {
} }
} }
} }
super.shutDown();
logger.info("Domain categorization completed."); //NON-NLS
} }
} }
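The renamed startUp()/shutDown() pair brackets the provider lifecycle: Lookup discovery plus initialize() at startup, close-out at shutdown. The discovery step, condensed from the stream pipeline above (the method name discoverProviders is hypothetical, and the custom-path and default-priority filters from the hunk are omitted for brevity):

    private List<DomainCategorizer> discoverProviders() {
        List<DomainCategorizer> foundProviders = new ArrayList<>();
        Lookup.getDefault().lookupAll(DomainCategorizer.class).stream()
                .filter(categorizer -> categorizer != null)
                .filter(categorizer -> !categorizer.getClass().getName()
                        .equals(DefaultDomainCategorizer.class.getName()))
                // deterministic order so categorization is reproducible
                .sorted((a, b) -> a.getClass().getName()
                        .compareToIgnoreCase(b.getClass().getName()))
                .forEach(foundProviders::add);
        return foundProviders;
    }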

View File

@ -35,9 +35,7 @@ import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.logging.Level; import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils;
@ -47,222 +45,195 @@ import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; import org.sleuthkit.datamodel.BlackboardArtifact.Category;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.Score;
import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
abstract class Extract { abstract class Extract {
protected Case currentCase; protected final Case currentCase;
protected SleuthkitCase tskCase; protected final SleuthkitCase tskCase;
protected Blackboard blackboard; private static final Logger logger = Logger.getLogger(Extract.class.getName());
private final Logger logger = Logger.getLogger(this.getClass().getName());
private final ArrayList<String> errorMessages = new ArrayList<>(); private final ArrayList<String> errorMessages = new ArrayList<>();
private String moduleName = ""; private final String displayName;
boolean dataFound = false; protected boolean dataFound = false;
private RAOsAccountCache osAccountCache = null; private final IngestJobContext context;
Extract() { /**
this(""); * Constructs the super class part of an extractor used by the Recent
} * Activity ingest module to do its analysis for an ingest job.
*
Extract(String moduleName) { * @param displayName The display name of the extractor.
this.moduleName = moduleName; * @param context The ingest job context.
} */
Extract(String displayName, IngestJobContext context) {
final void init() throws IngestModuleException { this.displayName = displayName;
try { this.context = context;
currentCase = Case.getCurrentCaseThrows(); currentCase = Case.getCurrentCase();
tskCase = currentCase.getSleuthkitCase(); tskCase = currentCase.getSleuthkitCase();
blackboard = tskCase.getBlackboard();
} catch (NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.Extract_indexError_message(), ex);
}
configExtractor();
} }
/** /**
* Override to add any module-specific configuration * Starts up this extractor. Called by the Recent Activity ingest module in
* its startUp() method.
* *
* @throws IngestModuleException * @throws IngestModuleException The exception is thrown if there is an
* error starting up the extractor.
*/ */
void configExtractor() throws IngestModuleException { void startUp() throws IngestModuleException {
} }
/** /**
* Extractor process method intended to mirror the Ingest process method. * Analyzes the given data source. Called by the Recent Activity ingest
* module in its process() method.
* *
* Subclasses should override just the abstract version of the method. * @param dataSource The data source to be analyzed.
* * @param progressBar A progress object that can be used to report analysis
* @param dataSource The data source object to ingest. * progress.
* @param context The context for the current job.
* @param progressBar A handle to the progressBar for the module to update with status.
* @param osAccountCache The OsAccountCache.
*/ */
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar, RAOsAccountCache osAccountCache) { abstract void process(Content dataSource, DataSourceIngestModuleProgress progressBar);
this.osAccountCache = osAccountCache;
process(dataSource, context, progressBar);
}
abstract void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar); /**
* Shuts down this extractor. Called by the Recent Activity ingest module in
void complete() { * its shutDown() method.
*/
void shutDown() {
} }
/** /**
* Returns a List of string error messages from the inheriting class * Gets any error messages generated by the extractor during processing.
* *
* @return errorMessages returns all error messages logged * @return errorMessages The error message strings.
*/ */
List<String> getErrorMessages() { List<String> getErrorMessages() {
return errorMessages; return Collections.unmodifiableList(errorMessages);
} }
/** /**
* Adds a string to the error message list * Adds an error message to the collection of error messages generated by
* the extractor during processing.
* *
* @param message is an error message represented as a string * @param message The error message.
*/ */
protected void addErrorMessage(String message) { protected void addErrorMessage(String message) {
errorMessages.add(message); errorMessages.add(message);
} }
/** /**
* Generic method for creating artifacts. * Creates an artifact with the given attributes.
* *
* @param type The type of artifact. * @param type The artifact type.
* @param file The file the artifact originated from. * @param content The artifact source/parent.
* @param attributes A list of the attributes to associate with the * @param attributes The attributes.
* artifact.
*
* @return The newly created artifact.
*/
BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException {
return createArtifactWithAttributes(new BlackboardArtifact.Type(type), content, attributes);
}
/**
* Generic method for creating artifacts.
*
* @param type The type of artifact.
* @param content The file the artifact originated from.
* @param attributes A list of the attributes to associate with the
* artifact.
* *
* @return The newly created artifact. * @return The newly created artifact.
* *
* @throws TskCoreException * @throws TskCoreException This exception is thrown if there is an issue
* creating the artifact.
*/ */
BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException { BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection<BlackboardAttribute> attributes) throws TskCoreException {
switch (type.getCategory()) { if (type.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) {
case DATA_ARTIFACT:
return content.newDataArtifact(type, attributes); return content.newDataArtifact(type, attributes);
case ANALYSIS_RESULT: } else if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) {
return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult(); return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult();
default: } else {
throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName()); throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName());
} }
} }
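The category dispatch above lets one helper serve both artifact families: callers pick only the type, and the type's category decides whether newDataArtifact or newAnalysisResult runs. A usage sketch; sourceFile and the URL value are hypothetical:

    // TSK_WEB_HISTORY is a DATA_ARTIFACT type, so this resolves to
    // content.newDataArtifact(...).
    Collection<BlackboardAttribute> attrs = new ArrayList<>();
    attrs.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
            getRAModuleName(), "https://www.example.com")); //NON-NLS
    BlackboardArtifact history = createArtifactWithAttributes(
            BlackboardArtifact.Type.TSK_WEB_HISTORY, sourceFile, attrs);

    // TSK_WEB_CATEGORIZATION is an ANALYSIS_RESULT type, so the same call
    // resolves to content.newAnalysisResult(...) with Score.SCORE_UNKNOWN.
    BlackboardArtifact categorization = createArtifactWithAttributes(
            BlackboardArtifact.Type.TSK_WEB_CATEGORIZATION, sourceFile, attrs);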
/** /**
* Returns an associated artifact for the given artifact. * Creates an associated artifact for a given artifact.
* *
* @param content The content to create the artifact from. * @param content The artifact source/parent.
* @param artifact The artifact to associate the new artifact with. * @param artifact The artifact with which to associate the new artifact.
* *
* @return The newly created artifact. * @return The newly created artifact.
* *
* @throws TskCoreException * @throws TskCoreException This exception is thrown if there is an issue
* creating the artifact.
*/ */
BlackboardArtifact createAssociatedArtifact(Content content, BlackboardArtifact artifact) throws TskCoreException { BlackboardArtifact createAssociatedArtifact(Content content, BlackboardArtifact artifact) throws TskCoreException {
return createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, BlackboardAttribute attribute = new BlackboardAttribute(BlackboardAttribute.Type.TSK_ASSOCIATED_ARTIFACT, getRAModuleName(), artifact.getArtifactID());
RecentActivityExtracterModuleFactory.getModuleName(), artifact.getArtifactID()))); return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(attribute));
} }
/** /**
* Method to post a blackboard artifact to the blackboard. * Posts an artifact to the blackboard.
* *
* @param bbart Blackboard artifact to be indexed. Nothing will occur if a null object is passed in. * @param artifact The artifact.
*/ */
@Messages({"Extract.indexError.message=Failed to index artifact for keyword search.", void postArtifact(BlackboardArtifact artifact) {
"Extract.noOpenCase.errMsg=No open case available."}) if (artifact != null && !context.dataArtifactIngestIsCancelled()) {
void postArtifact(BlackboardArtifact bbart) { postArtifacts(Collections.singleton(artifact));
if(bbart == null) {
return;
}
try {
// index the artifact for keyword search
blackboard.postArtifact(bbart, getName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bbart.getDisplayName(), ex); //NON-NLS
} }
} }
/** /**
* Method to post a list of BlackboardArtifacts to the blackboard. * Posts a collection of artifacts to the blackboard.
* *
* @param artifacts A list of artifacts. If the list is empty or null, the function will return. * @param artifacts The artifacts.
*/ */
void postArtifacts(Collection<BlackboardArtifact> artifacts) { void postArtifacts(Collection<BlackboardArtifact> artifacts) {
if(artifacts == null || artifacts.isEmpty()) { if (artifacts != null && !artifacts.isEmpty() && !context.dataArtifactIngestIsCancelled()) {
return;
}
try { try {
blackboard.postArtifacts(artifacts, getName()); tskCase.getBlackboard().postArtifacts(artifacts, RecentActivityExtracterModuleFactory.getModuleName(), context.getJobId());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to post blackboard artifacts", ex); //NON-NLS logger.log(Level.SEVERE, "Failed to post artifacts", ex); //NON-NLS
}
} }
} }
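postArtifact now funnels into postArtifacts, and both are no-ops once the job is cancelled; the job id passed to the blackboard ties the posts to this ingest job. A sketch of the intended batch pattern at the end of an extractor pass; the variable names are hypothetical:

    List<BlackboardArtifact> bbartifacts = new ArrayList<>();
    for (HashMap<String, Object> row : querySQLiteDb(dbPath, query)) {
        // ... build attributes from the row ...
        bbartifacts.add(createArtifactWithAttributes(
                BlackboardArtifact.Type.TSK_WEB_COOKIE, dbFile, attrs));
    }
    // One post for the whole batch: a single ingest event instead of one per
    // artifact, and nothing is posted if ingest was cancelled in the meantime.
    postArtifacts(bbartifacts);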
/** /**
* Connects to a SQLite database file (e.g., an application database) and
* executes a query.
*
* Returns a List from a result set based on sql query. This is used to * Returns a List from a result set based on sql query. This is used to
* query sqlite databases storing user recent activity data, such as in * query sqlite databases storing user recent activity data, such as in
* firefox sqlite db * firefox sqlite db
* *
* @param path is the string path to the sqlite db file * @param path The path to the SQLite database file
* @param query is a sql string query that is to be run * @param query The SQL query to be executed.
* *
* @return list is the ArrayList that contains the resultset information in * @return A list of maps that represents the query results. Each map entry
* it that the query obtained * consists of a column name as a key and an Object as a column
* value, with empty strings substituted for nulls.
*/ */
protected List<HashMap<String, Object>> dbConnect(String path, String query) { protected List<HashMap<String, Object>> querySQLiteDb(String path, String query) {
ResultSet temprs; ResultSet resultSet;
List<HashMap<String, Object>> list; List<HashMap<String, Object>> list;
String connectionString = "jdbc:sqlite:" + path; //NON-NLS String connectionString = "jdbc:sqlite:" + path; //NON-NLS
SQLiteDBConnect tempdbconnect = null; SQLiteDBConnect dbConnection = null;
try { try {
tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS dbConnection = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS
temprs = tempdbconnect.executeQry(query); resultSet = dbConnection.executeQry(query);
list = this.resultSetToArrayList(temprs); list = resultSetToArrayList(resultSet);
} catch (SQLException ex) { } catch (SQLException ex) {
logger.log(Level.WARNING, "Error while trying to read into a sqlite db." + connectionString, ex); //NON-NLS logger.log(Level.WARNING, "Error while trying to read into a sqlite db." + connectionString, ex); //NON-NLS
return Collections.<HashMap<String, Object>>emptyList(); return Collections.<HashMap<String, Object>>emptyList();
} } finally {
finally { if (dbConnection != null) {
if (tempdbconnect != null) { dbConnection.closeConnection();
tempdbconnect.closeConnection();
} }
} }
return list; return list;
} }
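Each map in the returned list is one row keyed by column name, with raw JDBC objects as values, so callers cast per column. A small usage sketch; the query and column names are hypothetical:

    List<HashMap<String, Object>> rows = querySQLiteDb(tempDbPath,
            "SELECT name, value, count FROM autofill"); //NON-NLS
    for (HashMap<String, Object> row : rows) {
        String name = (String) row.get("name"); //NON-NLS
        // Guard the cast: nulls come back as empty strings per the javadoc.
        long count = (row.get("count") instanceof Number)
                ? ((Number) row.get("count")).longValue() : 0L;
        // ... build BlackboardAttributes from name/count ...
    }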
/** /**
* Returns a List of AbstractFile objects from TSK based on sql query. * Converts a JDBC result set to a list of maps. Each map entry consists of
* a column name as a key and an Object as a column value, with empty
* strings substituted for nulls.
* *
* @param rs is the resultset that needs to be converted to an arraylist * @param rs The result set.
* *
* @return list returns the arraylist built from the converted resultset * @return The list of maps.
*/ */
private List<HashMap<String, Object>> resultSetToArrayList(ResultSet rs) throws SQLException { private List<HashMap<String, Object>> resultSetToArrayList(ResultSet rs) throws SQLException {
ResultSetMetaData md = rs.getMetaData(); ResultSetMetaData md = rs.getMetaData();
int columns = md.getColumnCount(); int columns = md.getColumnCount();
List<HashMap<String, Object>> list = new ArrayList<>(50); List<HashMap<String, Object>> results = new ArrayList<>(50);
while (rs.next()) { while (rs.next()) {
HashMap<String, Object> row = new HashMap<>(columns); HashMap<String, Object> row = new HashMap<>(columns);
for (int i = 1; i <= columns; ++i) { for (int i = 1; i <= columns; ++i) {
@ -272,63 +243,76 @@ abstract class Extract {
row.put(md.getColumnName(i), rs.getObject(i)); row.put(md.getColumnName(i), rs.getObject(i));
} }
} }
list.add(row); results.add(row);
} }
return results;
return list;
} }
/** /**
* Returns the name of the inheriting class * Gets the display name of this extractor.
* *
* @return Gets the moduleName set in the moduleName data member * @return The display name.
*/ */
protected String getName() { protected String getDisplayName() {
return moduleName; return displayName;
} }
/**
* Get the display name of the Recent Activity module.
*
* @return The display name.
*/
protected String getRAModuleName() { protected String getRAModuleName() {
return RecentActivityExtracterModuleFactory.getModuleName(); return RecentActivityExtracterModuleFactory.getModuleName();
} }
/** /**
* Returns the state of foundData * Gets the value of a flag indicating whether or not this extractor found
* @return * any data.
*
* @return True or false.
*/ */
public boolean foundData() { public boolean foundData() {
return dataFound; return dataFound;
} }
/** /**
* Sets the value of foundData * Sets the value of a flag indicating whether or not this extractor found
* @param foundData * any data.
*
* @param foundData True or false.
*/ */
protected void setFoundData(boolean foundData) { protected void setFoundData(boolean foundData) {
dataFound = foundData; dataFound = foundData;
} }
/** /**
* Returns the current case instance * Gets the current case.
* @return Current case instance *
* @return The current case.
*/ */
protected Case getCurrentCase() { protected Case getCurrentCase() {
return this.currentCase; return this.currentCase;
} }
/** /**
* Creates a list of attributes for a history artifact. * Creates a list of attributes for a web history artifact.
* *
* @param url * @param url The URL, may be null.
* @param accessTime Time url was accessed * @param accessTime The time the URL was accessed, may be null.
* @param referrer referred url * @param referrer The referring URL, may be null.
* @param title title of the page * @param title Title of the returned resource, may be null.
* @param programName module name * @param programName The program that executed the request, may be the
* @param domain domain of the url * empty string, may be null.
* @param user user that accessed url * @param domain The domain of the URL, may be null.
* @return List of BlackboardAttributes for giving attributes * @param user The user that accessed URL, may be null.
* @throws TskCoreException *
* @return The list of attributes.
*
* @throws TskCoreException The exception is thrown if there is an issue
* creating the attributes.
*/ */
protected Collection<BlackboardAttribute> createHistoryAttribute(String url, Long accessTime, protected Collection<BlackboardAttribute> createHistoryAttributes(String url, Long accessTime,
String referrer, String title, String programName, String domain, String user) throws TskCoreException { String referrer, String title, String programName, String domain, String user) throws TskCoreException {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@ -365,15 +349,16 @@ abstract class Extract {
} }
/** /**
* Creates a list of attributes for a cookie. * Creates a list of attributes for a web cookie artifact.
* *
* @param url cookie url * @param url The cookie url, may be null.
* @param creationTime cookie creation time * @param creationTime The cookie creation time, may be null.
* @param name cookie name * @param name The cookie name, may be null.
* @param value cookie value * @param value The cookie value, may be null.
* @param programName Name of the module creating the attribute * @param programName The program that created the cookie, may be null.
* @param domain Domain of the URL * @param domain The domain of the cookie URL, may be null.
* @return List of BlackboarAttributes for the passed in attributes *
* @return The list of attributes.
*/ */
protected Collection<BlackboardAttribute> createCookieAttributes(String url, protected Collection<BlackboardAttribute> createCookieAttributes(String url,
Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) { Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) {
@ -418,14 +403,16 @@ abstract class Extract {
} }
/** /**
* Creates a list of bookmark attributes from the passed in parameters. * Creates a list of attributes for a web bookmark artifact.
* *
* @param url Bookmark url * @param url The bookmark URL, may be null.
* @param title Title of the bookmarked page * @param title The title of the bookmarked page, may be null.
* @param creationTime Date & time at which the bookmark was created * @param creationTime The date and time at which the bookmark was created,
* @param programName Name of the module creating the attribute * may be null.
* @param domain The domain of the bookmark's url * @param programName The program that created the bookmark, may be null.
* @return A collection of bookmark attributes * @param domain The domain of the bookmark's URL, may be null.
*
* @return The list of attributes.
*/ */
protected Collection<BlackboardAttribute> createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) { protected Collection<BlackboardAttribute> createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@ -455,14 +442,15 @@ abstract class Extract {
} }
/** /**
* Creates a list of the attributes of a downloaded file * Creates a list of attributes for a web download artifact.
* *
* @param path * @param path The path of the downloaded file, may be null.
* @param url URL of the downloaded file * @param url The URL of the downloaded file, may be null.
* @param accessTime Time the download occurred * @param accessTime The time the download occurred, may be null.
* @param domain Domain of the URL * @param domain The domain of the URL, may be null.
* @param programName Name of the module creating the attribute * @param programName The program that downloaded the file, may be null.
* @return A collection of attributes of a downloaded file *
* @return The list of attributes.
*/ */
protected Collection<BlackboardAttribute> createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) { protected Collection<BlackboardAttribute> createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@ -498,42 +486,22 @@ abstract class Extract {
} }
/** /**
* Creates a list of the attributes for source of a downloaded file * Writes a file to disk in this extractor's dedicated temp directory within
* the Recent Activity ingest module's temp directory. The object ID of the
* file is appended to the file name for uniqueness.
* *
* @param url source URL of the downloaded file * @param file The file.
* @return A collection of attributes for source of a downloaded file
*/
protected Collection<BlackboardAttribute> createDownloadSourceAttributes(String url) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL,
RecentActivityExtracterModuleFactory.getModuleName(),
(url != null) ? url : "")); //NON-NLS
return bbattributes;
}
/**
* Create temporary file for the given AbstractFile. The new file will be
* created in the temp directory for the module with a unique file name.
* *
* @param context * @return A File object that represents the file on disk.
* @param file *
* @param IngestJobId The ingest job id. * @throws IOException Exception thrown if there is a problem writing the
* @return Newly created copy of the AbstractFile * file to disk.
* @throws IOException
*/ */
protected File createTemporaryFile(IngestJobContext context, AbstractFile file, long ingestJobId) throws IOException{ protected File createTemporaryFile(AbstractFile file) throws IOException {
Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath( Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(getCurrentCase(), getDisplayName(), context.getJobId()), file.getName() + file.getId() + file.getNameExtension());
getCurrentCase(), getName(), ingestJobId), file.getName() + file.getId() + file.getNameExtension());
java.io.File tempFile = tempFilePath.toFile(); java.io.File tempFile = tempFilePath.toFile();
try {
ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled); ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled);
} catch (IOException ex) {
throw new IOException("Error writingToFile: " + file, ex); //NON-NLS
}
return tempFile; return tempFile;
} }
} }
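The temp-file helper now derives everything from the extractor's own state: case, display name, and job id locate the directory, and the object id makes the name unique. A usage sketch; webDataFile and query are hypothetical:

    // Copy the database out of the image so SQLite/JDBC can open it; the copy
    // lands under the Recent Activity temp directory for this job.
    File tempDbFile = createTemporaryFile(webDataFile);
    List<HashMap<String, Object>> rows =
            querySQLiteDb(tempDbFile.getAbsolutePath(), query);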

View File

@ -39,8 +39,6 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.openide.modules.InstalledFileLocator; import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.ExecUtil; import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
@ -65,7 +63,7 @@ final class ExtractEdge extends Extract {
private static final Logger LOG = Logger.getLogger(ExtractEdge.class.getName()); private static final Logger LOG = Logger.getLogger(ExtractEdge.class.getName());
private Content dataSource; private Content dataSource;
private IngestJobContext context; private final IngestJobContext context;
private HashMap<String, ArrayList<String>> containersTable; private HashMap<String, ArrayList<String>> containersTable;
private static final String EDGE = "Edge"; //NON-NLS private static final String EDGE = "Edge"; //NON-NLS
@ -114,32 +112,31 @@ final class ExtractEdge extends Extract {
"ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file", "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file",
"ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file", "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file",
"ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file", "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file",
"ExtractEdge_Module_Name=Microsoft Edge", "ExtractEdge_Module_Name=Microsoft Edge Analyzer",
"ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history", "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history",
"Progress_Message_Edge_History=Microsoft Edge History", "Progress_Message_Edge_History=Microsoft Edge History",
"Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks", "Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks",
"Progress_Message_Edge_Cookies=Microsoft Edge Cookies", "Progress_Message_Edge_Cookies=Microsoft Edge Cookies",})
})
/** /**
* Extract the bookmarks, cookies, downloads and history from Microsoft Edge * Extract the bookmarks, cookies, downloads and history from Microsoft Edge
*/ */
ExtractEdge() { ExtractEdge(IngestJobContext context) {
super(Bundle.ExtractEdge_Module_Name()); super(Bundle.ExtractEdge_Module_Name(), context);
this.context = context;
} }
@Override @Override
protected String getName() { protected String getDisplayName() {
return Bundle.ExtractEdge_Module_Name(); return Bundle.ExtractEdge_Module_Name();
} }
@Override @Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), EDGE, context.getJobId()); String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), EDGE, context.getJobId());
String moduleTempResultDir = Paths.get(moduleTempDir, EDGE_RESULT_FOLDER_NAME).toString(); String moduleTempResultDir = Paths.get(moduleTempDir, EDGE_RESULT_FOLDER_NAME).toString();
this.dataSource = dataSource; this.dataSource = dataSource;
this.context = context;
this.setFoundData(false); this.setFoundData(false);
List<AbstractFile> webCacheFiles = null; List<AbstractFile> webCacheFiles = null;
@ -210,12 +207,12 @@ final class ExtractEdge extends Extract {
* @param webCacheFiles List of case WebCacheV01.dat files * @param webCacheFiles List of case WebCacheV01.dat files
* @param moduleTempDir The temp directory for this module. * @param moduleTempDir The temp directory for this module.
* @param moduleTempResultDir The temp results directory for this module. * @param moduleTempResultDir The temp results directory for this module.
*
* @throws IOException * @throws IOException
* @throws TskCoreException * @throws TskCoreException
*/ */
void processWebCacheDbFile(String eseDumperPath, List<AbstractFile> webCacheFiles, DataSourceIngestModuleProgress progressBar, void processWebCacheDbFile(String eseDumperPath, List<AbstractFile> webCacheFiles, DataSourceIngestModuleProgress progressBar,
String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {
for (AbstractFile webCacheFile : webCacheFiles) { for (AbstractFile webCacheFile : webCacheFiles) {
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
@ -266,19 +263,18 @@ final class ExtractEdge extends Extract {
} }
/** /**
* Process spartan.edb ese database file creating artifacts for the bookmarks * Process spartan.edb ese database file creating artifacts for the
* contained within. * bookmarks contained within.
* *
* @param eseDumperPath Path to ESEDatabaseViewer * @param eseDumperPath Path to ESEDatabaseViewer
* @param spartanFiles List of the case spartan.edb files * @param spartanFiles List of the case spartan.edb files
* @param moduleTempDir The temp directory for this module. * @param moduleTempDir The temp directory for this module.
* @param moduleTempResultDir The temp results directory for this module. * @param moduleTempResultDir The temp results directory for this module.
*
* @throws IOException * @throws IOException
* @throws TskCoreException * @throws TskCoreException
*/ */
void processSpartanDbFile(String eseDumperPath, List<AbstractFile> spartanFiles, void processSpartanDbFile(String eseDumperPath, List<AbstractFile> spartanFiles, String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {
String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException {
for (AbstractFile spartanFile : spartanFiles) { for (AbstractFile spartanFile : spartanFiles) {
if (context.dataSourceIngestIsCancelled()) { if (context.dataSourceIngestIsCancelled()) {
@ -320,14 +316,15 @@ final class ExtractEdge extends Extract {
* getHistory searches the files with "container" in the file name for lines * getHistory searches the files with "container" in the file name for lines
* with the text "Visited" in them. Note that not all of the container * with the text "Visited" in them. Note that not all of the container
* files, in fact most of them do not, have the browser history in them. * files, in fact most of them do not, have the browser history in them.
*
* @param origFile Original case file * @param origFile Original case file
* @param resultDir Output directory of ESEDatabaseViewer * @param resultDir Output directory of ESEDatabaseViewer
*
* @throws TskCoreException * @throws TskCoreException
* @throws FileNotFoundException * @throws FileNotFoundException
*/ */
private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException {
ArrayList<File> historyFiles = getHistoryFiles(resultDir); ArrayList<File> historyFiles = getHistoryFiles(resultDir);
if (historyFiles == null) { if (historyFiles == null) {
return; return;
} }
@ -382,6 +379,7 @@ final class ExtractEdge extends Extract {
* *
* @param origFile Original case file * @param origFile Original case file
* @param resultDir Output directory of ESEDatabaseViewer * @param resultDir Output directory of ESEDatabaseViewer
*
* @throws TskCoreException * @throws TskCoreException
* @throws FileNotFoundException * @throws FileNotFoundException
*/ */
@ -427,6 +425,7 @@ final class ExtractEdge extends Extract {
* *
* @param origFile Original case file * @param origFile Original case file
* @param resultDir Output directory of ESEDatabaseViewer * @param resultDir Output directory of ESEDatabaseViewer
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private void getCookies(AbstractFile origFile, File resultDir) throws TskCoreException { private void getCookies(AbstractFile origFile, File resultDir) throws TskCoreException {
@ -486,6 +485,7 @@ final class ExtractEdge extends Extract {
* *
* @param origFile Original case file * @param origFile Original case file
* @param resultDir Output directory of ESEDatabaseViewer * @param resultDir Output directory of ESEDatabaseViewer
*
* @throws TskCoreException * @throws TskCoreException
* @throws FileNotFoundException * @throws FileNotFoundException
*/ */
@ -544,7 +544,8 @@ final class ExtractEdge extends Extract {
/** /**
* Find the location of ESEDatabaseViewer.exe * Find the location of ESEDatabaseViewer.exe
* *
* @return Absolute path to ESEDatabaseViewer.exe or null if the file is not found * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not
* found
*/ */
private String getPathForESEDumper() { private String getPathForESEDumper() {
Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME); Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME);
@ -561,6 +562,7 @@ final class ExtractEdge extends Extract {
* Finds all of the WebCacheV01.dat files in the case * Finds all of the WebCacheV01.dat files in the case
* *
* @return A list of WebCacheV01.dat files, possibly empty if none are found * @return A list of WebCacheV01.dat files, possibly empty if none are found
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private List<AbstractFile> fetchWebCacheDBFiles() throws TskCoreException { private List<AbstractFile> fetchWebCacheDBFiles() throws TskCoreException {
@ -573,6 +575,7 @@ final class ExtractEdge extends Extract {
* Finds all of the spartan.edb files in the case * Finds all of the spartan.edb files in the case
* *
* @return A list of spartan files, possibly empty if none are found * @return A list of spartan files, possibly empty if none are found
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private List<AbstractFile> fetchSpartanDBFiles() throws TskCoreException { private List<AbstractFile> fetchSpartanDBFiles() throws TskCoreException {
@ -590,6 +593,7 @@ final class ExtractEdge extends Extract {
* @param dumperPath Path to ESEDatabaseView.exe * @param dumperPath Path to ESEDatabaseView.exe
* @param inputFilePath Path to ese database file to be dumped * @param inputFilePath Path to ese database file to be dumped
* @param outputDir Output directory for dumper * @param outputDir Output directory for dumper
*
* @throws IOException * @throws IOException
*/ */
private void executeDumper(String dumperPath, String inputFilePath, private void executeDumper(String dumperPath, String inputFilePath,
@ -621,7 +625,9 @@ final class ExtractEdge extends Extract {
* @param origFile Original case file * @param origFile Original case file
* @param headers List of table headers * @param headers List of table headers
* @param line CSV string representing a row of history table * @param line CSV string representing a row of history table
*
* @return BlackboardArtifact representing one history table entry * @return BlackboardArtifact representing one history table entry
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException { private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -638,9 +644,9 @@ final class ExtractEdge extends Extract {
String accessTime = rowSplit[index].trim(); String accessTime = rowSplit[index].trim();
Long ftime = parseTimestamp(accessTime); Long ftime = parseTimestamp(accessTime);
return createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, createHistoryAttribute(url, ftime, return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, createHistoryAttributes(url, ftime,
null, null, null, null,
this.getName(), this.getDisplayName(),
NetworkUtils.extractDomain(url), user)); NetworkUtils.extractDomain(url), user));
} }
@ -650,7 +656,9 @@ final class ExtractEdge extends Extract {
* @param origFile Original case file * @param origFile Original case file
* @param headers List of table headers * @param headers List of table headers
* @param line CSV string representing a row of cookie table * @param line CSV string representing a row of cookie table
*
* @return BlackboardArtifact representing one cookie table entry * @return BlackboardArtifact representing one cookie table entry
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException { private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -664,7 +672,7 @@ final class ExtractEdge extends Extract {
String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim()); String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim());
String url = flipDomain(domain); String url = flipDomain(domain);
return createArtifactWithAttributes(TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url))); return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getDisplayName(), NetworkUtils.extractDomain(url)));
} }
/** /**
@ -677,7 +685,9 @@ final class ExtractEdge extends Extract {
* @param origFile Original case file * @param origFile Original case file
* @param headers List of table headers * @param headers List of table headers
* @param line CSV string representing a row of download table * @param line CSV string representing a row of download table
*
* @return BlackboardArtifact representing one download table entry * @return BlackboardArtifact representing one download table entry
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException { private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -698,7 +708,10 @@ final class ExtractEdge extends Extract {
* @param origFile File the table came from, i.e. spartan.edb * @param origFile File the table came from, i.e. spartan.edb
* @param headers List of table column headers * @param headers List of table column headers
* @param line The line or row of the table to parse * @param line The line or row of the table to parse
* @return BlackboardArtifact representation of the passed-in line/table row or null if no Bookmark is found *
* @return BlackboardArtifact representation of the passed-in line/table row
* or null if no Bookmark is found
*
* @throws TskCoreException * @throws TskCoreException
*/ */
private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException { private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List<String> headers, String line) throws TskCoreException {
@ -712,11 +725,10 @@ final class ExtractEdge extends Extract {
return null; return null;
} }
return createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null, return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null,
this.getName(), NetworkUtils.extractDomain(url))); this.getDisplayName(), NetworkUtils.extractDomain(url)));
} }
/** /**
* Attempt to parse the timestamp. * Attempt to parse the timestamp.
* *
@ -829,6 +841,7 @@ final class ExtractEdge extends Extract {
* Converts a space-separated string of hex values to ASCII characters. * Converts a space-separated string of hex values to ASCII characters.
* *
* @param hexString * @param hexString
*
* @return "decoded" string or null if a non-hex value was found * @return "decoded" string or null if a non-hex value was found
*/ */
private String hexToChar(String hexString) { private String hexToChar(String hexString) {
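The body of hexToChar is elided in this hunk. A sketch consistent with its Javadoc, where each space-separated token is parsed as one hex byte; the control-character filter is an illustrative assumption.

// Sketch only: decode space-separated hex bytes into a string, returning
// null as soon as a non-hex token is found (per the Javadoc contract).
private String hexToChar(String hexString) {
    StringBuilder result = new StringBuilder();
    for (String hexText : hexString.split(" ")) {
        if (hexText.isEmpty()) {
            continue;
        }
        try {
            int value = Integer.parseInt(hexText, 16);
            if (value > 31) { // assumed: skip control characters
                result.append((char) value);
            }
        } catch (NumberFormatException ex) {
            return null; // non-hex value found
        }
    }
    return result.toString();
}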
@ -858,6 +871,7 @@ final class ExtractEdge extends Extract {
* there to weed out the "junk". * there to weed out the "junk".
* *
* @param domain * @param domain
*
* @return Correct domain string * @return Correct domain string
*/ */
private String flipDomain(String domain) { private String flipDomain(String domain) {
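flipDomain's body is likewise elided. The WebCache cookie table stores domains in reversed segment order, so something like "com.google.www" should come back as "www.google.com"; a sketch, with the numeric-junk filter as an assumption.

// Sketch only: reverse the dot-separated segments, dropping numeric
// "junk" tokens. Requires: import java.util.ArrayList; import java.util.List;
private String flipDomain(String domain) {
    if (domain == null || domain.isEmpty()) {
        return null;
    }
    List<String> keep = new ArrayList<>();
    String[] tokens = domain.split("\\.");
    for (int i = tokens.length - 1; i >= 0; i--) {
        String token = tokens[i].trim();
        if (!token.isEmpty() && !token.matches("\\d+")) { // weed out the "junk"
            keep.add(token);
        }
    }
    return String.join(".", keep); // e.g. "com.google.www" -> "www.google.com"
}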
@ -888,6 +902,7 @@ final class ExtractEdge extends Extract {
* them. * them.
* *
* @param resultDir Path to ESEDatabaseViewer output * @param resultDir Path to ESEDatabaseViewer output
*
* @return List of download table files * @return List of download table files
*/ */
private ArrayList<File> getDownloadFiles(File resultDir) throws FileNotFoundException { private ArrayList<File> getDownloadFiles(File resultDir) throws FileNotFoundException {
@ -898,7 +913,9 @@ final class ExtractEdge extends Extract {
* Returns a list of the container files that have history information in them. * Returns a list of the container files that have history information in them.
* *
* @param resultDir Path to ESEDatabaseViewer output * @param resultDir Path to ESEDatabaseViewer output
*
* @return List of history table files * @return List of history table files
*
* @throws FileNotFoundException * @throws FileNotFoundException
*/ */
private ArrayList<File> getHistoryFiles(File resultDir) throws FileNotFoundException { private ArrayList<File> getHistoryFiles(File resultDir) throws FileNotFoundException {
@ -910,7 +927,10 @@ final class ExtractEdge extends Extract {
* *
* @param resultDir Path to ESEDatabaseViewer output * @param resultDir Path to ESEDatabaseViewer output
* @param type Type of table files * @param type Type of table files
* @return List of table files; returns null if no files of that type are found *
* @return List of table files; returns null if no files of that type are
* found
*
* @throws FileNotFoundException * @throws FileNotFoundException
*/ */
private ArrayList<File> getContainerFiles(File resultDir, String type) throws FileNotFoundException { private ArrayList<File> getContainerFiles(File resultDir, String type) throws FileNotFoundException {
@ -938,7 +958,9 @@ final class ExtractEdge extends Extract {
* files. * files.
* *
* @param resultDir Path to ESEDatabaseViewer output * @param resultDir Path to ESEDatabaseViewer output
* @return HashMap whose key is the table type and whose value is a list of table IDs for that type *
* @return HashMap whose key is the table type and whose value is a list
* of table IDs for that type
*/ */
private HashMap<String, ArrayList<String>> getContainerIDTable(File resultDir) throws FileNotFoundException { private HashMap<String, ArrayList<String>> getContainerIDTable(File resultDir) throws FileNotFoundException {
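A sketch of how such a container-ID map could be assembled from the dumper's Containers table; the file name and the "Name"/"ContainerId" column headers are assumptions, and the naive comma split ignores quoted values.

// Sketch only: group container IDs under a normalized table-type key.
// Requires: java.io.* and java.util.* imports; assumes both columns exist.
private HashMap<String, ArrayList<String>> getContainerIDTable(File resultDir)
        throws FileNotFoundException {
    HashMap<String, ArrayList<String>> idTable = new HashMap<>();
    File containerFile = new File(resultDir, "Containers.csv"); // assumed name
    try (Scanner scanner = new Scanner(new FileInputStream(containerFile))) {
        List<String> headers = Arrays.asList(scanner.nextLine().toLowerCase().split(","));
        while (scanner.hasNextLine()) {
            String[] row = scanner.nextLine().split(",");
            String type = row[headers.indexOf("name")].trim().toLowerCase();
            String id = row[headers.indexOf("containerid")].trim();
            idTable.computeIfAbsent(type, k -> new ArrayList<>()).add(id);
        }
    }
    return idTable;
}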

View File

@ -43,7 +43,6 @@ import java.util.Scanner;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.openide.modules.InstalledFileLocator; import org.openide.modules.InstalledFileLocator;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
@ -72,7 +71,7 @@ class ExtractIE extends Extract {
private static final String RESOURCE_URL_PREFIX = "res://"; private static final String RESOURCE_URL_PREFIX = "res://";
private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
private Content dataSource; private Content dataSource;
private IngestJobContext context; private final IngestJobContext context;
@Messages({ @Messages({
"Progress_Message_IE_History=IE History", "Progress_Message_IE_History=IE History",
@ -83,18 +82,18 @@ class ExtractIE extends Extract {
"Progress_Message_IE_AutoFill=IE Auto Fill", "Progress_Message_IE_AutoFill=IE Auto Fill",
"Progress_Message_IE_Logins=IE Logins",}) "Progress_Message_IE_Logins=IE Logins",})
ExtractIE() { ExtractIE(IngestJobContext context) {
super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text")); super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"), context);
JAVA_PATH = PlatformUtil.getJavaPath(); JAVA_PATH = PlatformUtil.getJavaPath();
this.context = context;
} }
@Override @Override
public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), "IE", context.getJobId()); String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), "IE", context.getJobId());
String moduleTempResultsDir = Paths.get(moduleTempDir, "results").toString(); String moduleTempResultsDir = Paths.get(moduleTempDir, "results").toString();
this.dataSource = dataSource; this.dataSource = dataSource;
this.context = context;
dataFound = false; dataFound = false;
progressBar.progress(Bundle.Progress_Message_IE_Bookmarks()); progressBar.progress(Bundle.Progress_Message_IE_Bookmarks());
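This is the refactoring pattern repeated across the extractors in this commit: the IngestJobContext moves from a process() parameter and mutable field to a constructor argument held in a final field, so the context is injected once and available for the life of the extractor. Schematically (class name hypothetical, types taken from this diff):

// Schematic sketch of the constructor-injection pattern applied here.
class ExampleExtractor extends Extract {

    private final IngestJobContext context;

    ExampleExtractor(IngestJobContext context) {
        super("Example Analyzer", context); // Extract now takes the context too
        this.context = context;
    }

    @Override
    void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
        long ingestJobId = context.getJobId(); // context is already available
        // ... per-data-source work ...
    }
}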
@ -127,7 +126,7 @@ class ExtractIE extends Extract {
logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); //NON-NLS logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getBookmark.errMsg.errGettingBookmarks", NbBundle.getMessage(this.getClass(), "ExtractIE.getBookmark.errMsg.errGettingBookmarks",
this.getName())); this.getDisplayName()));
return; return;
} }
@ -171,7 +170,7 @@ class ExtractIE extends Extract {
} }
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, fav, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId()), ex); logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId()), ex);
} }
@ -199,12 +198,12 @@ class ExtractIE extends Extract {
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); //NON-NLS logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getName(), NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getDisplayName(),
fav.getName())); fav.getName()));
} catch (IndexOutOfBoundsException ex) { } catch (IndexOutOfBoundsException ex) {
logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. Unexpected format of the bookmark file: " + fav.getName(), ex); //NON-NLS logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. Unexpected format of the bookmark file: " + fav.getName(), ex); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getName(), NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getDisplayName(),
fav.getName())); fav.getName()));
} finally { } finally {
try { try {
@ -228,7 +227,7 @@ class ExtractIE extends Extract {
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error getting cookie files for IE"); //NON-NLS logger.log(Level.WARNING, "Error getting cookie files for IE"); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getName())); NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getDisplayName()));
return; return;
} }
@ -254,7 +253,7 @@ class ExtractIE extends Extract {
logger.log(Level.WARNING, "Error reading bytes of Internet Explorer cookie.", ex); //NON-NLS logger.log(Level.WARNING, "Error reading bytes of Internet Explorer cookie.", ex); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errReadingIECookie", NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errReadingIECookie",
this.getName(), cookiesFile.getName())); this.getDisplayName(), cookiesFile.getName()));
continue; continue;
} }
String cookieString = new String(t); String cookieString = new String(t);
@ -285,9 +284,9 @@ class ExtractIE extends Extract {
} }
try { try {
bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId() ), ex); logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId()), ex);
} }
} }
@ -298,8 +297,10 @@ class ExtractIE extends Extract {
/** /**
* Locates index.dat files, runs Pasco on them, and creates artifacts. * Locates index.dat files, runs Pasco on them, and creates artifacts.
*
* @param moduleTempDir The path to the module temp directory. * @param moduleTempDir The path to the module temp directory.
* @param moduleTempResultsDir The path to the module temp results directory. * @param moduleTempResultsDir The path to the module temp results
* directory.
*/ */
private void getHistory(String moduleTempDir, String moduleTempResultsDir) { private void getHistory(String moduleTempDir, String moduleTempResultsDir) {
logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); //NON-NLS logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); //NON-NLS
@ -308,7 +309,7 @@ class ExtractIE extends Extract {
final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); //NON-NLS final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); //NON-NLS
if (pascoRoot == null) { if (pascoRoot == null) {
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getName())); NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getDisplayName()));
logger.log(Level.SEVERE, "Error finding pasco program "); //NON-NLS logger.log(Level.SEVERE, "Error finding pasco program "); //NON-NLS
return; return;
} }
@ -329,7 +330,7 @@ class ExtractIE extends Extract {
indexFiles = fileManager.findFiles(dataSource, "index.dat"); //NON-NLS indexFiles = fileManager.findFiles(dataSource, "index.dat"); //NON-NLS
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errGettingHistFiles", this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errGettingHistFiles",
this.getName())); this.getDisplayName()));
logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); //NON-NLS logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); //NON-NLS
return; return;
} }
@ -363,7 +364,7 @@ class ExtractIE extends Extract {
} catch (IOException e) { } catch (IOException e) {
logger.log(Level.WARNING, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e); //NON-NLS logger.log(Level.WARNING, "Error while trying to write index.dat file " + datFile.getAbsolutePath(), e); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getName(), NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getDisplayName(),
datFile.getAbsolutePath())); datFile.getAbsolutePath()));
continue; continue;
} }
@ -391,7 +392,7 @@ class ExtractIE extends Extract {
} else { } else {
logger.log(Level.WARNING, "pasco execution failed on: {0}", filename); //NON-NLS logger.log(Level.WARNING, "pasco execution failed on: {0}", filename); //NON-NLS
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getName())); NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getDisplayName()));
} }
} }
@ -411,8 +412,7 @@ class ExtractIE extends Extract {
*/ */
@Messages({ @Messages({
"# {0} - sub module name", "# {0} - sub module name",
"ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history", "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history",})
})
private boolean executePasco(String indexFilePath, String outputFileName, String moduleTempResultsDir) { private boolean executePasco(String indexFilePath, String outputFileName, String moduleTempResultsDir) {
boolean success = true; boolean success = true;
try { try {
@ -443,7 +443,7 @@ class ExtractIE extends Extract {
// @@@ Investigate use of history versus cache as type. // @@@ Investigate use of history versus cache as type.
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.SEVERE, "Error executing Pasco to process Internet Explorer web history", ex); //NON-NLS logger.log(Level.SEVERE, "Error executing Pasco to process Internet Explorer web history", ex); //NON-NLS
addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getName())); addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getDisplayName()));
success = false; success = false;
} }
return success; return success;
@ -467,7 +467,7 @@ class ExtractIE extends Extract {
File file = new File(fnAbs); File file = new File(fnAbs);
if (file.exists() == false) { if (file.exists() == false) {
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getName(), NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getDisplayName(),
file.getName())); file.getName()));
logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath()); //NON-NLS logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath()); //NON-NLS
return bbartifacts; return bbartifacts;
@ -484,7 +484,7 @@ class ExtractIE extends Extract {
fileScanner = new Scanner(new FileInputStream(file.toString())); fileScanner = new Scanner(new FileInputStream(file.toString()));
} catch (FileNotFoundException ex) { } catch (FileNotFoundException ex) {
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getName(), NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getDisplayName(),
file.getName())); file.getName()));
logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); //NON-NLS logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); //NON-NLS
return bbartifacts; return bbartifacts;
@ -562,7 +562,7 @@ class ExtractIE extends Extract {
} catch (ParseException e) { } catch (ParseException e) {
this.addErrorMessage( this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsingEntry", NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsingEntry",
this.getName())); this.getDisplayName()));
logger.log(Level.WARNING, String.format("Error parsing Pasco results, may have partial processing of corrupt file (id=%d)", origFile.getId()), e); //NON-NLS logger.log(Level.WARNING, String.format("Error parsing Pasco results, may have partial processing of corrupt file (id=%d)", origFile.getId()), e); //NON-NLS
} }
} }
@ -589,9 +589,9 @@ class ExtractIE extends Extract {
RecentActivityExtracterModuleFactory.getModuleName(), user)); RecentActivityExtracterModuleFactory.getModuleName(), user));
try { try {
bbartifacts.add(createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, bbattributes)); bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, bbattributes));
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_HISTORY.getDisplayName(), origFile.getId() ), ex); logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_HISTORY.getDisplayName(), origFile.getId()), ex);
} }
} }
fileScanner.close(); fileScanner.close();
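For orientation, a sketch of pulling the URL and access time out of one line of Pasco output, using the dateFormatter pattern declared earlier in this class; the tab-delimited layout and the column positions are assumptions.

// Sketch only: Pasco's exact column layout here is an assumption.
// Requires: import java.text.ParseException; import java.text.SimpleDateFormat;
SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
String line = "URL\tVisited: user@http://example.com\t2021-11-04T10:17:06.000Z";
String[] lineBuff = line.split("\\t");
String url = lineBuff[1].substring(lineBuff[1].indexOf("@") + 1); // strip "Visited: user@"
Long ftime = null;
try {
    ftime = dateFormatter.parse(lineBuff[2]).getTime() / 1000; // epoch seconds
} catch (ParseException ex) {
    // corrupt row: keep ftime null and continue with the next line
}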

View File

@ -53,16 +53,13 @@ import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
/** /**
* Extract the LNK files from the jumplists and save them to ModuleOutput\RecentActivity\Jumplists * Extract the LNK files from the jumplists and save them to
* and then add them back into the case as a derived file. * ModuleOutput\RecentActivity\Jumplists and then add them back into the case as
* a derived file.
*/ */
final class ExtractJumpLists extends Extract { final class ExtractJumpLists extends Extract {
private static final Logger logger = Logger.getLogger(ExtractJumpLists.class.getName()); private static final Logger logger = Logger.getLogger(ExtractJumpLists.class.getName());
private IngestJobContext context;
private static final String JUMPLIST_TSK_COMMENT = "Jumplist File";
private static final String RA_DIR_NAME = "RecentActivity"; //NON-NLS private static final String RA_DIR_NAME = "RecentActivity"; //NON-NLS
private static final String AUTOMATIC_DESTINATIONS_FILE_DIRECTORY = "%/AppData/Roaming/Microsoft/Windows/Recent/AutomaticDestinations/"; private static final String AUTOMATIC_DESTINATIONS_FILE_DIRECTORY = "%/AppData/Roaming/Microsoft/Windows/Recent/AutomaticDestinations/";
private static final String JUMPLIST_DIR_NAME = "jumplists"; //NON-NLS private static final String JUMPLIST_DIR_NAME = "jumplists"; //NON-NLS
@ -70,26 +67,25 @@ final class ExtractJumpLists extends Extract {
private String moduleName; private String moduleName;
private FileManager fileManager; private FileManager fileManager;
private final IngestServices services = IngestServices.getInstance(); private final IngestServices services = IngestServices.getInstance();
private final IngestJobContext context;
@Messages({ @Messages({
"Jumplist_module_name=Windows Jumplist Extractor", "Jumplist_module_name=Windows Jumplist Analyzer",
"Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis." "Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis."
}) })
ExtractJumpLists() { ExtractJumpLists(IngestJobContext context) {
super(Bundle.Jumplist_module_name()); super(Bundle.Jumplist_module_name(), context);
this.context = context;
} }
@Override @Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.context = context;
moduleName = Bundle.Jumplist_module_name(); moduleName = Bundle.Jumplist_module_name();
fileManager = currentCase.getServices().getFileManager(); fileManager = currentCase.getServices().getFileManager();
long ingestJobId = context.getJobId(); long ingestJobId = context.getJobId();
String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId); String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId);
List<AbstractFile> jumpListFiles = extractJumplistFiles(dataSource, ingestJobId, baseRaTempPath); List<AbstractFile> jumpListFiles = extractJumplistFiles(dataSource, ingestJobId, baseRaTempPath);
if (jumpListFiles.isEmpty()) { if (jumpListFiles.isEmpty()) {
return; return;
} }
@ -102,8 +98,8 @@ final class ExtractJumpLists extends Extract {
String derivedPath = null; String derivedPath = null;
String baseRaModPath = RAImageIngestModule.getRAOutputPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId); String baseRaModPath = RAImageIngestModule.getRAOutputPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId);
for (AbstractFile jumplistFile : jumpListFiles) { for (AbstractFile jumplistFile : jumpListFiles) {
if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..") && if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..")
!jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) { && !jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) {
String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString(); String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString();
String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId();
@ -121,7 +117,9 @@ final class ExtractJumpLists extends Extract {
// notify listeners of new files and schedule for analysis // notify listeners of new files and schedule for analysis
progressBar.progress(String.format(Bundle.Jumplist_adding_extracted_files_msg(), derivedFiles.size())); progressBar.progress(String.format(Bundle.Jumplist_adding_extracted_files_msg(), derivedFiles.size()));
derivedFiles.forEach((derived) -> { services.fireModuleContentEvent(new ModuleContentEvent(derived)); }); derivedFiles.forEach((derived) -> {
services.fireModuleContentEvent(new ModuleContentEvent(derived));
});
context.addFilesToJob(derivedFiles); context.addFilesToJob(derivedFiles);
} }
@ -153,8 +151,8 @@ final class ExtractJumpLists extends Extract {
return jumpListFiles; return jumpListFiles;
} }
if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..") && if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..")
!jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) { && !jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) {
String fileName = jumpListFile.getName() + "_" + jumpListFile.getId(); String fileName = jumpListFile.getName() + "_" + jumpListFile.getId();
String jlFile = Paths.get(baseRaTempPath, fileName).toString(); String jlFile = Paths.get(baseRaTempPath, fileName).toString();
try { try {
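The slack/dot-entry/size filter above now appears verbatim in both process() and extractJumplistFiles(); a small helper with a hypothetical name would keep the two copies in sync:

// Hypothetical helper consolidating the filter used in both loops:
// skip slack files, the "." and ".." directory entries, and empty files.
private static boolean isProcessableJumplist(AbstractFile file) {
    return !file.getName().toLowerCase().contains("-slack")
            && !file.getName().equals("..")
            && !file.getName().equals(".")
            && file.getSize() > 0;
}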
@ -245,4 +243,3 @@ final class ExtractJumpLists extends Extract {
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2019 Basis Technology Corp. * Copyright 2019-2021 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -26,7 +26,6 @@ import java.util.List;
import java.util.logging.Level; import java.util.logging.Level;
import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.FilenameUtils;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
@ -40,7 +39,7 @@ import org.sleuthkit.datamodel.TskCoreException;
* Create OS INFO artifacts for the Operating Systems believed to be present on * Create OS INFO artifacts for the Operating Systems believed to be present on
* the data source. * the data source.
*/ */
@Messages({"ExtractOs.parentModuleName=Recent Activity", @Messages({"ExtractOs.displayName=OS Info Analyzer",
"ExtractOS_progressMessage=Checking for OS"}) "ExtractOS_progressMessage=Checking for OS"})
class ExtractOs extends Extract { class ExtractOs extends Extract {
@ -64,9 +63,15 @@ class ExtractOs extends Extract {
private static final String LINUX_UBUNTU_PATH = "/etc/lsb-release"; private static final String LINUX_UBUNTU_PATH = "/etc/lsb-release";
private Content dataSource; private Content dataSource;
private final IngestJobContext context;
ExtractOs(IngestJobContext context) {
super(Bundle.ExtractOs_displayName(), context);
this.context = context;
}
@Override @Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.dataSource = dataSource; this.dataSource = dataSource;
try { try {
progressBar.progress(Bundle.ExtractOS_progressMessage()); progressBar.progress(Bundle.ExtractOS_progressMessage());
@ -100,9 +105,9 @@ class ExtractOs extends Extract {
//if the os info program name is not empty create an os info artifact on the first of the files found //if the os info program name is not empty create an os info artifact on the first of the files found
Collection<BlackboardAttribute> bbattributes = new ArrayList<>(); Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME,
Bundle.ExtractOs_parentModuleName(), getRAModuleName(),
osType.getOsInfoLabel())); //NON-NLS osType.getOsInfoLabel())); //NON-NLS
postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_INFO, file, bbattributes)); postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, file, bbattributes));
} }
} }

View File

@ -65,8 +65,7 @@ final class ExtractPrefetch extends Extract {
private static final Logger logger = Logger.getLogger(ExtractPrefetch.class.getName()); private static final Logger logger = Logger.getLogger(ExtractPrefetch.class.getName());
private IngestJobContext context; private final IngestJobContext context;
private static final String PREFETCH_TSK_COMMENT = "Prefetch File"; private static final String PREFETCH_TSK_COMMENT = "Prefetch File";
private static final String PREFETCH_FILE_LOCATION = "/windows/prefetch"; private static final String PREFETCH_FILE_LOCATION = "/windows/prefetch";
private static final String PREFETCH_TOOL_FOLDER = "markmckinnon"; //NON-NLS private static final String PREFETCH_TOOL_FOLDER = "markmckinnon"; //NON-NLS
@ -80,18 +79,20 @@ final class ExtractPrefetch extends Extract {
private static final String PREFETCH_DIR_NAME = "prefetch"; //NON-NLS private static final String PREFETCH_DIR_NAME = "prefetch"; //NON-NLS
@Messages({ @Messages({
"ExtractPrefetch_module_name=Windows Prefetch Extractor", "ExtractPrefetch_module_name=Windows Prefetch Analyzer",
"# {0} - sub module name", "# {0} - sub module name",
"ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files" "ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files"
}) })
ExtractPrefetch() { ExtractPrefetch(IngestJobContext context) {
super(Bundle.ExtractPrefetch_module_name()); super(Bundle.ExtractPrefetch_module_name(), context);
this.context = context;
} }
/** /**
* Get the temp folder name. * Get the temp folder name.
* *
* @param dataSource Current data source * @param dataSource Current data source
*
* @return The folder name * @return The folder name
*/ */
private String getPrefetchTempFolder(Content dataSource) { private String getPrefetchTempFolder(Content dataSource) {
@ -99,9 +100,8 @@ final class ExtractPrefetch extends Extract {
} }
@Override @Override
void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { void process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
this.context = context;
long ingestJobId = context.getJobId(); long ingestJobId = context.getJobId();
String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + PREFETCH_DIR_NAME; String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + PREFETCH_DIR_NAME;
@ -290,12 +290,13 @@ final class ExtractPrefetch extends Extract {
continue; continue;
} }
/** /**
* A prefetch file is created when a program is run and the superfetch service collects data about the first 10 * A prefetch file is created when a program is run and the
* seconds of the run; the trace data is then written to a new prefetch file or merged with an existing prefetch file. * superfetch service collects data about the first 10 seconds
* If the prefetch file gets deleted for some reason, then a new one will be created. See 7500 in JIRA for more * of the run; the trace data is then written to a new prefetch
* information. * file or merged with an existing prefetch file. If the
* prefetch file gets deleted for some reason, then a new one
* will be created. See 7500 in JIRA for more information.
*/ */
AbstractFile pfAbstractFile = null; AbstractFile pfAbstractFile = null;
try { try {
@ -313,20 +314,20 @@ final class ExtractPrefetch extends Extract {
// only add prefetch file entries that have an actual date associated with them // only add prefetch file entries that have an actual date associated with them
Collection<BlackboardAttribute> blkBrdAttributes = Arrays.asList( Collection<BlackboardAttribute> blkBrdAttributes = Arrays.asList(
new BlackboardAttribute( new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getDisplayName(),
applicationName),//NON-NLS applicationName),//NON-NLS
new BlackboardAttribute( new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getName(), filePath), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getDisplayName(), filePath),
new BlackboardAttribute( new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getDisplayName(),
executionTime), executionTime),
new BlackboardAttribute( new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, getName(), Integer.valueOf(timesProgramRun)), BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, getDisplayName(), Integer.valueOf(timesProgramRun)),
new BlackboardAttribute( new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), PREFETCH_TSK_COMMENT)); BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getDisplayName(), PREFETCH_TSK_COMMENT));
try { try {
BlackboardArtifact blkBrdArt = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, pfAbstractFile, blkBrdAttributes); BlackboardArtifact blkBrdArt = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_PROG_RUN, pfAbstractFile, blkBrdAttributes);
blkBrdArtList.add(blkBrdArt); blkBrdArtList.add(blkBrdArt);
BlackboardArtifact associatedBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), filePath, blkBrdArt, dataSource); BlackboardArtifact associatedBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), filePath, blkBrdArt, dataSource);
if (associatedBbArtifact != null) { if (associatedBbArtifact != null) {

Some files were not shown because too many files have changed in this diff.