diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java index a1289a1b85..28065dacbc 100644 --- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java +++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2015-2019 Basis Technology Corp. + * Copyright 2015-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -56,7 +56,7 @@ public final class Blackboard implements Closeable { @Deprecated public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException { try { - Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, ""); + Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "", null); } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) { throw new BlackboardException(ex.getMessage(), ex); } @@ -117,6 +117,7 @@ public final class Blackboard implements Closeable { * @deprecated Do not use. */ @Deprecated + @Override public void close() throws IOException { /* * No-op maintained for backwards compatibility. 
Clients should not diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java index 85dc90ea42..b1cac61e4c 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/CaseEventListener.java @@ -761,8 +761,7 @@ public final class CaseEventListener implements PropertyChangeListener { BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, score, null, Bundle.CaseEventsListener_prevExists_text(), justification, attributesForNewArtifact, osAccountInstance.getDataSource().getId()).getAnalysisResult(); try { - // index the artifact for keyword search - blackboard.postArtifact(newAnalysisResult, MODULE_NAME); + blackboard.postArtifact(newAnalysisResult, MODULE_NAME, null); break; } catch (Blackboard.BlackboardException ex) { LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newAnalysisResult.getArtifactID(), ex); //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java index a036d22073..6af51f69af 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java @@ -87,7 +87,7 @@ public class IngestEventsListener { private final PropertyChangeListener pcl1 = new IngestModuleEventListener(); private final PropertyChangeListener pcl2 = new IngestJobEventListener(); final Collection recentlyAddedCeArtifacts = new LinkedHashSet<>(); - + static final int MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE = 10; static final int MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION = 20; @@ -195,7 +195,7 @@ public class IngestEventsListener { public synchronized 
static void setFlagSeenDevices(boolean value) { flagSeenDevices = value; } - + /** * Configure the listener to flag unique apps or not. * @@ -204,7 +204,7 @@ public class IngestEventsListener { public synchronized static void setFlagUniqueArtifacts(boolean value) { flagUniqueArtifacts = value; } - + /** * Are unique apps being flagged? * @@ -256,10 +256,12 @@ public class IngestEventsListener { } /** - * Create a "previously seen" hit for a device which was previously seen - * in the central repository. NOTE: Artifacts that are too common will be skipped. + * Create a "previously seen" hit for a device which was previously seen in + * the central repository. NOTE: Artifacts that are too common will be + * skipped. * - * @param originalArtifact the artifact to create the "previously seen" item for + * @param originalArtifact the artifact to create the "previously seen" item + * for * @param caseDisplayNames the case names the artifact was previously seen * in * @param aType The correlation type. @@ -271,19 +273,19 @@ public class IngestEventsListener { "IngestEventsListener.prevCount.text=Number of previous {0}: {1}"}) static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact, List caseDisplayNames, CorrelationAttributeInstance.Type aType, String value) { - + // calculate score Score score; int numCases = caseDisplayNames.size(); if (numCases <= MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE) { score = Score.SCORE_LIKELY_NOTABLE; } else if (numCases > MAX_NUM_PREVIOUS_CASES_FOR_LIKELY_NOTABLE_SCORE && numCases <= MAX_NUM_PREVIOUS_CASES_FOR_PREV_SEEN_ARTIFACT_CREATION) { - score = Score.SCORE_NONE; + score = Score.SCORE_NONE; } else { // don't make an Analysis Result, the artifact is too common. 
return; } - + String prevCases = caseDisplayNames.stream().distinct().collect(Collectors.joining(",")); String justification = "Previously seen in cases " + prevCases; Collection attributesForNewArtifact = Arrays.asList(new BlackboardAttribute( @@ -297,40 +299,42 @@ public class IngestEventsListener { value), new BlackboardAttribute( TSK_OTHER_CASES, MODULE_NAME, - prevCases)); + prevCases)); makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_SEEN, originalArtifact, attributesForNewArtifact, Bundle.IngestEventsListener_prevExists_text(), score, justification); } - + /** - * Create a "previously unseen" hit for an application which was never seen in - * the central repository. + * Create a "previously unseen" hit for an application which was never seen + * in the central repository. * - * @param originalArtifact the artifact to create the "previously unseen" item - * for + * @param originalArtifact the artifact to create the "previously unseen" + * item for * @param aType The correlation type. * @param value The correlation value. */ static private void makeAndPostPreviouslyUnseenArtifact(BlackboardArtifact originalArtifact, CorrelationAttributeInstance.Type aType, String value) { - Collection attributesForNewArtifact = Arrays.asList( + Collection attributesForNewArtifact = Arrays.asList( new BlackboardAttribute( - TSK_CORRELATION_TYPE, MODULE_NAME, - aType.getDisplayName()), + TSK_CORRELATION_TYPE, MODULE_NAME, + aType.getDisplayName()), new BlackboardAttribute( - TSK_CORRELATION_VALUE, MODULE_NAME, - value)); + TSK_CORRELATION_VALUE, MODULE_NAME, + value)); makeAndPostArtifact(BlackboardArtifact.Type.TSK_PREVIOUSLY_UNSEEN, originalArtifact, attributesForNewArtifact, "", Score.SCORE_LIKELY_NOTABLE, "This application has not been previously seen before"); - } - + } + /** * Make an artifact to flag the passed in artifact. * * @param newArtifactType Type of artifact to create. 
* @param originalArtifact Artifact in current case we want to flag * @param attributesForNewArtifact Attributes to assign to the new artifact - * @param configuration The configuration to be specified for the new artifact hit - * @param score sleuthkit.datamodel.Score to be assigned to this artifact + * @param configuration The configuration to be specified for the + * new artifact hit + * @param score sleuthkit.datamodel.Score to be assigned + * to this artifact * @param justification Justification string */ private static void makeAndPostArtifact(BlackboardArtifact.Type newArtifactType, BlackboardArtifact originalArtifact, Collection attributesForNewArtifact, String configuration, @@ -341,14 +345,13 @@ public class IngestEventsListener { // Create artifact if it doesn't already exist. BlackboardArtifact.ARTIFACT_TYPE type = BlackboardArtifact.ARTIFACT_TYPE.fromID(newArtifactType.getTypeID()); if (!blackboard.artifactExists(originalArtifact, type, attributesForNewArtifact)) { - BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult( - newArtifactType, score, + BlackboardArtifact newArtifact = originalArtifact.newAnalysisResult( + newArtifactType, score, null, configuration, justification, attributesForNewArtifact) .getAnalysisResult(); try { - // index the artifact for keyword search - blackboard.postArtifact(newArtifact, MODULE_NAME); + blackboard.postArtifact(newArtifact, MODULE_NAME, null); } catch (Blackboard.BlackboardException ex) { LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newArtifact.getArtifactID(), ex); //NON-NLS } @@ -549,14 +552,14 @@ public class IngestEventsListener { for (BlackboardArtifact bbArtifact : bbArtifacts) { // makeCorrAttrToSave will filter out artifacts which should not be sources of CR data. 
List convertedArtifacts = new ArrayList<>(); - if (bbArtifact instanceof DataArtifact){ - convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact)bbArtifact)); - } + if (bbArtifact instanceof DataArtifact) { + convertedArtifacts.addAll(CorrelationAttributeUtil.makeCorrAttrsToSave((DataArtifact) bbArtifact)); + } for (CorrelationAttributeInstance eamArtifact : convertedArtifacts) { try { // Only do something with this artifact if it's unique within the job if (recentlyAddedCeArtifacts.add(eamArtifact.toString())) { - + // Get a list of instances for a given value (hash, email, etc.) List previousOccurrences = new ArrayList<>(); // check if we are flagging things @@ -591,7 +594,7 @@ public class IngestEventsListener { continue; } } - + // flag previously seen devices and communication accounts (emails, phones, etc) if (flagPreviousItemsEnabled && !previousOccurrences.isEmpty() && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID @@ -605,12 +608,12 @@ public class IngestEventsListener { List caseDisplayNames = getCaseDisplayNames(previousOccurrences); makeAndPostPreviousSeenArtifact(bbArtifact, caseDisplayNames, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); } - + // flag previously unseen apps and domains if (flagUniqueItemsEnabled && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.INSTALLED_PROGS_TYPE_ID || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.DOMAIN_TYPE_ID)) { - + if (previousOccurrences.isEmpty()) { makeAndPostPreviouslyUnseenArtifact(bbArtifact, eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue()); } @@ -635,7 +638,7 @@ public class IngestEventsListener { } // DATA_ADDED } } - + /** * Gets case display names for a list of CorrelationAttributeInstance. 
* @@ -666,5 +669,5 @@ public class IngestEventsListener { } } return caseNames; - } + } } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java index 49be6d3d6b..df6bd58801 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java @@ -87,6 +87,7 @@ final class CentralRepoIngestModule implements FileIngestModule { private Blackboard blackboard; private final boolean createCorrelationProperties; private final boolean flagUniqueArtifacts; + private IngestJobContext context; /** * Instantiate the Central Repository ingest module. @@ -229,6 +230,8 @@ final class CentralRepoIngestModule implements FileIngestModule { }) @Override public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; + IngestEventsListener.incrementCorrelationEngineModuleCount(); /* @@ -256,7 +259,7 @@ final class CentralRepoIngestModule implements FileIngestModule { } if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagUniqueArtifacts()) { IngestEventsListener.setFlagUniqueArtifacts(flagUniqueArtifacts); - } + } if (CentralRepository.isEnabled() == false) { /* @@ -360,12 +363,12 @@ final class CentralRepoIngestModule implements FileIngestModule { // Create artifact if it doesn't already exist. 
if (!blackboard.artifactExists(abstractFile, TSK_PREVIOUSLY_NOTABLE, attributes)) { BlackboardArtifact tifArtifact = abstractFile.newAnalysisResult( - BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE, + BlackboardArtifact.Type.TSK_PREVIOUSLY_NOTABLE, Score.SCORE_NOTABLE, null, Bundle.CentralRepoIngestModule_prevTaggedSet_text(), justification, attributes) .getAnalysisResult(); try { // index the artifact for keyword search - blackboard.postArtifact(tifArtifact, MODULE_NAME); + blackboard.postArtifact(tifArtifact, MODULE_NAME, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java index 39c80abefc..ddece9731c 100644 --- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java +++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModuleFactory.java @@ -64,7 +64,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter { return Version.getVersion(); } - @Override + @Override public boolean isFileIngestModuleFactory() { return true; } @@ -83,7 +83,7 @@ public class CentralRepoIngestModuleFactory extends IngestModuleFactoryAdapter { } throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings"); } - + @Override public boolean hasGlobalSettingsPanel() { return true; diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java index e1181f80cd..c65b92b8d3 100644 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java 
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/ContactArtifactViewer.java @@ -148,7 +148,7 @@ public class ContactArtifactViewer extends javax.swing.JPanel implements Artifac @Override public Component getComponent() { // Slap a vertical scrollbar on the panel. - return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_NEVER); + return new JScrollPane(this, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED, JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageArtifactWorker.java b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageArtifactWorker.java index 14d06dd805..909d41c08b 100755 --- a/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageArtifactWorker.java +++ b/Core/src/org/sleuthkit/autopsy/contentviewers/artifactviewers/MessageArtifactWorker.java @@ -149,7 +149,8 @@ class MessageArtifactWorker extends SwingWorker getAssociatedArtifact(final BlackboardArtifact artifact) throws TskCoreException { BlackboardAttribute attribute = artifact.getAttribute(TSK_ASSOCIATED_TYPE); if (attribute != null) { - return Optional.of(artifact.getSleuthkitCase().getArtifactByArtifactId(attribute.getValueLong())); + //in the context of the Message content viewer the associated artifact will always be a data artifact + return Optional.of(artifact.getSleuthkitCase().getBlackboard().getDataArtifactById(attribute.getValueLong())); } return Optional.empty(); } diff --git a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYCallsFileParser.java b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYCallsFileParser.java index 87d9fee141..2e3bb1d8d7 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYCallsFileParser.java +++ b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYCallsFileParser.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 
Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,7 +24,6 @@ import java.util.Collection; import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -188,10 +187,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { switch (xryKey) { case TEL: case NUMBER: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + // Apply namespace or direction if (xryNamespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) { callerId = pair.getValue(); @@ -206,30 +205,30 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { // Although confusing, as these are also 'name spaces', it appears // later versions of XRY just made these standardized lines. case TO: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + calleeList.add(pair.getValue()); break; case FROM: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + callerId = pair.getValue(); break; case TIME: try { - //Tranform value to seconds since epoch - long dateTimeSinceEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue()); - startTime = dateTimeSinceEpoch; - } catch (DateTimeParseException ex) { - logger.log(Level.WARNING, String.format("[XRY DSP] Assumption" - + " about the date time formatting of call logs is " - + "not right. 
Here is the value [ %s ]", pair.getValue()), ex); - } - break; + //Tranform value to seconds since epoch + long dateTimeSinceEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue()); + startTime = dateTimeSinceEpoch; + } catch (DateTimeParseException ex) { + logger.log(Level.WARNING, String.format("[XRY DSP] Assumption" + + " about the date time formatting of call logs is " + + "not right. Here is the value [ %s ]", pair.getValue()), ex); + } + break; case DIRECTION: String directionString = pair.getValue().toLowerCase(); if (directionString.equals("incoming")) { @@ -263,7 +262,6 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { // Make sure we have the required fields, otherwise the CommHelper will // complain about illegal arguments. - // These are all the invalid combinations. if (callerId == null && calleeList.isEmpty() || direction == CommunicationDirection.INCOMING && callerId == null @@ -288,10 +286,10 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { // it would have been a valid combination. 
if (callerId != null) { try { - currentCase.getCommunicationsManager().createAccountFileInstance( - Account.Type.PHONE, callerId, PARSER_NAME, parent); + currentCase.getCommunicationsManager().createAccountFileInstance( + Account.Type.PHONE, callerId, PARSER_NAME, parent, null); } catch (InvalidAccountIDException ex) { - logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex); + logger.log(Level.WARNING, String.format("Invalid account identifier %s", callerId), ex); } otherAttributes.add(new BlackboardAttribute( @@ -301,12 +299,11 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { for (String phone : calleeList) { try { - currentCase.getCommunicationsManager().createAccountFileInstance( - Account.Type.PHONE, phone, PARSER_NAME, parent); + currentCase.getCommunicationsManager().createAccountFileInstance( + Account.Type.PHONE, phone, PARSER_NAME, parent, null); } catch (InvalidAccountIDException ex) { logger.log(Level.WARNING, String.format("Invalid account identifier %s", phone), ex); } - otherAttributes.add(new BlackboardAttribute( BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, @@ -315,17 +312,17 @@ final class XRYCallsFileParser extends AbstractSingleEntityParser { if (!otherAttributes.isEmpty()) { BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), otherAttributes); - - currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME); + + currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null); } } else { // Otherwise we can safely use the helper. 
CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper( - currentCase, PARSER_NAME, parent, Account.Type.PHONE); + currentCase, PARSER_NAME, parent, Account.Type.PHONE, null); helper.addCalllog(direction, callerId, calleeList, startTime, endTime, callType, otherAttributes); } } -} \ No newline at end of file +} diff --git a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYContactsFileParser.java b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYContactsFileParser.java index 585de02389..a961abfe21 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYContactsFileParser.java +++ b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYContactsFileParser.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -24,7 +24,6 @@ import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; import static org.sleuthkit.autopsy.datasourceprocessors.xry.AbstractSingleEntityParser.PARSER_NAME; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Account; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -136,7 +135,7 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser { // complain about illegal arguments. 
if (phoneNumber != null || homePhoneNumber != null || mobilePhoneNumber != null || hasAnEmail) { CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper( - currentCase, PARSER_NAME, parent, Account.Type.DEVICE); + currentCase, PARSER_NAME, parent, Account.Type.DEVICE, null); helper.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, additionalAttributes); @@ -144,8 +143,8 @@ final class XRYContactsFileParser extends AbstractSingleEntityParser { // Just create an artifact with the attributes that we do have. if (!additionalAttributes.isEmpty()) { BlackboardArtifact artifact = parent.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), additionalAttributes); - - currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME); + + currentCase.getBlackboard().postArtifact(artifact, PARSER_NAME, null); } } } diff --git a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYMessagesFileParser.java b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYMessagesFileParser.java index de56bb2d47..c51deab219 100755 --- a/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYMessagesFileParser.java +++ b/Core/src/org/sleuthkit/autopsy/datasourceprocessors/xry/XRYMessagesFileParser.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -95,6 +95,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * Indicates if the display name of the XRY key is a recognized type. * * @param name + * * @return */ public static boolean contains(String name) { @@ -114,6 +115,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * contains() before hand. 
* * @param name + * * @return */ public static XryKey fromDisplayName(String name) { @@ -149,6 +151,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * type. * * @param xryNamespace + * * @return */ public static boolean contains(String xryNamespace) { @@ -169,6 +172,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * contains() before hand. * * @param xryNamespace + * * @return */ public static XryNamespace fromDisplayName(String xryNamespace) { @@ -206,6 +210,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * Indicates if the display name of the XRY key is a recognized type. * * @param name + * * @return */ public static boolean contains(String name) { @@ -225,6 +230,7 @@ final class XRYMessagesFileParser implements XRYFileParser { * contains() before hand. * * @param name + * * @return */ public static XryMetaKey fromDisplayName(String name) { @@ -253,11 +259,13 @@ final class XRYMessagesFileParser implements XRYFileParser { * assumption is correct, otherwise an error will appear in the logs. * * @param reader The XRYFileReader that reads XRY entities from the - * Message-SMS report. + * Message-SMS report. * @param parent The parent Content to create artifacts from. - * @throws IOException If an I/O error is encountered during report reading + * + * @throws IOException If an I/O error is encountered during report + * reading * @throws TskCoreException If an error during artifact creation is - * encountered. + * encountered. 
*/ @Override public void parse(XRYFileReader reader, Content parent, SleuthkitCase currentCase) throws IOException, TskCoreException, BlackboardException { @@ -270,10 +278,10 @@ final class XRYMessagesFileParser implements XRYFileParser { while (reader.hasNextEntity()) { String xryEntity = reader.nextEntity(); - + // This call will combine all segmented text into a single key value pair List pairs = getXRYKeyValuePairs(xryEntity, reader, referenceNumbersSeen); - + // Transform all the data from XRY land into the appropriate CommHelper // data types. final String messageType = PARSER_NAME; @@ -286,8 +294,8 @@ final class XRYMessagesFileParser implements XRYFileParser { String text = null; final String threadId = null; final Collection otherAttributes = new ArrayList<>(); - - for(XRYKeyValuePair pair : pairs) { + + for (XRYKeyValuePair pair : pairs) { XryNamespace namespace = XryNamespace.NONE; if (XryNamespace.contains(pair.getNamespace())) { namespace = XryNamespace.fromDisplayName(pair.getNamespace()); @@ -298,55 +306,55 @@ final class XRYMessagesFileParser implements XRYFileParser { switch (key) { case TEL: case NUMBER: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + // Apply namespace or direction - if(namespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) { + if (namespace == XryNamespace.FROM || direction == CommunicationDirection.INCOMING) { senderId = pair.getValue(); - } else if(namespace == XryNamespace.TO || direction == CommunicationDirection.OUTGOING) { + } else if (namespace == XryNamespace.TO || direction == CommunicationDirection.OUTGOING) { recipientIdsList.add(pair.getValue()); } else { try { currentCase.getCommunicationsManager().createAccountFileInstance( - Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent); + Account.Type.PHONE, pair.getValue(), PARSER_NAME, parent, null); } catch (InvalidAccountIDException ex) { logger.log(Level.WARNING, 
String.format("Invalid account identifier %s", pair.getValue()), ex); } otherAttributes.add(new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, - PARSER_NAME, pair.getValue())); + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, + PARSER_NAME, pair.getValue())); } break; // Although confusing, as these are also 'name spaces', it appears // later versions of XRY just made these standardized lines. case FROM: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + senderId = pair.getValue(); break; case TO: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - + recipientIdsList.add(pair.getValue()); break; case TIME: try { - //Tranform value to seconds since epoch - long dateTimeSinceInEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue()); - dateTime = dateTimeSinceInEpoch; - } catch (DateTimeParseException ex) { - logger.log(Level.WARNING, String.format("[%s] Assumption" - + " about the date time formatting of messages is " - + "not right. Here is the pair [ %s ]", PARSER_NAME, pair), ex); - } - break; + //Tranform value to seconds since epoch + long dateTimeSinceInEpoch = XRYUtils.calculateSecondsSinceEpoch(pair.getValue()); + dateTime = dateTimeSinceInEpoch; + } catch (DateTimeParseException ex) { + logger.log(Level.WARNING, String.format("[%s] Assumption" + + " about the date time formatting of messages is " + + "not right. 
Here is the pair [ %s ]", PARSER_NAME, pair), ex); + } + break; case TYPE: switch (normalizedValue) { case "incoming": @@ -406,11 +414,11 @@ final class XRYMessagesFileParser implements XRYFileParser { } break; case SERVICE_CENTER: - if(!XRYUtils.isPhoneValid(pair.getValue())) { + if (!XRYUtils.isPhoneValid(pair.getValue())) { continue; } - - otherAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, + + otherAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, PARSER_NAME, pair.getValue())); break; default: @@ -427,18 +435,18 @@ final class XRYMessagesFileParser implements XRYFileParser { } } } - - CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper( - currentCase, PARSER_NAME, parent, Account.Type.PHONE); - helper.addMessage(messageType, direction, senderId, recipientIdsList, - dateTime, readStatus, subject, text, threadId, otherAttributes); + CommunicationArtifactsHelper helper = new CommunicationArtifactsHelper( + currentCase, PARSER_NAME, parent, Account.Type.PHONE, null); + + helper.addMessage(messageType, direction, senderId, recipientIdsList, + dateTime, readStatus, subject, text, threadId, otherAttributes); } } /** - * Extracts all pairs from the XRY Entity. This function - * will unify any segmented text, if need be. + * Extracts all pairs from the XRY Entity. This function will unify any + * segmented text, if need be. */ private List getXRYKeyValuePairs(String xryEntity, XRYFileReader reader, Set referenceValues) throws IOException { @@ -508,10 +516,13 @@ final class XRYMessagesFileParser implements XRYFileParser { * Builds up segmented message entities so that the text is unified for a * single artifact. * - * @param reader File reader that is producing XRY entities. - * @param referenceNumbersSeen All known references numbers up until this point. - * @param xryEntity The source XRY entity. + * @param reader File reader that is producing XRY entities. 
+ * @param referenceNumbersSeen All known references numbers up until this + * point. + * @param xryEntity The source XRY entity. + * * @return + * * @throws IOException */ private String getSegmentedText(String[] xryEntity, XRYFileReader reader, @@ -604,7 +615,8 @@ final class XRYMessagesFileParser implements XRYFileParser { * Extracts the value of the XRY meta key, if any. * * @param xryLines XRY entity to extract from. - * @param metaKey The key type to extract. + * @param metaKey The key type to extract. + * * @return */ private Optional getMetaKeyValue(String[] xryLines, XryMetaKey metaKey) { @@ -629,10 +641,12 @@ final class XRYMessagesFileParser implements XRYFileParser { /** * Extracts the ith XRY Key Value pair in the XRY Entity. * - * The total number of pairs can be determined via getCountOfKeyValuePairs(). + * The total number of pairs can be determined via + * getCountOfKeyValuePairs(). * * @param xryLines XRY entity. - * @param index The requested Key Value pair. + * @param index The requested Key Value pair. 
+ * * @return */ private Optional getKeyValuePairByIndex(String[] xryLines, int index) { @@ -672,4 +686,4 @@ final class XRYMessagesFileParser implements XRYFileParser { return Optional.empty(); } -} \ No newline at end of file +} diff --git a/Core/src/org/sleuthkit/autopsy/discovery/search/ResultsSorter.java b/Core/src/org/sleuthkit/autopsy/discovery/search/ResultsSorter.java index 97de8c6691..e1d27fda57 100644 --- a/Core/src/org/sleuthkit/autopsy/discovery/search/ResultsSorter.java +++ b/Core/src/org/sleuthkit/autopsy/discovery/search/ResultsSorter.java @@ -388,10 +388,10 @@ public class ResultsSorter implements Comparator { Bundle.FileSorter_SortingMethod_keywordlist_displayName()), // Sort alphabetically by list of keyword list names found BY_FULL_PATH(new ArrayList<>(), Bundle.FileSorter_SortingMethod_fullPath_displayName()), // Sort alphabetically by path - BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()), - BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()), - BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_downloads_displayName()), - BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName()); + BY_DOMAIN_NAME(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_domain_displayName()), + BY_PAGE_VIEWS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_pageViews_displayName()), + BY_DOWNLOADS(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), 
Bundle.FileSorter_SortingMethod_downloads_displayName()), + BY_LAST_ACTIVITY(Arrays.asList(new DiscoveryAttributes.DomainCategoryAttribute(), new DiscoveryAttributes.PreviouslyNotableAttribute()), Bundle.FileSorter_SortingMethod_activity_displayName()); private final String displayName; private final List requiredAttributes; diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java index b3042fb4a5..746b76f61e 100644 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java @@ -73,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule { // Skip anything other than actual file system files. if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) - || (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) - || (file.isFile() == false)) { + || (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) + || (file.isFile() == false)) { return IngestModule.ProcessResult.OK; } @@ -111,10 +111,13 @@ class SampleFileIngestModule implements FileIngestModule { addToBlackboardPostCount(context.getJobId(), 1L); /* - * post the artifact which will index the artifact for keyword - * search, and fire an event to notify UI of this new artifact + * Post the artifact to the blackboard. Doing so will cause events + * to be published that will trigger additional analysis, if + * applicable. For example, the creation of timeline events, + * indexing of the artifact for keyword search, and analysis by the + * data artifact ingest modules if the artifact is a data artifact. 
*/ - file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName()); + file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName(), context.getJobId()); return IngestModule.ProcessResult.OK; diff --git a/Core/src/org/sleuthkit/autopsy/guiutils/DurationCellRenderer.java b/Core/src/org/sleuthkit/autopsy/guiutils/DurationCellRenderer.java index 9ab4ec281b..cbc54aadc3 100644 --- a/Core/src/org/sleuthkit/autopsy/guiutils/DurationCellRenderer.java +++ b/Core/src/org/sleuthkit/autopsy/guiutils/DurationCellRenderer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2015-2017 Basis Technology Corp. + * Copyright 2015-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -21,66 +21,76 @@ package org.sleuthkit.autopsy.guiutils; import java.awt.Component; import java.time.Duration; import javax.swing.JTable; -import static javax.swing.SwingConstants.CENTER; /** * A JTable cell renderer that renders a duration represented as a long as a * string with days, hours, minutes, and seconds components. It center-aligns * cell content and grays out the cell if the table is disabled. 
*/ -public class DurationCellRenderer extends GrayableCellRenderer { +public final class DurationCellRenderer extends GrayableCellRenderer { private static final long serialVersionUID = 1L; + private static final char UNIT_SEPARATOR_CHAR = ':'; public DurationCellRenderer() { - setHorizontalAlignment(CENTER); + setHorizontalAlignment(LEFT); } @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { if (value instanceof Long) { - { - setText(DurationCellRenderer.longToDurationString((long) value)); - } + setText(DurationCellRenderer.longToDurationString((long) value)); } grayCellIfTableNotEnabled(table, isSelected); return this; } + public static char getUnitSeperator() { + return UNIT_SEPARATOR_CHAR; + } + /** * Convert a duration represented by a long to a human readable string with * with days, hours, minutes, and seconds components. * - * @param duration - the representation of the duration in long form + * @param duration - The representation of the duration in long form. * - * @return - the representation of the duration in String form. + * @return - The representation of the duration in String form. 
*/ public static String longToDurationString(long duration) { Duration d = Duration.ofMillis(duration); if (d.isNegative()) { - d = Duration.ofMillis(-duration); + d = Duration.ofMillis(0); //it being 0 for a few seconds seems preferable to it counting down to 0 then back up from 0 } - - String result; long days = d.toDays(); long hours = d.minusDays(days).toHours(); long minutes = d.minusDays(days).minusHours(hours).toMinutes(); long seconds = d.minusDays(days).minusHours(hours).minusMinutes(minutes).getSeconds(); - - if (minutes > 0) { - if (hours > 0) { - if (days > 0) { - result = days + " d " + hours + " h " + minutes + " m " + seconds + " s"; - } else { - result = hours + " h " + minutes + " m " + seconds + " s"; - } - } else { - result = minutes + " m " + seconds + " s"; - } - } else { - result = seconds + " s"; + if (days < 0) { + days = 0; } - return result; + if (hours < 0) { + hours = 0; + } + if (minutes < 0) { + minutes = 0; + } + if (seconds < 0) { + seconds = 0; + } + StringBuilder results = new StringBuilder(12); + if (days < 99) { + results.append(String.format("%02d", days)); + } else { + results.append(days); //in the off chance something has been running for over 99 days lets allow it to stand out a bit by having as many characters as it needs + } + results.append(UNIT_SEPARATOR_CHAR); + results.append(String.format("%02d", hours)); + results.append(UNIT_SEPARATOR_CHAR); + results.append(String.format("%02d", minutes)); + results.append(UNIT_SEPARATOR_CHAR); + results.append(String.format("%02d", seconds)); + return results.toString(); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataArtifactIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/DataArtifactIngestPipeline.java index 824d7d7fe9..865b29cb59 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataArtifactIngestPipeline.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataArtifactIngestPipeline.java @@ -23,27 +23,28 @@ import java.util.Optional; import 
org.sleuthkit.datamodel.DataArtifact; /** - * A pipeline of data artifact ingest modules used to execute data artifact + * A pipeline of data artifact ingest modules used to perform data artifact * ingest tasks for an ingest job. */ -final class DataArtifactIngestPipeline extends IngestTaskPipeline { +final class DataArtifactIngestPipeline extends IngestPipeline { /** - * Constructs a pipeline of data artifact ingest modules used to execute + * Constructs a pipeline of data artifact ingest modules used to perform * data artifact ingest tasks for an ingest job. * - * @param ingestJobPipeline The ingest job pipeline that owns this ingest - * task pipeline. - * @param moduleTemplates The ingest module templates that define this - * pipeline. May be an empty list. + * @param ingestJobExecutor The ingest job executor for this pipeline. + * @param moduleTemplates The ingest module templates to be used to + * construct the ingest modules for this pipeline. + * May be an empty list if this type of pipeline is + * not needed for the ingest job. 
*/ - DataArtifactIngestPipeline(IngestJobPipeline ingestJobPipeline, List moduleTemplates) { - super(ingestJobPipeline, moduleTemplates); + DataArtifactIngestPipeline(IngestJobExecutor ingestJobExecutor, List moduleTemplates) { + super(ingestJobExecutor, moduleTemplates); } @Override Optional> acceptModuleTemplate(IngestModuleTemplate template) { - Optional> module = Optional.empty(); + Optional> module = Optional.empty(); if (template.isDataArtifactIngestModuleTemplate()) { DataArtifactIngestModule ingestModule = template.createDataArtifactIngestModule(); module = Optional.of(new DataArtifactIngestPipelineModule(ingestModule, template.getModuleName())); @@ -52,18 +53,18 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline { + static final class DataArtifactIngestPipelineModule extends IngestPipeline.PipelineModule { private final DataArtifactIngestModule module; @@ -80,7 +81,7 @@ final class DataArtifactIngestPipeline extends IngestTaskPipeline { +final class DataSourceIngestPipeline extends IngestPipeline { private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName()); private static final IngestManager ingestManager = IngestManager.getInstance(); @@ -38,17 +38,19 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline moduleTemplates) { - super(ingestJobPipeline, moduleTemplates); + DataSourceIngestPipeline(IngestJobExecutor ingestJobExecutor, List moduleTemplates) { + super(ingestJobExecutor, moduleTemplates); } @Override - Optional> acceptModuleTemplate(IngestModuleTemplate template) { - Optional> module = Optional.empty(); + Optional> acceptModuleTemplate(IngestModuleTemplate template) { + Optional> module = Optional.empty(); if (template.isDataSourceIngestModuleTemplate()) { DataSourceIngestModule ingestModule = template.createDataSourceIngestModule(); module = Optional.of(new DataSourcePipelineModule(ingestModule, template.getModuleName())); @@ -69,7 +71,7 @@ final class DataSourceIngestPipeline 
extends IngestTaskPipeline { + static final class DataSourcePipelineModule extends IngestPipeline.PipelineModule { private final DataSourceIngestModule module; @@ -83,18 +85,18 @@ final class DataSourceIngestPipeline extends IngestTaskPipeline { +final class FileIngestPipeline extends IngestPipeline { private static final int FILE_BATCH_SIZE = 500; private static final String SAVE_RESULTS_ACTIVITY = Bundle.FileIngestPipeline_SaveResults_Activity(); private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName()); private static final IngestManager ingestManager = IngestManager.getInstance(); - private final IngestJobPipeline ingestJobPipeline; + private final IngestJobExecutor ingestJobExecutor; private final List fileBatch; /** * Constructs a pipeline of file ingest modules for executing file ingest * tasks for an ingest job. * - * @param ingestJobPipeline The ingest job pipeline that owns this pipeline. - * @param moduleTemplates The ingest module templates that define this - * pipeline. + * @param ingestJobExecutor The ingest job executor for this pipeline. + * @param moduleTemplates The ingest module templates to be used to + * construct the ingest modules for this pipeline. + * May be an empty list if this type of pipeline is + * not needed for the ingest job. 
*/ - FileIngestPipeline(IngestJobPipeline ingestJobPipeline, List moduleTemplates) { - super(ingestJobPipeline, moduleTemplates); - this.ingestJobPipeline = ingestJobPipeline; + FileIngestPipeline(IngestJobExecutor ingestJobExecutor, List moduleTemplates) { + super(ingestJobExecutor, moduleTemplates); + this.ingestJobExecutor = ingestJobExecutor; fileBatch = new ArrayList<>(); } @Override - Optional> acceptModuleTemplate(IngestModuleTemplate template) { - Optional> module = Optional.empty(); + Optional> acceptModuleTemplate(IngestModuleTemplate template) { + Optional> module = Optional.empty(); if (template.isFileIngestModuleTemplate()) { FileIngestModule ingestModule = template.createFileIngestModule(); module = Optional.of(new FileIngestPipelineModule(ingestModule, template.getModuleName())); @@ -73,18 +75,18 @@ final class FileIngestPipeline extends IngestTaskPipeline { } @Override - void prepareForTask(FileIngestTask task) throws IngestTaskPipelineException { + void prepareForTask(FileIngestTask task) throws IngestPipelineException { } @Override - void cleanUpAfterTask(FileIngestTask task) throws IngestTaskPipelineException { + void cleanUpAfterTask(FileIngestTask task) throws IngestPipelineException { try { ingestManager.setIngestTaskProgress(task, SAVE_RESULTS_ACTIVITY); AbstractFile file = task.getFile(); file.close(); cacheFileForBatchUpdate(file); } catch (TskCoreException ex) { - throw new IngestTaskPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS + throw new IngestPipelineException(String.format("Failed to get file (file objId = %d)", task.getFileId()), ex); //NON-NLS } finally { ingestManager.setIngestTaskProgressCompleted(task); } @@ -96,7 +98,7 @@ final class FileIngestPipeline extends IngestTaskPipeline { Date start = new Date(); try { updateBatchedFiles(); - } catch (IngestTaskPipelineException ex) { + } catch (IngestPipelineException ex) { errors.add(new 
IngestModuleError(SAVE_RESULTS_ACTIVITY, ex)); } Date finish = new Date(); @@ -113,9 +115,9 @@ final class FileIngestPipeline extends IngestTaskPipeline { * * @param file The file. * - * @throws IngestTaskPipelineException if the case database update fails. + * @throws IngestPipelineException if the case database update fails. */ - private void cacheFileForBatchUpdate(AbstractFile file) throws IngestTaskPipelineException { + private void cacheFileForBatchUpdate(AbstractFile file) throws IngestPipelineException { /* * Only one file ingest thread at a time will try to access the file * cache. The synchronization here is to ensure visibility of the files @@ -134,9 +136,9 @@ final class FileIngestPipeline extends IngestTaskPipeline { * Updates the case database with new properties added to the files in the * cache by the ingest modules that processed them. * - * @throws IngestTaskPipelineException if the case database update fails. + * @throws IngestPipelineException if the case database update fails. */ - private void updateBatchedFiles() throws IngestTaskPipelineException { + private void updateBatchedFiles() throws IngestPipelineException { /* * Only one file ingest thread at a time will try to access the file * cache. 
The synchronization here is to ensure visibility of the files @@ -146,7 +148,7 @@ final class FileIngestPipeline extends IngestTaskPipeline { synchronized (fileBatch) { CaseDbTransaction transaction = null; try { - if (!ingestJobPipeline.isCancelled()) { + if (!ingestJobExecutor.isCancelled()) { Case currentCase = Case.getCurrentCaseThrows(); SleuthkitCase caseDb = currentCase.getSleuthkitCase(); transaction = caseDb.beginTransaction(); @@ -166,7 +168,7 @@ final class FileIngestPipeline extends IngestTaskPipeline { logger.log(Level.SEVERE, "Error rolling back transaction after failure to save updated properties for cached files from tasks", ex1); } } - throw new IngestTaskPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS + throw new IngestPipelineException("Failed to save updated properties for cached files from tasks", ex); //NON-NLS } finally { fileBatch.clear(); } @@ -177,7 +179,7 @@ final class FileIngestPipeline extends IngestTaskPipeline { * A wrapper that adds ingest infrastructure operations to a file ingest * module. 
*/ - static final class FileIngestPipelineModule extends IngestTaskPipeline.PipelineModule { + static final class FileIngestPipelineModule extends IngestPipeline.PipelineModule { private final FileIngestModule module; @@ -195,7 +197,7 @@ final class FileIngestPipeline extends IngestTaskPipeline { } @Override - void executeTask(IngestJobPipeline ingestJobPipeline, FileIngestTask task) throws IngestModuleException { + void process(IngestJobExecutor ingestJobExecutor, FileIngestTask task) throws IngestModuleException { AbstractFile file = null; try { file = task.getFile(); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java index 280c81c57b..63bf99dfc4 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/FileIngestTask.java @@ -25,7 +25,7 @@ import org.sleuthkit.datamodel.TskCoreException; /** * A file ingest task that will be executed by an ingest thread using a given - * ingest job pipeline. + * ingest job executor. */ final class FileIngestTask extends IngestTask { @@ -34,13 +34,13 @@ final class FileIngestTask extends IngestTask { /** * Constructs a file ingest task that will be executed by an ingest thread - * using a given ingest job pipeline. + * using a given ingest job executor. * - * @param ingestJobPipeline The ingest job pipeline to use to execute the + * @param ingestJobPipeline The ingest job executor to use to execute the * task. * @param file The file to be processed. */ - FileIngestTask(IngestJobPipeline ingestJobPipeline, AbstractFile file) { + FileIngestTask(IngestJobExecutor ingestJobPipeline, AbstractFile file) { super(ingestJobPipeline); this.file = file; fileId = file.getId(); @@ -48,15 +48,15 @@ final class FileIngestTask extends IngestTask { /** * Constructs a file ingest task that will be executed by an ingest thread - * using a given ingest job pipeline. 
This constructor supports streaming + * using a given ingest job executor. This constructor supports streaming * ingest by deferring the construction of the AbstractFile object for this * task to conserve heap memory. * - * @param ingestJobPipeline The ingest job pipeline to use to execute the + * @param ingestJobPipeline The ingest job executor to use to execute the * task. * @param fileId The object ID of the file to be processed. */ - FileIngestTask(IngestJobPipeline ingestJobPipeline, long fileId) { + FileIngestTask(IngestJobExecutor ingestJobPipeline, long fileId) { super(ingestJobPipeline); this.fileId = fileId; } @@ -88,7 +88,7 @@ final class FileIngestTask extends IngestTask { @Override void execute(long threadId) { super.setThreadId(threadId); - getIngestJobPipeline().execute(this); + getIngestJobExecutor().execute(this); } @Override @@ -100,19 +100,19 @@ final class FileIngestTask extends IngestTask { return false; } FileIngestTask other = (FileIngestTask) obj; - IngestJobPipeline thisPipeline = getIngestJobPipeline(); - IngestJobPipeline otherPipeline = other.getIngestJobPipeline(); + IngestJobExecutor thisPipeline = getIngestJobExecutor(); + IngestJobExecutor otherPipeline = other.getIngestJobExecutor(); if (thisPipeline != otherPipeline && (thisPipeline == null || !thisPipeline.equals(otherPipeline))) { return false; } - return (this.fileId == other.fileId); + return (getFileId() == other.getFileId()); } @Override public int hashCode() { int hash = 5; - hash = 47 * hash + Objects.hashCode(getIngestJobPipeline()); - hash = 47 * hash + Objects.hashCode(this.fileId); + hash = 47 * hash + Objects.hashCode(getIngestJobExecutor()); + hash = 47 * hash + Objects.hashCode(getFileId()); return hash; } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java index e49c6ab9ee..37e4b549ee 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java +++ 
b/Core/src/org/sleuthkit/autopsy/ingest/IngestJob.java @@ -28,10 +28,11 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataArtifact; /** - * Analyzes one or more data sources using a set of ingest modules specified via - * ingest job settings. + * Analyzes a data sources using a set of ingest modules specified via ingest + * job settings. */ public final class IngestJob { @@ -73,17 +74,17 @@ public final class IngestJob { private final List files = new ArrayList<>(); private final Mode ingestMode; private final IngestJobSettings settings; - private volatile IngestJobPipeline ingestJobPipeline; + private volatile IngestJobExecutor ingestModuleExecutor; private volatile CancellationReason cancellationReason; /** * Constructs a batch mode ingest job that analyzes a data source using a - * set of ingest modules specified via ingest job settings. Either all of - * the files in the data source or a given subset of the files will be - * analyzed. + * set of ingest modules specified via ingest job settings. * * @param dataSource The data source to be analyzed. - * @param files A subset of the files from the data source. + * @param files A subset of the files from the data source to be + * analyzed, may be empty if all of the files should be + * analyzed. * @param settings The ingest job settings. */ IngestJob(Content dataSource, List files, IngestJobSettings settings) { @@ -91,13 +92,6 @@ public final class IngestJob { this.files.addAll(files); } - /** - * Constructs an ingest job that analyzes a data source using a set of - * ingest modules specified via ingest job settings, possibly using an - * ingest stream. - * - * @param settings The ingest job settings. 
- */ /** * Constructs an ingest job that analyzes a data source using a set of * ingest modules specified via ingest job settings, possibly using an @@ -108,7 +102,7 @@ public final class IngestJob { * @param settings The ingest job settings. */ IngestJob(Content dataSource, Mode ingestMode, IngestJobSettings settings) { - this.id = IngestJob.nextId.getAndIncrement(); + id = IngestJob.nextId.getAndIncrement(); this.dataSource = dataSource; this.settings = settings; this.ingestMode = ingestMode; @@ -125,6 +119,15 @@ public final class IngestJob { return this.id; } + /** + * Gets the data source to be analyzed by this job. + * + * @return The data source. + */ + Content getDataSource() { + return dataSource; + } + /** * Checks to see if this ingest job has at least one non-empty ingest module * pipeline. @@ -136,31 +139,41 @@ public final class IngestJob { } /** - * Adds a set of files to this ingest job if it is running in streaming + * Adds a set of files to this ingest job, if it is running in streaming * ingest mode. * * @param fileObjIds The object IDs of the files. */ - void addStreamingIngestFiles(List fileObjIds) { + void addStreamedFiles(List fileObjIds) { if (ingestMode == Mode.STREAMING) { - if (ingestJobPipeline != null) { - ingestJobPipeline.addStreamedFiles(fileObjIds); + if (ingestModuleExecutor != null) { + ingestModuleExecutor.addStreamedFiles(fileObjIds); } else { - logger.log(Level.SEVERE, "Attempted to add streamed ingest files with no ingest pipeline"); + logger.log(Level.SEVERE, "Attempted to add streamed files with no ingest pipeline"); } } else { - logger.log(Level.SEVERE, "Attempted to add streamed ingest files to batch ingest job"); + logger.log(Level.SEVERE, "Attempted to add streamed files to batch ingest job"); } } + /** + * Adds one or more data artifacts to this ingest job for processing by its + * data artifact ingest modules. + * + * @param dataArtifacts The data artifacts. 
+ */ + void addDataArtifacts(List dataArtifacts) { + ingestModuleExecutor.addDataArtifacts(dataArtifacts); + } + /** * Starts data source level analysis for this job if it is running in * streaming ingest mode. */ void processStreamingIngestDataSource() { if (ingestMode == Mode.STREAMING) { - if (ingestJobPipeline != null) { - ingestJobPipeline.addStreamedDataSource(); + if (ingestModuleExecutor != null) { + ingestModuleExecutor.startStreamingModeDataSourceAnalysis(); } else { logger.log(Level.SEVERE, "Attempted to start data source analaysis with no ingest pipeline"); } @@ -176,16 +189,16 @@ public final class IngestJob { * @return A collection of ingest module start up errors, empty on success. */ synchronized List start() throws InterruptedException { - if (ingestJobPipeline != null) { + if (ingestModuleExecutor != null) { logger.log(Level.SEVERE, "Attempt to start ingest job that has already been started"); return Collections.emptyList(); } - ingestJobPipeline = new IngestJobPipeline(this, dataSource, files, settings); + ingestModuleExecutor = new IngestJobExecutor(this, dataSource, files, settings); List errors = new ArrayList<>(); - errors.addAll(ingestJobPipeline.startUp()); + errors.addAll(ingestModuleExecutor.startUp()); if (errors.isEmpty()) { - IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestJobPipeline.getDataSource()); + IngestManager.getInstance().fireDataSourceAnalysisStarted(id, ingestModuleExecutor.getDataSource()); } else { cancel(CancellationReason.INGEST_MODULES_STARTUP_FAILED); } @@ -220,7 +233,7 @@ public final class IngestJob { */ public ProgressSnapshot getSnapshot(boolean includeIngestTasksSnapshot) { ProgressSnapshot snapshot = null; - if (ingestJobPipeline != null) { + if (ingestModuleExecutor != null) { return new ProgressSnapshot(includeIngestTasksSnapshot); } return snapshot; @@ -233,8 +246,8 @@ public final class IngestJob { */ Snapshot getDiagnosticStatsSnapshot() { Snapshot snapshot = null; - if 
(ingestJobPipeline != null) { - snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(true); + if (ingestModuleExecutor != null) { + snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(true); } return snapshot; } @@ -272,8 +285,8 @@ public final class IngestJob { * ingest manager's ingest jobs list lock. */ new Thread(() -> { - if (ingestJobPipeline != null) { - ingestJobPipeline.cancel(reason); + if (ingestModuleExecutor != null) { + ingestModuleExecutor.cancel(reason); } }).start(); } @@ -284,7 +297,7 @@ public final class IngestJob { * @return The cancellation reason, may be not cancelled. */ public CancellationReason getCancellationReason() { - return this.cancellationReason; + return cancellationReason; } /** @@ -294,18 +307,16 @@ public final class IngestJob { * @return True or false. */ public boolean isCancelled() { - return (CancellationReason.NOT_CANCELLED != this.cancellationReason); + return (CancellationReason.NOT_CANCELLED != cancellationReason); } /** - * Provides a callback for the ingest modules pipeline, allowing this ingest + * Provides a callback for the ingest module executor, allowing this ingest * job to notify the ingest manager when it is complete. - * - * @param ingestJobPipeline A completed ingestJobPipeline. */ - void notifyIngestPipelineShutDown() { + void notifyIngestPipelinesShutDown() { IngestManager ingestManager = IngestManager.getInstance(); - if (!ingestJobPipeline.isCancelled()) { + if (!ingestModuleExecutor.isCancelled()) { ingestManager.fireDataSourceAnalysisCompleted(id, dataSource); } else { IngestManager.getInstance().fireDataSourceAnalysisCancelled(id, dataSource); @@ -423,11 +434,7 @@ public final class IngestJob { * stats part of the snapshot. */ private ProgressSnapshot(boolean includeIngestTasksSnapshot) { - /* - * Note that the getSnapshot() will not construct a ProgressSnapshot - * if ingestJobPipeline is null. 
- */ - Snapshot snapshot = ingestJobPipeline.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot); + Snapshot snapshot = ingestModuleExecutor.getDiagnosticStatsSnapshot(includeIngestTasksSnapshot); dataSourceProcessingSnapshot = new DataSourceProcessingSnapshot(snapshot); jobCancellationRequested = IngestJob.this.isCancelled(); jobCancellationReason = IngestJob.this.getCancellationReason(); @@ -444,7 +451,7 @@ public final class IngestJob { DataSourceIngestModuleHandle moduleHandle = null; DataSourceIngestPipeline.DataSourcePipelineModule module = dataSourceProcessingSnapshot.getDataSourceLevelIngestModule(); if (module != null) { - moduleHandle = new DataSourceIngestModuleHandle(ingestJobPipeline, module); + moduleHandle = new DataSourceIngestModuleHandle(ingestModuleExecutor, module); } return moduleHandle; } @@ -507,7 +514,7 @@ public final class IngestJob { */ public static class DataSourceIngestModuleHandle { - private final IngestJobPipeline ingestJobPipeline; + private final IngestJobExecutor ingestJobExecutor; private final DataSourceIngestPipeline.DataSourcePipelineModule module; private final boolean cancelled; @@ -516,14 +523,14 @@ public final class IngestJob { * used to get basic information about the module and to request * cancellation of the module. * - * @param ingestJobPipeline The ingestJobPipeline that owns the data + * @param ingestJobExecutor The ingest job executor that owns the data * source level ingest module. * @param module The data source level ingest module. 
*/ - private DataSourceIngestModuleHandle(IngestJobPipeline ingestJobPipeline, DataSourceIngestPipeline.DataSourcePipelineModule module) { - this.ingestJobPipeline = ingestJobPipeline; + private DataSourceIngestModuleHandle(IngestJobExecutor ingestJobExecutor, DataSourceIngestPipeline.DataSourcePipelineModule module) { + this.ingestJobExecutor = ingestJobExecutor; this.module = module; - this.cancelled = ingestJobPipeline.currentDataSourceIngestModuleIsCancelled(); + this.cancelled = ingestJobExecutor.currentDataSourceIngestModuleIsCancelled(); } /** @@ -533,7 +540,7 @@ public final class IngestJob { * @return The display name. */ public String displayName() { - return this.module.getDisplayName(); + return module.getDisplayName(); } /** @@ -543,7 +550,7 @@ public final class IngestJob { * @return The module processing start time. */ public Date startTime() { - return this.module.getProcessingStartTime(); + return module.getProcessingStartTime(); } /** @@ -553,7 +560,7 @@ public final class IngestJob { * @return True or false. */ public boolean isCancelled() { - return this.cancelled; + return cancelled; } /** @@ -567,8 +574,8 @@ public final class IngestJob { * could perhaps be solved by adding a cancel() API to the * IngestModule interface. 
*/ - if (this.ingestJobPipeline.getCurrentDataSourceIngestModule() == this.module) { - this.ingestJobPipeline.cancelCurrentDataSourceIngestModule(); + if (ingestJobExecutor.getCurrentDataSourceIngestModule() == module) { + ingestJobExecutor.cancelCurrentDataSourceIngestModule(); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java index 17c88969a7..c25a147bcd 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobContext.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.ingest; import java.util.List; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.DataArtifact; /** * Provides an ingest module with services specific to the ingest job of which @@ -29,16 +28,16 @@ import org.sleuthkit.datamodel.DataArtifact; */ public final class IngestJobContext { - private final IngestJobPipeline ingestJobPipeline; + private final IngestJobExecutor ingestJobExecutor; /** * Constructs an ingest job context object that provides an ingest module * with services specific to the ingest job of which the module is a part. * - * @param ingestJobPipeline The ingest pipeline for the job. + * @param ingestJobExecutor The ingest executor for the job. */ - IngestJobContext(IngestJobPipeline ingestJobPipeline) { - this.ingestJobPipeline = ingestJobPipeline; + IngestJobContext(IngestJobExecutor ingestJobExecutor) { + this.ingestJobExecutor = ingestJobExecutor; } /** @@ -47,7 +46,7 @@ public final class IngestJobContext { * @return The context string. */ public String getExecutionContext() { - return ingestJobPipeline.getExecutionContext(); + return ingestJobExecutor.getExecutionContext(); } /** @@ -56,7 +55,7 @@ public final class IngestJobContext { * @return The data source. 
*/ public Content getDataSource() { - return ingestJobPipeline.getDataSource(); + return ingestJobExecutor.getDataSource(); } /** @@ -65,7 +64,7 @@ public final class IngestJobContext { * @return The ID. */ public long getJobId() { - return ingestJobPipeline.getIngestJobId(); + return ingestJobExecutor.getIngestJobId(); } /** @@ -79,7 +78,7 @@ public final class IngestJobContext { */ @Deprecated public boolean isJobCancelled() { - return ingestJobPipeline.isCancelled(); + return ingestJobExecutor.isCancelled(); } /** @@ -91,7 +90,7 @@ public final class IngestJobContext { * @return True or false. */ public boolean dataSourceIngestIsCancelled() { - return ingestJobPipeline.currentDataSourceIngestModuleIsCancelled() || ingestJobPipeline.isCancelled(); + return ingestJobExecutor.currentDataSourceIngestModuleIsCancelled() || ingestJobExecutor.isCancelled(); } /** @@ -106,7 +105,7 @@ public final class IngestJobContext { * It is not currently possible to cancel individual file ingest * modules. */ - return ingestJobPipeline.isCancelled(); + return ingestJobExecutor.isCancelled(); } /** @@ -122,7 +121,7 @@ public final class IngestJobContext { * It is not currently possible to cancel individual data artifact * ingest modules. */ - return ingestJobPipeline.isCancelled(); + return ingestJobExecutor.isCancelled(); } /** @@ -132,7 +131,7 @@ public final class IngestJobContext { * @return True or false. */ public boolean processingUnallocatedSpace() { - return ingestJobPipeline.shouldProcessUnallocatedSpace(); + return ingestJobExecutor.shouldProcessUnallocatedSpace(); } /** @@ -146,8 +145,8 @@ public final class IngestJobContext { @Deprecated public void scheduleFiles(List files) { addFilesToJob(files); - } - + } + /** * Adds one or more files, e.g., extracted or carved files, to the ingest * job for processing by its file ingest modules. @@ -155,17 +154,7 @@ public final class IngestJobContext { * @param files The files. 
*/ public void addFilesToJob(List files) { - ingestJobPipeline.addFiles(files); - } - - /** - * Adds one or more data artifacts to the ingest job for processing by its - * data artifact ingest modules. - * - * @param artifacts The artifacts. - */ - public void addDataArtifactsToJob(List artifacts) { - ingestJobPipeline.addDataArtifacts(artifacts); + ingestJobExecutor.addFiles(files); } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java similarity index 66% rename from Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java rename to Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java index 74f25abe73..7a93b15b22 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobPipeline.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobExecutor.java @@ -19,7 +19,6 @@ package org.sleuthkit.autopsy.ingest; import java.util.ArrayList; -import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.LinkedHashMap; @@ -57,15 +56,13 @@ import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.DataSource; /** - * A pipeline of ingest modules for analyzing one of the data sources in an - * ingest job. The ingest modules are organized into child pipelines by ingest - * module type and are run in stages. + * Manages the construction, start up, execution, and shut down of the ingest + * module pipelines for an ingest job. 
*/ -final class IngestJobPipeline { +final class IngestJobExecutor { private static final String AUTOPSY_MODULE_PREFIX = "org.sleuthkit.autopsy"; - - private static final Logger logger = Logger.getLogger(IngestJobPipeline.class.getName()); + private static final Logger logger = Logger.getLogger(IngestJobExecutor.class.getName()); /* * A regular expression for identifying the proxy classes Jython generates @@ -75,117 +72,131 @@ final class IngestJobPipeline { private static final Pattern JYTHON_MODULE_REGEX = Pattern.compile("org\\.python\\.proxies\\.(.+?)\\$(.+?)(\\$[0-9]*)?$"); /* - * These fields define an ingest pipeline: the ingest job that owns the - * pipeline, the user's ingest job settings, and the data source to be - * analyzed. Optionally, there is a set of files to be analyzed instead of - * analyzing all of the files in the data source. + * These fields are the identity of this object: the parent ingest job, the + * user's ingest job settings, and the data source to be analyzed by the + * ingest module pipelines. Optionally, there is a set of files to be + * analyzed instead of analyzing all of the files in the data source. */ private final IngestJob ingestJob; private final IngestJobSettings settings; private DataSource dataSource; private final List files; + private final long createTime; /* - * An ingest pipeline runs its ingest modules in stages. + * There are separate pipelines for high-priority and low priority data + * source level ingest modules. These pipelines are run sequentially, not + * simultaneously. */ - private static enum Stages { - /* - * The pipeline is instantiating ingest modules and loading them into - * its child ingest module pipelines. - */ - INITIALIZATION, - /* - * This stage is unique to a streaming mode ingest job. The pipeline is - * running file ingest modules on files streamed to it via - * addStreamedFiles(). 
If configured to have data artifact ingest - * modules, the pipeline is also running them on data artifacts - * generated by the analysis of the streamed files. This stage ends when - * the data source is streamed to the pipeline via - * addStreamedDataSource(). - */ - FIRST_STAGE_STREAMING, - /* - * The pipeline is running the following three types of ingest modules: - * higher priority data source level ingest modules, file ingest - * modules, and data artifact ingest modules. - */ - FIRST_STAGE, - /** - * The pipeline is running lower priority, usually long-running, data - * source level ingest modules and data artifact ingest modules. - */ - SECOND_STAGE, - /** - * The pipeline is shutting down its ingest modules. - */ - FINALIZATION - }; - private volatile Stages stage = IngestJobPipeline.Stages.INITIALIZATION; - - /* - * The stage field is volatile to allow it to be read by multiple threads. - * This lock is used not to guard the stage field, but to make stage - * transitions atomic. - */ - private final Object stageTransitionLock = new Object(); - - /* - * An ingest pipeline has separate data source level ingest module pipelines - * for the first and second stages. Longer running, lower priority modules - * belong in the second stage pipeline. - */ - private DataSourceIngestPipeline firstStageDataSourceIngestPipeline; - private DataSourceIngestPipeline secondStageDataSourceIngestPipeline; + private DataSourceIngestPipeline highPriorityDataSourceIngestPipeline; + private DataSourceIngestPipeline lowPriorityDataSourceIngestPipeline; private volatile DataSourceIngestPipeline currentDataSourceIngestPipeline; /* - * An ingest pipeline has a collection of identical file ingest module - * pipelines, one for each file ingest thread in the ingest manager. The - * file ingest threads take and return file ingest pipeline copies from a - * blocking queue as they work through the file ingest tasks for the ingest - * job. 
Additionally, a fixed list of all of the file ingest module - * pipelines is used to bypass the blocking queue when cycling through the - * pipelines to make ingest progress snapshots. + * There are one or more identical file ingest module pipelines, based on + * the number of file ingest threads in the ingest manager. References to + * the file ingest pipelines are put into two collections, each with its own + * purpose. A blocking queue allows file ingest threads to take and return + * file ingest pipelines as they work through the file ingest tasks for one + * or more ingest jobs. Having the same number of pipelines as threads + * ensures that a file ingest thread will never be idle as long as there are + * file ingest tasks still to do, regardless of the number of ingest jobs in + * progress. Additionally, a fixed list is used to cycle through the file + * ingest module pipelines to make ingest progress snapshots. */ private final LinkedBlockingQueue fileIngestPipelinesQueue = new LinkedBlockingQueue<>(); private final List fileIngestPipelines = new ArrayList<>(); /* - * An ingest pipeline has a single data artifact ingest module pipeline. + * There is at most one data artifact ingest module pipeline. */ private DataArtifactIngestPipeline artifactIngestPipeline; /* - * An ingest pipeline supports cancellation of analysis by individual data - * source level ingest modules or cancellation of all remaining analysis by - * all of its ingest modules. Cancellation works by setting flags that are - * checked by the ingest module pipelines every time they transition from - * one module to another. Ingest modules are also expected to check these - * flags (via the ingest job context) and stop processing if they are set. - * This approach to cancellation means that there can be a variable length - * delay between a cancellation request and its fulfillment. Analysis - * already completed at the time that cancellation occurs is not discarded. 
+ * The construction, start up, execution, and shut down of the ingest module + * pipelines for an ingest job is done in stages. */ - private volatile boolean currentDataSourceIngestModuleCancelled; - private final List cancelledDataSourceIngestModules = new CopyOnWriteArrayList<>(); - private volatile boolean cancelled; - private volatile IngestJob.CancellationReason cancellationReason = IngestJob.CancellationReason.NOT_CANCELLED; + private static enum IngestJobStage { + /* + * In this stage, the ingest module pipelines are constructed per the + * user's ingest job settings. This stage ends when all of the ingest + * module pipelines for the ingest job are ready to run. + */ + PIPELINES_START_UP, + /* + * This stage is unique to a streaming mode ingest job. In this stage, + * file ingest module pipelines are analyzing files streamed to them via + * addStreamedFiles(). If the ingest job is configured to have a data + * artifact ingest pipeline, that pipeline is also analyzing any data + * artifacts generated by the file ingest modules. This stage ends when + * addStreamedDataSource() is called. + */ + STREAMED_FILE_ANALYSIS_ONLY, + /* + * In this stage, file ingest module pipelines and/or a pipeline of + * higher-priority data source level ingest modules are running. If the + * ingest job is configured to have a data artifact ingest pipeline, + * that pipeline is also analyzing any data artifacts generated by the + * file and/or data source level ingest modules. + */ + FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS, + /** + * In this stage, a pipeline of lower-priority, usually long-running + * data source level ingest ingest modules is running. If the ingest job + * is configured to have a data artifact ingest pipeline, that pipeline + * is also analyzing any data artifacts generated by the data source + * level ingest modules. + */ + LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS, + /** + * In this stage, The pipeline is shutting down its ingest modules. 
+ */ + PIPELINES_SHUT_DOWN + }; /* - * An ingest pipeline interacts with the ingest task scheduler to create - * ingest tasks for analyzing the data source, files and data artifacts that - * are the subject of the ingest job. The scheduler queues the tasks for the - * ingest manager's ingest threads. The ingest tasks are the units of work - * for the ingest pipeline's child ingest module pipelines. + * The stage field is volatile to allow it to be read by multiple threads. + * So the stage transition lock is used not to guard the stage field, but to + * coordinate stage transitions. + */ + private volatile IngestJobStage stage = IngestJobExecutor.IngestJobStage.PIPELINES_START_UP; + private final Object stageTransitionLock = new Object(); + + /* + * During each stage of the ingest job, this object interacts with the + * ingest task scheduler to create ingest tasks for analyzing the data + * source, files and data artifacts that are the subject of the ingest job. + * The scheduler queues the tasks for the ingest manager's ingest threads. + * The ingest tasks are the units of work for the ingest module pipelines. */ private static final IngestTasksScheduler taskScheduler = IngestTasksScheduler.getInstance(); /* - * If running with a GUI, an ingest pipeline reports analysis progress and - * allows a user to cancel all or part of the analysis using progress bars - * in the lower right hand corner of the main application window. + * Two levels of ingest job cancellation are supported: 1) cancellation of + * analysis by individual data source level ingest modules, and 2) + * cancellation of all remaining analysis by all of the ingest modules. + * Cancellation works by setting flags that are checked by the ingest module + * pipelines every time they transition from one module to another. Ingest + * modules are also expected to check these flags (via the ingest job + * context) and stop processing if they are set. 
This approach to + * cancellation means that there can be a variable length delay between a + * cancellation request and its fulfillment. Analysis already completed at + * the time that cancellation occurs is NOT discarded. */ - private final boolean doUI; + private volatile boolean currentDataSourceIngestModuleCancelled; + private final List cancelledDataSourceIngestModules = new CopyOnWriteArrayList<>(); + private volatile boolean jobCancelled; + private volatile IngestJob.CancellationReason cancellationReason = IngestJob.CancellationReason.NOT_CANCELLED; + + /* + * If running in the NetBeans thick client application version of Autopsy, + * NetBeans progress bars are used to display ingest job progress in the + * lower right hand corner of the main application window. A layer of + * abstraction to allow alternate representations of progress could be used + * here, as it is in other places in the application, to better decouple + * this object from the application's presentation layer. + */ + private final boolean usingNetBeansGUI; private final Object dataSourceIngestProgressLock = new Object(); private ProgressHandle dataSourceIngestProgressBar; private final Object fileIngestProgressLock = new Object(); @@ -197,30 +208,25 @@ final class IngestJobPipeline { private ProgressHandle artifactIngestProgressBar; /* - * Ingest job details are tracked using this object and are recorded in the - * case database when the pipeline starts up and shuts down. + * The ingest job details that are stored to the case database are tracked + * using this object and are recorded in the database when the ingest module + * pipelines are started up and shut down. */ private volatile IngestJobInfo ingestJobInfo; - /** - * An ingest pipeline uses this field to report its creation time. 
- */ - private final long createTime; - /* - * An ingest pipeline allows ingest module pipelines to register and - * unregister the ingest thread they are running in when a scheduled ingest - * pause occurs and the threads are made to sleep. This allows interruption - * of these threads if the ingest job is canceled. + * Ingest module pipelines register and unregister the ingest thread they + * are running in when a scheduled ingest pause occurs and the threads are + * made to sleep. This allows interruption of these sleeping threads if the + * ingest job is canceled while paused. */ private final Object threadRegistrationLock = new Object(); @GuardedBy("threadRegistrationLock") private final Set pausedIngestThreads = new HashSet<>(); /** - * Constructs a pipeline of ingest modules for analyzing one of the data - * sources in an ingest job. The ingest modules are organized into child - * pipelines by ingest module type and are run in stages. + * Constructs an object that manages the construction, start up, execution, + * and shut down of the ingest module pipelines for an ingest job. * * @param ingestJob The ingest job. * @param dataSource The data source. @@ -232,7 +238,7 @@ final class IngestJobPipeline { * @throws InterruptedException Exception thrown if the thread in which the * pipeline is being created is interrupted. 
*/ - IngestJobPipeline(IngestJob ingestJob, Content dataSource, List files, IngestJobSettings settings) throws InterruptedException { + IngestJobExecutor(IngestJob ingestJob, Content dataSource, List files, IngestJobSettings settings) throws InterruptedException { if (!(dataSource instanceof DataSource)) { throw new IllegalArgumentException("Passed dataSource that does not implement the DataSource interface"); //NON-NLS } @@ -241,9 +247,9 @@ final class IngestJobPipeline { this.files = new ArrayList<>(); this.files.addAll(files); this.settings = settings; - doUI = RuntimeProperties.runningWithGUI(); + usingNetBeansGUI = RuntimeProperties.runningWithGUI(); createTime = new Date().getTime(); - stage = Stages.INITIALIZATION; + stage = IngestJobStage.PIPELINES_START_UP; createIngestModulePipelines(); } @@ -313,7 +319,7 @@ final class IngestJobPipeline { } /** - * Creates the child ingest module pipelines for this ingest pipeline. + * Creates the ingest module pipelines for the ingest job. * * @throws InterruptedException Exception thrown if the thread in which the * pipeline is being created is interrupted. @@ -375,8 +381,8 @@ final class IngestJobPipeline { * Construct the ingest module pipelines from the ingest module pipeline * templates. 
*/ - firstStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourcePipelineTemplate); - secondStageDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourcePipelineTemplate); + highPriorityDataSourceIngestPipeline = new DataSourceIngestPipeline(this, firstStageDataSourcePipelineTemplate); + lowPriorityDataSourceIngestPipeline = new DataSourceIngestPipeline(this, secondStageDataSourcePipelineTemplate); int numberOfFileIngestThreads = IngestManager.getInstance().getNumberOfFileIngestThreads(); for (int i = 0; i < numberOfFileIngestThreads; ++i) { FileIngestPipeline pipeline = new FileIngestPipeline(this, filePipelineTemplate); @@ -414,7 +420,7 @@ final class IngestJobPipeline { } /** - * Gets the ID of the ingest job that owns this ingest pipeline. + * Gets the ID of the ingest job that owns this object. * * @return The ID. */ @@ -423,7 +429,7 @@ final class IngestJobPipeline { } /** - * Gets the ingest execution context name. + * Gets the ingest job execution context name. * * @return The context name. */ @@ -432,7 +438,7 @@ final class IngestJobPipeline { } /** - * Gets the data source to be analyzed by this ingest pipeline. + * Gets the data source of the ingest job. * * @return The data source. */ @@ -441,8 +447,8 @@ final class IngestJobPipeline { } /** - * Queries whether or not unallocated space should be processed by this - * ingest pipeline. + * Queries whether or not unallocated space should be processed for the + * ingest job. * * @return True or false. */ @@ -451,7 +457,7 @@ final class IngestJobPipeline { } /** - * Gets the file ingest filter for this ingest pipeline. + * Gets the file ingest filter for the ingest job. * * @return The filter. */ @@ -460,55 +466,53 @@ final class IngestJobPipeline { } /** - * Checks to see if this ingest pipeline has at least one ingest module to - * run. + * Checks to see if there is at least one ingest module to run. * * @return True or false. 
*/ boolean hasIngestModules() { return hasFileIngestModules() - || hasFirstStageDataSourceIngestModules() - || hasSecondStageDataSourceIngestModules() + || hasHighPriorityDataSourceIngestModules() + || hasLowPriorityDataSourceIngestModules() || hasDataArtifactIngestModules(); } /** - * Checks to see if this ingest pipeline has at least one ingest module to + * Checks to see if there is at least one data source level ingest module to * run. * * @return True or false. */ boolean hasDataSourceIngestModules() { - if (stage == Stages.SECOND_STAGE) { - return hasSecondStageDataSourceIngestModules(); + if (stage == IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS) { + return hasLowPriorityDataSourceIngestModules(); } else { - return hasFirstStageDataSourceIngestModules(); + return hasHighPriorityDataSourceIngestModules(); } } /** - * Checks to see if this ingest pipeline has at least one first stage data - * source level ingest module to run. + * Checks to see if there is at least one high priority data source level + * ingest module to run. * * @return True or false. */ - private boolean hasFirstStageDataSourceIngestModules() { - return (firstStageDataSourceIngestPipeline.isEmpty() == false); + private boolean hasHighPriorityDataSourceIngestModules() { + return (highPriorityDataSourceIngestPipeline.isEmpty() == false); } /** - * Checks to see if this ingest pipeline has at least one second stage data - * source level ingest module to run. + * Checks to see if there is at least one low priority data source level + * ingest module to run. * * @return True or false. */ - private boolean hasSecondStageDataSourceIngestModules() { - return (secondStageDataSourceIngestPipeline.isEmpty() == false); + private boolean hasLowPriorityDataSourceIngestModules() { + return (lowPriorityDataSourceIngestPipeline.isEmpty() == false); } /** - * Checks to see if this ingest pipeline has at least one file ingest module - * to run. 
+ * Checks to see if there is at least one file ingest module to run. * * @return True or false. */ @@ -520,8 +524,8 @@ final class IngestJobPipeline { } /** - * Checks to see if this ingest pipeline has at least one data artifact - * ingest module to run. + * Checks to see if there is at least one data artifact ingest module to + * run. * * @return True or false. */ @@ -530,7 +534,8 @@ final class IngestJobPipeline { } /** - * Starts up this ingest pipeline. + * Determnines which inges job stage to start in and starts up the ingest + * module pipelines. * * @return A collection of ingest module startup errors, empty on success. */ @@ -538,22 +543,71 @@ final class IngestJobPipeline { List errors = startUpIngestModulePipelines(); if (errors.isEmpty()) { recordIngestJobStartUpInfo(); - if (hasFirstStageDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { + if (hasHighPriorityDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { if (ingestJob.getIngestMode() == IngestJob.Mode.STREAMING) { - startFirstStageInStreamingMode(); + startStreamingModeAnalysis(); } else { - startFirstStageInBatchMode(); + startBatchModeAnalysis(); } - } else if (hasSecondStageDataSourceIngestModules()) { - startSecondStage(); + } else if (hasLowPriorityDataSourceIngestModules()) { + startLowPriorityDataSourceAnalysis(); } } return errors; } + /** + * Starts up the ingest module pipelines in this ingest. Note that all of + * the child pipelines are started so that any and all start up errors can + * be returned to the caller. It is important to capture all of the errors, + * because the ingest job will be automatically cancelled and the errors + * will be reported to the user so either the issues can be addressed or the + * modules that can't start up can be disabled before the ingest job is + * attempted again. + * + * @return A list of ingest module startup errors, empty on success. 
+ */ + private List startUpIngestModulePipelines() { + List errors = new ArrayList<>(); + errors.addAll(startUpIngestModulePipeline(highPriorityDataSourceIngestPipeline)); + errors.addAll(startUpIngestModulePipeline(lowPriorityDataSourceIngestPipeline)); + for (FileIngestPipeline pipeline : fileIngestPipelines) { + List filePipelineErrors = startUpIngestModulePipeline(pipeline); + if (!filePipelineErrors.isEmpty()) { + /* + * If one file pipeline copy can't start up, assume that none of + * them will be able to start up for the same reason. + */ + errors.addAll(filePipelineErrors); + break; + } + } + errors.addAll(startUpIngestModulePipeline(artifactIngestPipeline)); + return errors; + } + + /** + * Starts up an ingest module pipeline. If there are any start up errors, + * the pipeline is immediately shut down. + * + * @param pipeline The ingest module pipeline to start up. + * + * @return A list of ingest module startup errors, empty on success. + */ + private List startUpIngestModulePipeline(IngestPipeline pipeline) { + List startUpErrors = pipeline.startUp(); + if (!startUpErrors.isEmpty()) { + List shutDownErrors = pipeline.shutDown(); + if (!shutDownErrors.isEmpty()) { + logIngestModuleErrors(shutDownErrors); + } + } + return startUpErrors; + } + /** * Writes start up data about the ingest job into the case database. The - * case database returns an object that is retained to allow the additon of + * case database returns an object that is retained to allow the addition of * a completion time when the ingest job is finished. */ void recordIngestJobStartUpInfo() { @@ -602,73 +656,24 @@ final class IngestJobPipeline { } /** - * Starts up each of the child ingest module pipelines in this ingest - * pipeline. - * - * Note that all of the child pipelines are started so that any and all - * start up errors can be returned to the caller. 
It is important to capture - * all of the errors, because the ingest job will be automatically cancelled - * and the errors will be reported to the user so either the issues can be - * addressed or the modules that can't start up can be disabled before the - * ingest job is attempted again. - * - * @return A list of ingest module startup errors, empty on success. + * Starts analysis for a batch mode ingest job. For a batch mode job, all of + * the files in the data source (excepting carved and derived files) have + * already been added to the case database by the data source processor and + * analysis starts in the file and high priority data source level analysis + * stage. */ - private List startUpIngestModulePipelines() { - List errors = new ArrayList<>(); - errors.addAll(startUpIngestModulePipeline(firstStageDataSourceIngestPipeline)); - errors.addAll(startUpIngestModulePipeline(secondStageDataSourceIngestPipeline)); - for (FileIngestPipeline pipeline : fileIngestPipelines) { - List filePipelineErrors = startUpIngestModulePipeline(pipeline); - if (!filePipelineErrors.isEmpty()) { - /* - * If one file pipeline copy can't start up, assume that none of - * them will be able to start up for the same reason. - */ - errors.addAll(filePipelineErrors); - break; - } - } - errors.addAll(startUpIngestModulePipeline(artifactIngestPipeline)); - return errors; - } - - /** - * Starts up an ingest module pipeline. If there are any start up errors, - * the pipeline is immediately shut down. - * - * @param pipeline The ingest task pipeline to start up. - * - * @return A list of ingest module startup errors, empty on success. 
- */ - private List startUpIngestModulePipeline(IngestTaskPipeline pipeline) { - List startUpErrors = pipeline.startUp(); - if (!startUpErrors.isEmpty()) { - List shutDownErrors = pipeline.shutDown(); - if (!shutDownErrors.isEmpty()) { - logIngestModuleErrors(shutDownErrors); - } - } - return startUpErrors; - } - - /** - * Starts the first stage of this pipeline in batch mode. In batch mode, all - * of the files in the data source (excepting carved and derived files) have - * already been added to the case database by the data source processor. - */ - private void startFirstStageInBatchMode() { + private void startBatchModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage("Starting first stage analysis in batch mode"); //NON-NLS - stage = Stages.FIRST_STAGE; + logInfoMessage(String.format("Starting analysis in batch mode for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + stage = IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; - /* - * Do a count of the files the data source processor has added to - * the case database. This estimate will be used for ingest progress - * snapshots and for the file ingest progress bar if running with a - * GUI. - */ if (hasFileIngestModules()) { + /* + * Do a count of the files the data source processor has added + * to the case database. This number will be used to estimate + * how many files remain to be analyzed as each file ingest task + * is completed. + */ long filesToProcess; if (files.isEmpty()) { filesToProcess = dataSource.accept(new GetFilesCountVisitor()); @@ -680,15 +685,15 @@ final class IngestJobPipeline { } } - /* - * If running with a GUI, start ingest progress bars in the lower - * right hand corner of the main application window. - */ - if (doUI) { + if (usingNetBeansGUI) { + /* + * Start ingest progress bars in the lower right hand corner of + * the main application window. 
+ */ if (hasFileIngestModules()) { startFileIngestProgressBar(); } - if (hasFirstStageDataSourceIngestModules()) { + if (hasHighPriorityDataSourceIngestModules()) { startDataSourceIngestProgressBar(); } if (hasDataArtifactIngestModules()) { @@ -697,23 +702,23 @@ final class IngestJobPipeline { } /* - * Make the first stage data source level ingest pipeline the - * current data source level pipeline. + * Make the high priority data source level ingest module pipeline + * the current data source level ingest module pipeline. */ - currentDataSourceIngestPipeline = firstStageDataSourceIngestPipeline; + currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; /* - * Schedule the first stage ingest tasks and then immediately check - * for stage completion. This is necessary because it is possible - * that zero tasks will actually make it to task execution due to - * the file filter or other ingest job settings. In that case, there - * will never be a stage completion check in an ingest thread - * executing an ingest task, so such a job would run forever without - * a check here. + * Schedule ingest tasks and then immediately check for stage + * completion. This is necessary because it is possible that zero + * tasks will actually make it to task execution due to the file + * filter or other ingest job settings. In that case, there will + * never be a stage completion check in an ingest thread executing + * an ingest task, so such a job would run forever without a check + * here. 
*/ if (!files.isEmpty() && hasFileIngestModules()) { taskScheduler.scheduleFileIngestTasks(this, files); - } else if (hasFirstStageDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { + } else if (hasHighPriorityDataSourceIngestModules() || hasFileIngestModules() || hasDataArtifactIngestModules()) { taskScheduler.scheduleIngestTasks(this); } checkForStageCompleted(); @@ -721,27 +726,28 @@ final class IngestJobPipeline { } /** - * Starts the first stage of this pipeline in streaming mode. In streaming - * mode, the data source processor streams files into the pipeline as it - * adds them to the case database and file level analysis can begin before - * data source level analysis. + * Starts analysis for a streaming mode ingest job. For a streaming mode + * job, the data source processor streams files in as it adds them to the + * case database and file analysis can begin before data source level + * analysis. */ - private void startFirstStageInStreamingMode() { + private void startStreamingModeAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage("Starting first stage analysis in streaming mode"); //NON-NLS - stage = Stages.FIRST_STAGE_STREAMING; + logInfoMessage("Starting data source level analysis in streaming mode"); //NON-NLS + stage = IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY; - if (doUI) { + if (usingNetBeansGUI) { /* - * If running with a GUI, start ingest progress bars in the - * lower right hand corner of the main application window. + * Start ingest progress bars in the lower right hand corner of + * the main application window. */ if (hasFileIngestModules()) { /* * Note that because estimated files remaining to process * still has its initial value of zero, the progress bar * will start in the "indeterminate" state. An estimate of - * the files to process can be computed in + * the files to process can be computed later, when all of + * the files have been added ot the case database. 
*/ startFileIngestProgressBar(); } @@ -754,9 +760,8 @@ final class IngestJobPipeline { /* * Schedule artifact ingest tasks for any artifacts currently in * the case database. This needs to be done before any files or - * the data source are streamed in to avoid analyzing data - * artifacts added to the case database by the data source level - * or file level ingest tasks. + * the data source are streamed in to avoid analyzing the data + * artifacts added to the case database by those tasks twice. */ taskScheduler.scheduleDataArtifactIngestTasks(this); } @@ -764,40 +769,47 @@ final class IngestJobPipeline { } /** - * Notifies the ingest pipeline running in streaming mode that the data - * source is now ready for analysis. + * Signals in streaming mode that all of the files have been added to the + * case database and streamed in, and the data source is now ready for + * analysis. */ - void addStreamedDataSource() { + void startStreamingModeDataSourceAnalysis() { synchronized (stageTransitionLock) { logInfoMessage("Starting full first stage analysis in streaming mode"); //NON-NLS - stage = IngestJobPipeline.Stages.FIRST_STAGE; - currentDataSourceIngestPipeline = firstStageDataSourceIngestPipeline; + stage = IngestJobExecutor.IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; + currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; if (hasFileIngestModules()) { /* * Do a count of the files the data source processor has added - * to the case database. This estimate will be used for ingest - * progress snapshots and for the file ingest progress bar if - * running with a GUI. + * to the case database. This number will be used to estimate + * how many files remain to be analyzed as each file ingest task + * is completed. 
*/ long filesToProcess = dataSource.accept(new GetFilesCountVisitor()); synchronized (fileIngestProgressLock) { estimatedFilesToProcess = filesToProcess; - if (doUI && fileIngestProgressBar != null) { + if (usingNetBeansGUI && fileIngestProgressBar != null) { fileIngestProgressBar.switchToDeterminate((int) estimatedFilesToProcess); } } } - if (doUI) { - if (hasFirstStageDataSourceIngestModules()) { + if (usingNetBeansGUI) { + /* + * Start a data source level ingest progress bar in the lower + * right hand corner of the main application window. The file + * and data artifact ingest progress bars were already started + * in startStreamingModeAnalysis(). + */ + if (hasHighPriorityDataSourceIngestModules()) { startDataSourceIngestProgressBar(); } } - currentDataSourceIngestPipeline = firstStageDataSourceIngestPipeline; - if (hasFirstStageDataSourceIngestModules()) { - IngestJobPipeline.taskScheduler.scheduleDataSourceIngestTask(this); + currentDataSourceIngestPipeline = highPriorityDataSourceIngestPipeline; + if (hasHighPriorityDataSourceIngestModules()) { + IngestJobExecutor.taskScheduler.scheduleDataSourceIngestTask(this); } else { /* * If no data source level ingest task is scheduled at this time @@ -813,35 +825,39 @@ final class IngestJobPipeline { } /** - * Starts the second stage ingest task pipelines. + * Starts low priority data source analysis. 
*/ - private void startSecondStage() { + private void startLowPriorityDataSourceAnalysis() { synchronized (stageTransitionLock) { - if (hasSecondStageDataSourceIngestModules()) { - logInfoMessage(String.format("Starting second stage ingest task pipelines for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS - stage = IngestJobPipeline.Stages.SECOND_STAGE; + if (hasLowPriorityDataSourceIngestModules()) { + logInfoMessage(String.format("Starting low priority data source analysis for %s (objID=%d, jobID=%d)", dataSource.getName(), dataSource.getId(), ingestJob.getId())); //NON-NLS + stage = IngestJobExecutor.IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS; - if (doUI) { + if (usingNetBeansGUI) { startDataSourceIngestProgressBar(); } - currentDataSourceIngestPipeline = secondStageDataSourceIngestPipeline; + currentDataSourceIngestPipeline = lowPriorityDataSourceIngestPipeline; taskScheduler.scheduleDataSourceIngestTask(this); } } } /** - * Starts a progress bar for the results ingest tasks for the ingest job. + * Starts a data artifacts analysis NetBeans progress bar in the lower right + * hand corner of the main application window. The progress bar provides the + * user with a task cancellation button. Pressing it cancels the ingest job. + * Analysis already completed at the time that cancellation occurs is NOT + * discarded. 
*/ private void startArtifactIngestProgressBar() { - if (doUI) { + if (usingNetBeansGUI) { synchronized (artifactIngestProgressLock) { String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataArtifactIngest.displayName", this.dataSource.getName()); artifactIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - IngestJobPipeline.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); return true; } }); @@ -852,62 +868,65 @@ final class IngestJobPipeline { } /** - * Starts a data source level ingest progress bar for this job. + * Starts a data source level analysis NetBeans progress bar in the lower + * right hand corner of the main application window. The progress bar + * provides the user with a task cancellation button. Pressing it cancels + * either the currently running data source level ingest module or the + * entire ingest job. Analysis already completed at the time that + * cancellation occurs is NOT discarded. 
*/ private void startDataSourceIngestProgressBar() { - if (this.doUI) { - synchronized (this.dataSourceIngestProgressLock) { - String displayName = NbBundle.getMessage(this.getClass(), - "IngestJob.progress.dataSourceIngest.initialDisplayName", - this.dataSource.getName()); - this.dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + if (usingNetBeansGUI) { + synchronized (dataSourceIngestProgressLock) { + String displayName = NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName()); + dataSourceIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - // If this method is called, the user has already pressed - // the cancel button on the progress bar and the OK button - // of a cancelation confirmation dialog supplied by - // NetBeans. What remains to be done is to find out whether - // the user wants to cancel only the currently executing - // data source ingest module or the entire ingest job. + /* + * The user has already pressed the cancel button on + * this progress bar, and the OK button of a cancelation + * confirmation dialog supplied by NetBeans. Find out + * whether the user wants to cancel only the currently + * executing data source ingest module or the entire + * ingest job. 
+ */ DataSourceIngestCancellationPanel panel = new DataSourceIngestCancellationPanel(); - String dialogTitle = NbBundle.getMessage(IngestJobPipeline.this.getClass(), "IngestJob.cancellationDialog.title"); + String dialogTitle = NbBundle.getMessage(IngestJobExecutor.this.getClass(), "IngestJob.cancellationDialog.title"); JOptionPane.showConfirmDialog(WindowManager.getDefault().getMainWindow(), panel, dialogTitle, JOptionPane.OK_OPTION, JOptionPane.PLAIN_MESSAGE); if (panel.cancelAllDataSourceIngestModules()) { - IngestJobPipeline.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); } else { - IngestJobPipeline.this.cancelCurrentDataSourceIngestModule(); + IngestJobExecutor.this.cancelCurrentDataSourceIngestModule(); } return true; } }); - this.dataSourceIngestProgressBar.start(); - this.dataSourceIngestProgressBar.switchToIndeterminate(); + dataSourceIngestProgressBar.start(); + dataSourceIngestProgressBar.switchToIndeterminate(); } } } /** - * Starts the file level ingest progress bar for this job. + * Starts a file analysis NetBeans progress bar in the lower right hand + * corner of the main application window. The progress bar provides the user + * with a task cancellation button. Pressing it cancels the ingest job. + * Analysis already completed at the time that cancellation occurs is NOT + * discarded. 
*/ private void startFileIngestProgressBar() { - if (this.doUI) { - synchronized (this.fileIngestProgressLock) { - String displayName = NbBundle.getMessage(this.getClass(), - "IngestJob.progress.fileIngest.displayName", - this.dataSource.getName()); - this.fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { + if (usingNetBeansGUI) { + synchronized (fileIngestProgressLock) { + String displayName = NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName()); + fileIngestProgressBar = ProgressHandle.createHandle(displayName, new Cancellable() { @Override public boolean cancel() { - // If this method is called, the user has already pressed - // the cancel button on the progress bar and the OK button - // of a cancelation confirmation dialog supplied by - // NetBeans. - IngestJobPipeline.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); + IngestJobExecutor.this.cancel(IngestJob.CancellationReason.USER_CANCELLED); return true; } }); - this.fileIngestProgressBar.start(); - this.fileIngestProgressBar.switchToDeterminate((int) this.estimatedFilesToProcess); + fileIngestProgressBar.start(); + fileIngestProgressBar.switchToDeterminate((int) this.estimatedFilesToProcess); } } } @@ -918,15 +937,15 @@ final class IngestJobPipeline { */ private void checkForStageCompleted() { synchronized (stageTransitionLock) { - if (stage == Stages.FIRST_STAGE_STREAMING) { + if (stage == IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY) { return; } if (taskScheduler.currentTasksAreCompleted(this)) { switch (stage) { - case FIRST_STAGE: - finishFirstStage(); + case FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS: + finishFileAndHighPriorityDataSrcAnalysis(); break; - case SECOND_STAGE: + case LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS: shutDown(); break; } @@ -935,12 +954,13 @@ final class IngestJobPipeline { } /** - * Shuts down the first stage ingest pipelines and progress bars for this - * job and starts the second stage, if 
appropriate. + * Shuts down the file and high-priority data source level ingest pipelines + * and progress bars for this job and starts the low-priority data source + * level analysis stage, if appropriate. */ - private void finishFirstStage() { + private void finishFileAndHighPriorityDataSrcAnalysis() { synchronized (stageTransitionLock) { - logInfoMessage("Finished first stage analysis"); //NON-NLS + logInfoMessage("Finished file and high-priority data source analysis"); //NON-NLS shutDownIngestModulePipeline(currentDataSourceIngestPipeline); while (!fileIngestPipelinesQueue.isEmpty()) { @@ -948,7 +968,7 @@ final class IngestJobPipeline { shutDownIngestModulePipeline(pipeline); } - if (doUI) { + if (usingNetBeansGUI) { synchronized (dataSourceIngestProgressLock) { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.finish(); @@ -964,8 +984,8 @@ final class IngestJobPipeline { } } - if (!cancelled && hasSecondStageDataSourceIngestModules()) { - startSecondStage(); + if (!jobCancelled && hasLowPriorityDataSourceIngestModules()) { + startLowPriorityDataSourceAnalysis(); } else { shutDown(); } @@ -973,17 +993,17 @@ final class IngestJobPipeline { } /** - * Shuts down the ingest module pipelines and progress bars for this job. + * Shuts down the ingest module pipelines and progress bars. 
*/ private void shutDown() { synchronized (stageTransitionLock) { logInfoMessage("Finished all tasks"); //NON-NLS - stage = IngestJobPipeline.Stages.FINALIZATION; + stage = IngestJobExecutor.IngestJobStage.PIPELINES_SHUT_DOWN; shutDownIngestModulePipeline(currentDataSourceIngestPipeline); shutDownIngestModulePipeline(artifactIngestPipeline); - if (doUI) { + if (usingNetBeansGUI) { synchronized (dataSourceIngestProgressLock) { if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.finish(); @@ -1007,7 +1027,7 @@ final class IngestJobPipeline { } if (ingestJobInfo != null) { - if (cancelled) { + if (jobCancelled) { try { ingestJobInfo.setIngestJobStatus(IngestJobStatusType.CANCELLED); } catch (TskCoreException ex) { @@ -1028,15 +1048,15 @@ final class IngestJobPipeline { } } - ingestJob.notifyIngestPipelineShutDown(); + ingestJob.notifyIngestPipelinesShutDown(); } /** - * Shuts down an ingest task pipeline. + * Shuts down an ingest module pipeline. * * @param pipeline The pipeline. */ - private void shutDownIngestModulePipeline(IngestTaskPipeline pipeline) { + private void shutDownIngestModulePipeline(IngestPipeline pipeline) { if (pipeline.isRunning()) { List errors = new ArrayList<>(); errors.addAll(pipeline.shutDown()); @@ -1048,8 +1068,7 @@ final class IngestJobPipeline { /** * Passes the data source for the ingest job through the currently active - * data source level ingest task pipeline (first stage or second stage data - * source ingest modules). + * data source level ingest module pipeline (high-priority or low-priority). * * @param task A data source ingest task wrapping the data source. 
*/ @@ -1057,7 +1076,7 @@ final class IngestJobPipeline { try { if (!isCancelled()) { List errors = new ArrayList<>(); - errors.addAll(currentDataSourceIngestPipeline.executeTask(task)); + errors.addAll(currentDataSourceIngestPipeline.performTask(task)); if (!errors.isEmpty()) { logIngestModuleErrors(errors); } @@ -1069,8 +1088,8 @@ final class IngestJobPipeline { } /** - * Passes a file from the data source for the ingest job through the file - * ingest task pipeline (file ingest modules). + * Passes a file from the data source for the ingest job through a file + * ingest module pipeline. * * @param task A file ingest task wrapping the file. */ @@ -1097,7 +1116,7 @@ final class IngestJobPipeline { synchronized (fileIngestProgressLock) { ++processedFiles; - if (doUI) { + if (usingNetBeansGUI) { if (processedFiles <= estimatedFilesToProcess) { fileIngestProgressBar.progress(file.getName(), (int) processedFiles); } else { @@ -1111,12 +1130,12 @@ final class IngestJobPipeline { * Run the file through the modules in the pipeline. 
*/ List errors = new ArrayList<>(); - errors.addAll(pipeline.executeTask(task)); + errors.addAll(pipeline.performTask(task)); if (!errors.isEmpty()) { logIngestModuleErrors(errors, file); } - if (doUI && !cancelled) { + if (usingNetBeansGUI && !jobCancelled) { synchronized (fileIngestProgressLock) { /** * Update the file ingest progress bar again, in @@ -1135,7 +1154,7 @@ final class IngestJobPipeline { } } catch (InterruptedException ex) { logger.log(Level.SEVERE, String.format("Unexpected interrupt of file ingest thread during execution of file ingest job (file obj ID = %d)", task.getFileId()), ex); - Thread.currentThread().interrupt(); // Reset thread interrupted flag + Thread.currentThread().interrupt(); } finally { taskScheduler.notifyTaskCompleted(task); checkForStageCompleted(); @@ -1144,15 +1163,15 @@ final class IngestJobPipeline { /** * Passes a data artifact from the data source for the ingest job through - * the data artifact ingest task pipeline (data artifact ingest modules). + * the data artifact ingest module pipeline. * - * @param task A data artifact ingest task wrapping the file. + * @param task A data artifact ingest task wrapping the data artifact. */ void execute(DataArtifactIngestTask task) { try { if (!isCancelled() && !artifactIngestPipeline.isEmpty()) { List errors = new ArrayList<>(); - errors.addAll(artifactIngestPipeline.executeTask(task)); + errors.addAll(artifactIngestPipeline.performTask(task)); if (!errors.isEmpty()) { logIngestModuleErrors(errors); } @@ -1164,15 +1183,15 @@ final class IngestJobPipeline { } /** - * Adds some subset of the streamed files for a streaming mode ingest job to - * this pipeline. + * Adds some streamed files for analysis as part of a streaming mode ingest + * job. * * @param fileObjIds The object IDs of the files. 
*/ void addStreamedFiles(List fileObjIds) { if (hasFileIngestModules()) { - if (stage.equals(Stages.FIRST_STAGE_STREAMING)) { - IngestJobPipeline.taskScheduler.scheduleStreamedFileIngestTasks(this, fileObjIds); + if (stage.equals(IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY)) { + IngestJobExecutor.taskScheduler.scheduleStreamedFileIngestTasks(this, fileObjIds); } else { logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported"); } @@ -1180,15 +1199,13 @@ final class IngestJobPipeline { } /** - * Adds additional files (e.g., extracted or carved files) for any type of - * ingest job to this pipeline after startUp() has been called. Not - * currently supported for second stage of the job. + * Adds additional files (e.g., extracted or carved files) for analysis. * * @param files A list of the files to add. */ void addFiles(List files) { - if (stage.equals(Stages.FIRST_STAGE_STREAMING) - || stage.equals(Stages.FIRST_STAGE)) { + if (stage.equals(IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY) + || stage.equals(IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS)) { taskScheduler.fastTrackFileIngestTasks(this, files); } else { logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported"); @@ -1205,16 +1222,15 @@ final class IngestJobPipeline { } /** - * Adds data artifacts for any type of ingest job to this pipeline after - * startUp() has been called. + * Adds data artifacts for analysis. 
* * @param artifacts */ void addDataArtifacts(List artifacts) { List artifactsToAnalyze = new ArrayList<>(artifacts); - if (stage.equals(Stages.FIRST_STAGE_STREAMING) - || stage.equals(Stages.FIRST_STAGE) - || stage.equals(Stages.SECOND_STAGE)) { + if (stage.equals(IngestJobStage.STREAMED_FILE_ANALYSIS_ONLY) + || stage.equals(IngestJobStage.FILE_AND_HIGH_PRIORITY_DATA_SRC_LEVEL_ANALYSIS) + || stage.equals(IngestJobStage.LOW_PRIORITY_DATA_SRC_LEVEL_ANALYSIS)) { taskScheduler.scheduleDataArtifactIngestTasks(this, artifactsToAnalyze); } else { logErrorMessage(Level.SEVERE, "Adding streaming files to job during stage " + stage.toString() + " not supported"); @@ -1237,56 +1253,58 @@ final class IngestJobPipeline { * @param displayName The new display name. */ void updateDataSourceIngestProgressBarDisplayName(String displayName) { - if (this.doUI && !this.cancelled) { - synchronized (this.dataSourceIngestProgressLock) { - this.dataSourceIngestProgressBar.setDisplayName(displayName); + if (usingNetBeansGUI && !jobCancelled) { + synchronized (dataSourceIngestProgressLock) { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.setDisplayName(displayName); + } } } } /** - * Switches the data source level ingest progress bar for this job to - * determinate mode. This should be called if the total work units to - * process the data source is known. + * Switches the current data source level ingest progress bar to determinate + * mode. This should be called if the total work units to process the data + * source is known. * * @param workUnits Total number of work units for the processing of the * data source. 
*/ void switchDataSourceIngestProgressBarToDeterminate(int workUnits) { - if (this.doUI && !this.cancelled) { - synchronized (this.dataSourceIngestProgressLock) { - if (null != this.dataSourceIngestProgressBar) { - this.dataSourceIngestProgressBar.switchToDeterminate(workUnits); + if (usingNetBeansGUI && !jobCancelled) { + synchronized (dataSourceIngestProgressLock) { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.switchToDeterminate(workUnits); } } } } /** - * Switches the data source level ingest progress bar for this job to + * Switches the current data source level ingest progress bar to * indeterminate mode. This should be called if the total work units to * process the data source is unknown. */ void switchDataSourceIngestProgressBarToIndeterminate() { - if (this.doUI && !this.cancelled) { - synchronized (this.dataSourceIngestProgressLock) { - if (null != this.dataSourceIngestProgressBar) { - this.dataSourceIngestProgressBar.switchToIndeterminate(); + if (usingNetBeansGUI && !jobCancelled) { + synchronized (dataSourceIngestProgressLock) { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.switchToIndeterminate(); } } } } /** - * Updates the data source level ingest progress bar for this job with the - * number of work units performed, if in the determinate mode. + * Updates the current data source level ingest progress bar with the number + * of work units performed, if in the determinate mode. * * @param workUnits Number of work units performed. 
*/ void advanceDataSourceIngestProgressBar(int workUnits) { - if (doUI && !cancelled) { + if (usingNetBeansGUI && !jobCancelled) { synchronized (dataSourceIngestProgressLock) { - if (null != dataSourceIngestProgressBar) { + if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress("", workUnits); } } @@ -1294,15 +1312,15 @@ final class IngestJobPipeline { } /** - * Updates the data source level ingest progress for this job with a new - * task name, where the task name is the "subtitle" under the display name. + * Updates the current data source level ingest progress bar with a new task + * name, where the task name is the "subtitle" under the display name. * * @param currentTask The task name. */ void advanceDataSourceIngestProgressBar(String currentTask) { - if (doUI && !cancelled) { + if (usingNetBeansGUI && !jobCancelled) { synchronized (dataSourceIngestProgressLock) { - if (null != dataSourceIngestProgressBar) { + if (dataSourceIngestProgressBar != null) { dataSourceIngestProgressBar.progress(currentTask); } } @@ -1310,17 +1328,19 @@ final class IngestJobPipeline { } /** - * Updates the data source level ingest progress bar for this with a new - * task name and the number of work units performed, if in the determinate - * mode. The task name is the "subtitle" under the display name. + * Updates the current data source level ingest progress bar with a new task + * name and the number of work units performed, if in the determinate mode. + * The task name is the "subtitle" under the display name. * * @param currentTask The task name. * @param workUnits Number of work units performed. 
*/ void advanceDataSourceIngestProgressBar(String currentTask, int workUnits) { - if (this.doUI && !this.cancelled) { - synchronized (this.fileIngestProgressLock) { - this.dataSourceIngestProgressBar.progress(currentTask, workUnits); + if (usingNetBeansGUI && !jobCancelled) { + synchronized (dataSourceIngestProgressLock) { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.progress(currentTask, workUnits); + } } } } @@ -1333,20 +1353,20 @@ final class IngestJobPipeline { * @return True or false. */ boolean currentDataSourceIngestModuleIsCancelled() { - return this.currentDataSourceIngestModuleCancelled; + return currentDataSourceIngestModuleCancelled; } /** - * Rescind a temporary cancellation of data source level ingest that was + * Rescinds a temporary cancellation of data source level ingest that was * used to stop a single data source level ingest module for this job. * * @param moduleDisplayName The display name of the module that was stopped. */ void currentDataSourceIngestModuleCancellationCompleted(String moduleDisplayName) { - this.currentDataSourceIngestModuleCancelled = false; - this.cancelledDataSourceIngestModules.add(moduleDisplayName); + currentDataSourceIngestModuleCancelled = false; + cancelledDataSourceIngestModules.add(moduleDisplayName); - if (this.doUI) { + if (usingNetBeansGUI) { /** * A new progress bar must be created because the cancel button of * the previously constructed component is disabled by NetBeans when @@ -1354,10 +1374,10 @@ final class IngestJobPipeline { * dialog popped up by NetBeans when the progress bar cancel button * is pressed. 
*/ - synchronized (this.dataSourceIngestProgressLock) { - this.dataSourceIngestProgressBar.finish(); - this.dataSourceIngestProgressBar = null; - this.startDataSourceIngestProgressBar(); + synchronized (dataSourceIngestProgressLock) { + dataSourceIngestProgressBar.finish(); + dataSourceIngestProgressBar = null; + startDataSourceIngestProgressBar(); } } } @@ -1380,7 +1400,7 @@ final class IngestJobPipeline { * job in order to stop the currently executing data source ingest module. */ void cancelCurrentDataSourceIngestModule() { - this.currentDataSourceIngestModuleCancelled = true; + currentDataSourceIngestModuleCancelled = true; } /** @@ -1390,22 +1410,22 @@ final class IngestJobPipeline { * @param reason The cancellation reason. */ void cancel(IngestJob.CancellationReason reason) { - this.cancelled = true; - this.cancellationReason = reason; - IngestJobPipeline.taskScheduler.cancelPendingFileTasksForIngestJob(this); + jobCancelled = true; + cancellationReason = reason; + IngestJobExecutor.taskScheduler.cancelPendingFileTasksForIngestJob(this); - if (this.doUI) { - synchronized (this.dataSourceIngestProgressLock) { - if (null != dataSourceIngestProgressBar) { - dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", this.dataSource.getName())); - dataSourceIngestProgressBar.progress(NbBundle.getMessage(this.getClass(), "IngestJob.progress.cancelling")); + if (usingNetBeansGUI) { + synchronized (dataSourceIngestProgressLock) { + if (dataSourceIngestProgressBar != null) { + dataSourceIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.dataSourceIngest.initialDisplayName", dataSource.getName())); + dataSourceIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); } } synchronized (this.fileIngestProgressLock) { if (null != this.fileIngestProgressBar) { - 
this.fileIngestProgressBar.setDisplayName(NbBundle.getMessage(this.getClass(), "IngestJob.progress.fileIngest.displayName", this.dataSource.getName())); - this.fileIngestProgressBar.progress(NbBundle.getMessage(this.getClass(), "IngestJob.progress.cancelling")); + this.fileIngestProgressBar.setDisplayName(NbBundle.getMessage(getClass(), "IngestJob.progress.fileIngest.displayName", dataSource.getName())); + this.fileIngestProgressBar.progress(NbBundle.getMessage(getClass(), "IngestJob.progress.cancelling")); } } } @@ -1417,18 +1437,20 @@ final class IngestJobPipeline { pausedIngestThreads.clear(); } - // If a data source had no tasks in progress it may now be complete. + /* + * If a data source had no tasks in progress it may now be complete. + */ checkForStageCompleted(); } /** - * Queries whether or not cancellation, i.e., a shutdown of the data source + * Queries whether or not cancellation, i.e., a shut down of the data source * level and file level ingest pipelines for this job, has been requested. * * @return True or false. */ boolean isCancelled() { - return this.cancelled; + return jobCancelled; } /** @@ -1437,7 +1459,7 @@ final class IngestJobPipeline { * @return The cancellation reason, may be not cancelled. */ IngestJob.CancellationReason getCancellationReason() { - return this.cancellationReason; + return cancellationReason; } /** @@ -1447,7 +1469,7 @@ final class IngestJobPipeline { * @param message The message. 
*/ private void logInfoMessage(String message) { - logger.log(Level.INFO, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id = %d)", message, this.dataSource.getName(), this.dataSource.getId(), getIngestJobId(), ingestJobInfo.getIngestJobId())); //NON-NLS + logger.log(Level.INFO, String.format("%s (data source = %s, object Id = %d, job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** @@ -1459,7 +1481,7 @@ final class IngestJobPipeline { * @param throwable The throwable associated with the error. */ private void logErrorMessage(Level level, String message, Throwable throwable) { - logger.log(level, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id = %d)", message, this.dataSource.getName(), this.dataSource.getId(), getIngestJobId(), ingestJobInfo.getIngestJobId()), throwable); //NON-NLS + logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id = %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId()), throwable); //NON-NLS } /** @@ -1470,11 +1492,11 @@ final class IngestJobPipeline { * @param message The message. */ private void logErrorMessage(Level level, String message) { - logger.log(level, String.format("%s (data source = %s, objId = %d, pipeline id = %d, ingest job id %d)", message, this.dataSource.getName(), this.dataSource.getId(), getIngestJobId(), ingestJobInfo.getIngestJobId())); //NON-NLS + logger.log(level, String.format("%s (data source = %s, object Id = %d, ingest job id %d)", message, dataSource.getName(), dataSource.getId(), getIngestJobId())); //NON-NLS } /** - * Write ingest module errors to the log. + * Writes ingest module errors to the log. * * @param errors The errors. */ @@ -1497,8 +1519,7 @@ final class IngestJobPipeline { } /** - * Gets a snapshot of some basic diagnostic statistics for this ingest - * pipeline. + * Gets a snapshot of some basic diagnostic statistics. 
* * @param includeIngestTasksSnapshot Whether or not to include ingest task * stats in the snapshot. @@ -1506,19 +1527,19 @@ final class IngestJobPipeline { * @return The snapshot. */ Snapshot getDiagnosticStatsSnapshot(boolean includeIngestTasksSnapshot) { - /** + /* * Determine whether file ingest is running at the time of this snapshot - * and determine the earliest file ingest level pipeline start time, if + * and determine the earliest file ingest module pipeline start time, if * file ingest was started at all. */ boolean fileIngestRunning = false; Date fileIngestStartTime = null; - for (FileIngestPipeline pipeline : this.fileIngestPipelines) { + for (FileIngestPipeline pipeline : fileIngestPipelines) { if (pipeline.isRunning()) { fileIngestRunning = true; } Date pipelineStartTime = pipeline.getStartTime(); - if (null != pipelineStartTime && (null == fileIngestStartTime || pipelineStartTime.before(fileIngestStartTime))) { + if (pipelineStartTime != null && (fileIngestStartTime == null || pipelineStartTime.before(fileIngestStartTime))) { fileIngestStartTime = pipelineStartTime; } } @@ -1529,8 +1550,8 @@ final class IngestJobPipeline { IngestJobTasksSnapshot tasksSnapshot = null; if (includeIngestTasksSnapshot) { synchronized (fileIngestProgressLock) { - processedFilesCount = this.processedFiles; - estimatedFilesToProcessCount = this.estimatedFilesToProcess; + processedFilesCount = processedFiles; + estimatedFilesToProcessCount = estimatedFilesToProcess; snapShotTime = new Date().getTime(); } tasksSnapshot = taskScheduler.getTasksSnapshotForJob(getIngestJobId()); @@ -1540,7 +1561,7 @@ final class IngestJobPipeline { getIngestJobId(), createTime, getCurrentDataSourceIngestModule(), fileIngestRunning, fileIngestStartTime, - cancelled, cancellationReason, cancelledDataSourceIngestModules, + jobCancelled, cancellationReason, cancelledDataSourceIngestModules, processedFilesCount, estimatedFilesToProcessCount, snapShotTime, tasksSnapshot); } diff --git 
a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java index a2687d5c1d..2d00727858 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobInputStream.java @@ -56,7 +56,7 @@ class IngestJobInputStream implements IngestStream { if (closed) { throw new IngestStreamClosedException("Can not add files - ingest stream is closed"); } - ingestJob.addStreamingIngestFiles(fileObjectIds); + ingestJob.addStreamedFiles(fileObjectIds); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java index 79b97eafe7..2c87487232 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestManager.java @@ -34,6 +34,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; @@ -72,6 +73,7 @@ import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.Content; +import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.TskCoreException; @@ -288,13 +290,103 @@ public class IngestManager implements IngestProgressSnapshotProvider { /** * Handles artifacts posted events published by the Sleuth Kit layer - * blackboard via the event bus for the case database. + * blackboard via the Sleuth Kit event bus. * - * @param tskEvent A Sleuth Kit data model ArtifactsPostedEvent from the - * case database event bus. + * @param tskEvent The event. 
*/ @Subscribe void handleArtifactsPosted(Blackboard.ArtifactsPostedEvent tskEvent) { + /* + * Add any new data artifacts included in the event to the source ingest + * job for possible analysis. + */ + List newDataArtifacts = new ArrayList<>(); + Collection newArtifacts = tskEvent.getArtifacts(); + for (BlackboardArtifact artifact : newArtifacts) { + if (artifact instanceof DataArtifact) { + newDataArtifacts.add((DataArtifact) artifact); + } + } + if (!newDataArtifacts.isEmpty()) { + IngestJob ingestJob = null; + Optional ingestJobId = tskEvent.getIngestJobId(); + if (ingestJobId.isPresent()) { + synchronized (ingestJobsById) { + ingestJob = ingestJobsById.get(ingestJobId.get()); + } + } else { + /* + * There are four use cases where the ingest job ID returned by + * the event is expected to be null: + * + * 1. The artifacts are being posted by a data source processor + * (DSP) module that runs before the ingest job is created, + * i.e., a DSP that does not support streaming ingest and has no + * notion of an ingest job ID. In this use case, the event is + * handled synchronously. The DSP calls + * Blackboard.postArtifacts(), which puts the event on the event + * bus to which this method subscribes, so the event will be + * handled here before the DSP completes and calls + * DataSourceProcessorCallback.done(). This means the code below + * will execute before the ingest job is created, so it will not + * find an ingest job to which to add the artifacts. However, + * the artifacts WILL be analyzed after the ingest job is + * started, when the ingest job executor, working in batch mode, + * schedules ingest tasks for all of the data artifacts in the + * case database. There is a slight risk that the wrong ingest + * job will be selected if multiple ingests of the same data + * source are in progress. + * + * 2. 
The artifacts were posted by an ingest module that either + has not been updated to use the current + Blackboard.postArtifacts() API, or is using it incorrectly. + In this use case, the code below should be able to find the + ingest job to which to add the artifacts via their data + source. There is a slight risk that the wrong ingest job will + be selected if multiple ingests of the same data source are + in progress. + * + * 3. The portable case generator uses a + CommunicationArtifactsHelper constructed with a null ingest + job ID, and the CommunicationArtifactsHelper posts artifacts. + Ingest of that data source might be running, in which case + the data artifact will be analyzed. It also might be analyzed + by a subsequent ingest job for the data source. This is an + acceptable edge case. + * + * 4. The user can manually create timeline events with the + timeline tool, which posts the TSK_TL_EVENT data artifacts. + The user selects the data source for these artifacts. Ingest + of that data source might be running, in which case the data + artifact will be analyzed. It also might be analyzed by a + subsequent ingest job for the data source. This is an + acceptable edge case. + */ + DataArtifact dataArtifact = newDataArtifacts.get(0); + try { + Content artifactDataSource = dataArtifact.getDataSource(); + synchronized (ingestJobsById) { + for (IngestJob job : ingestJobsById.values()) { + Content dataSource = job.getDataSource(); + if (artifactDataSource.getId() == dataSource.getId()) { + ingestJob = job; + break; + } + } + } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to get data source for data artifact (object ID = %d)", dataArtifact.getId()), ex); //NON-NLS + } + } + if (ingestJob != null) { + ingestJob.addDataArtifacts(newDataArtifacts); + } + } + + /* + * Publish Autopsy events for the new artifacts, one event per artifact + * type. 
+ */ for (BlackboardArtifact.Type artifactType : tskEvent.getArtifactTypes()) { ModuleDataEvent legacyEvent = new ModuleDataEvent(tskEvent.getModuleName(), artifactType, tskEvent.getArtifacts(artifactType)); AutopsyEvent autopsyEvent = new BlackboardPostEvent(legacyEvent); @@ -825,7 +917,7 @@ public class IngestManager implements IngestProgressSnapshotProvider { */ void setIngestTaskProgress(DataSourceIngestTask task, String currentModuleName) { IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId()); - IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource()); + IngestThreadActivitySnapshot newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource()); ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap); /* @@ -847,10 +939,10 @@ public class IngestManager implements IngestProgressSnapshotProvider { IngestThreadActivitySnapshot prevSnap = ingestThreadActivitySnapshots.get(task.getThreadId()); IngestThreadActivitySnapshot newSnap; try { - newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile()); + newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource(), task.getFile()); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error getting file from file ingest task", ex); - newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobPipeline().getIngestJobId(), currentModuleName, task.getDataSource()); + newSnap = new IngestThreadActivitySnapshot(task.getThreadId(), task.getIngestJobExecutor().getIngestJobId(), currentModuleName, task.getDataSource()); } 
ingestThreadActivitySnapshots.put(task.getThreadId(), newSnap); diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipeline.java similarity index 67% rename from Core/src/org/sleuthkit/autopsy/ingest/IngestTaskPipeline.java rename to Core/src/org/sleuthkit/autopsy/ingest/IngestPipeline.java index c7a58ac094..7049eaed79 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTaskPipeline.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipeline.java @@ -33,21 +33,24 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; /** - * An abstract superclass for pipelines of ingest modules that execute ingest - * tasks for an ingest job. Subclasses need to extend this class and to - * implement a specialization of the inner PipelineModule abstract superclass. + * An abstract superclass for pipelines of ingest modules that perform the + * ingest tasks that make up an ingest job. A pipeline performs a task by + * passing it sequentially to the process() method of each module in the + * pipeline. * - * NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use - * by one thread at a time. There are a few status fields that are volatile to - * ensure visibility to threads making ingest progress snapshots, but methods - * such as startUp(), executeTask() and shutDown() are not synchronized. - * - * @param The ingest task type. + * @param The type of ingest tasks the pipeline performs. */ -abstract class IngestTaskPipeline { +abstract class IngestPipeline { - private static final Logger logger = Logger.getLogger(IngestTaskPipeline.class.getName()); - private final IngestJobPipeline ingestJobPipeline; + /* + * NOTE ON MULTI-THREADING POLICY: This class is primarily designed for use + * by one thread at a time. 
There are a few status fields that are volatile + * to ensure visibility to threads making ingest progress snapshots, but + * methods such as startUp(), performTask() and shutDown() are not + * synchronized. + */ + private static final Logger logger = Logger.getLogger(IngestPipeline.class.getName()); + private final IngestJobExecutor ingestJobExecutor; private final List moduleTemplates; private final List> modules; private volatile Date startTime; @@ -56,38 +59,34 @@ abstract class IngestTaskPipeline { /** * Constructs the superclass part of a pipeline of ingest modules that - * executes ingest tasks for an ingest job. + * performs ingest tasks for an ingest job. * - * @param ingestPipeline The parent ingest job pipeline for this ingest - * task pipeline. - * @param moduleTemplates The ingest module templates that define this - * ingest task pipeline. May be an empty list. + * @param ingestJobExecutor The ingest job executor for this pipeline. + * @param moduleTemplates The ingest module templates to be used to + * construct the ingest modules for this pipeline. + * May be an empty list if this type of pipeline is + * not needed for the ingest job. */ - IngestTaskPipeline(IngestJobPipeline ingestPipeline, List moduleTemplates) { - this.ingestJobPipeline = ingestPipeline; - /* - * The creation of ingest modules from the ingest module templates has - * been deliberately deferred to the startUp() method so that any and - * all errors in module construction or start up can be reported to the - * client code. - */ + IngestPipeline(IngestJobExecutor ingestJobExecutor, List moduleTemplates) { + this.ingestJobExecutor = ingestJobExecutor; this.moduleTemplates = moduleTemplates; modules = new ArrayList<>(); } /** - * Indicates whether or not there are any ingest modules in this ingest task + * Indicates whether or not there are any ingest modules in this ingest * pipeline. * - * @return True or false. + * @return True or false; always true before startUp() is called. 
*/ boolean isEmpty() { return modules.isEmpty(); } /** - * Queries whether or not this ingest task pipeline is running, i.e., the - * startUp() method has been called and the shutDown() has not been called. + * Queries whether or not this ingest pipeline is running, i.e., the + * startUp() method has been called and the shutDown() method has not been + * called yet. * * @return True or false. */ @@ -96,8 +95,8 @@ abstract class IngestTaskPipeline { } /** - * Starts up this ingest task pipeline by calling the startUp() methods of - * the ingest modules in the pipeline. + * Starts up this ingest pipeline by calling the startUp() methods of the + * ingest modules in the pipeline. * * @return A list of ingest module start up errors, possibly empty. */ @@ -110,21 +109,19 @@ abstract class IngestTaskPipeline { * any and all errors in module construction or start up can be * reported to the client code. */ - createIngestModules(moduleTemplates); + createIngestModules(); errors.addAll(startUpIngestModules()); } else { - errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline already started"))); //NON-NLS + errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline already started"))); //NON-NLS } return errors; } /** - * Creates the ingest modules for this ingest task pipeline from the given - * ingest module templates. - * - * @param moduleTemplates The ingest module templates. + * Creates the ingest modules for this ingest pipeline using its ingest + * module templates. 
*/ - private void createIngestModules(List moduleTemplates) { + private void createIngestModules() { if (modules.isEmpty()) { for (IngestModuleTemplate template : moduleTemplates) { Optional> module = acceptModuleTemplate(template); @@ -137,8 +134,8 @@ abstract class IngestTaskPipeline { /** * Determines if one of the types of ingest modules that can be created from - * a given ingest module template should be added to this ingest task - * pipeline. If so, the ingest module is created and returned. + * a given ingest module template should be added to this ingest pipeline. + * If so, the ingest module is created and returned. * * @param template The ingest module template to be used or ignored, as * appropriate to the pipeline type. @@ -149,7 +146,7 @@ abstract class IngestTaskPipeline { abstract Optional> acceptModuleTemplate(IngestModuleTemplate template); /** - * Starts up the ingest modules in this ingest task pipeline. + * Starts up the ingest modules in this ingest pipeline. * * @return A list of ingest module start up errors, possibly empty. */ @@ -159,7 +156,7 @@ abstract class IngestTaskPipeline { running = true; for (PipelineModule module : modules) { try { - module.startUp(new IngestJobContext(ingestJobPipeline)); + module.startUp(new IngestJobContext(ingestJobExecutor)); } catch (Throwable ex) { /* * A catch-all exception firewall. Start up errors for all of @@ -174,10 +171,10 @@ abstract class IngestTaskPipeline { } /** - * Returns the start up time of this ingest task pipeline. + * Returns the start up time of this ingest pipeline. * - * @return The file processing start time, may be null if this pipeline has - * not been started yet. + * @return The start up time, may be null if this pipeline has not been + * started yet. 
*/ Date getStartTime() { Date reportedStartTime = null; @@ -188,65 +185,66 @@ abstract class IngestTaskPipeline { } /** - * Executes an ingest task by calling the process() methods of the ingest - * modules in this ingest task pipeline. + * Performs an ingest task by sequentially calling the process() methods of + * the ingest modules in this ingest pipeline. * * @param task The task. * - * @return A list of ingest module task processing errors, possibly empty. + * @return A list of ingest module processing errors, possibly empty. */ - List executeTask(T task) { + List performTask(T task) { List errors = new ArrayList<>(); if (running) { - if (!ingestJobPipeline.isCancelled()) { + if (!ingestJobExecutor.isCancelled()) { pauseIfScheduled(); - if (ingestJobPipeline.isCancelled()) { + if (ingestJobExecutor.isCancelled()) { return errors; } try { prepareForTask(task); - } catch (IngestTaskPipelineException ex) { + } catch (IngestPipelineException ex) { errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS return errors; } for (PipelineModule module : modules) { pauseIfScheduled(); - if (ingestJobPipeline.isCancelled()) { + if (ingestJobExecutor.isCancelled()) { break; } try { currentModule = module; currentModule.setProcessingStartTime(); - module.executeTask(ingestJobPipeline, task); - } catch (Throwable ex) { + module.process(ingestJobExecutor, task); + } catch (Throwable ex) { // Catch-all exception firewall /* - * A catch-all exception firewall. Note that a runtime - * exception from a single module does not stop + * Note that an exception from a module does not stop * processing of the task by the other modules in the * pipeline. 
*/ errors.add(new IngestModuleError(module.getDisplayName(), ex)); } - if (ingestJobPipeline.isCancelled()) { + if (ingestJobExecutor.isCancelled()) { break; } } } try { cleanUpAfterTask(task); - } catch (IngestTaskPipelineException ex) { + } catch (IngestPipelineException ex) { errors.add(new IngestModuleError("Ingest Task Pipeline", ex)); //NON-NLS } } else { - errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestTaskPipelineException("Pipeline not started or shut down"))); //NON-NLS + errors.add(new IngestModuleError("Ingest Task Pipeline", new IngestPipelineException("Pipeline not started or shut down"))); //NON-NLS } currentModule = null; return errors; } /** - * Pauses task execution if ingest has been configured to be paused weekly - * at a specified time for a specified duration. + * Pauses this pipeline if ingest has been configured to be paused weekly at + * a specified time, for a specified duration. A pipeline can only be paused + * between calls to module process() methods, i.e., the individual modules + * themselves cannot be paused in the middle of processing a task. 
*/ private void pauseIfScheduled() { if (ScheduledIngestPauseSettings.getPauseEnabled() == true) { @@ -278,7 +276,7 @@ abstract class IngestTaskPipeline { */ LocalDateTime timeNow = LocalDateTime.now(); if ((timeNow.equals(pauseStart) || timeNow.isAfter(pauseStart)) && timeNow.isBefore(pauseEnd)) { - ingestJobPipeline.registerPausedIngestThread(Thread.currentThread()); + ingestJobExecutor.registerPausedIngestThread(Thread.currentThread()); try { long timeRemainingMillis = ChronoUnit.MILLIS.between(timeNow, pauseEnd); logger.log(Level.INFO, String.format("%s pausing at %s for ~%d minutes", Thread.currentThread().getName(), LocalDateTime.now(), TimeUnit.MILLISECONDS.toMinutes(timeRemainingMillis))); @@ -287,27 +285,27 @@ abstract class IngestTaskPipeline { } catch (InterruptedException notLogged) { logger.log(Level.INFO, String.format("%s resuming at %s due to sleep interrupt (ingest job canceled)", Thread.currentThread().getName(), LocalDateTime.now())); } finally { - ingestJobPipeline.unregisterPausedIngestThread(Thread.currentThread()); + ingestJobExecutor.unregisterPausedIngestThread(Thread.currentThread()); } } } } /** - * Does any task type specific preparation required before executing an + * Does any task-type-specific preparation required before performing an * ingest task. * * @param task The task. * - * @throws IngestTaskPipelineException Thrown if there is an error preparing - * to execute the task. + * @throws IngestPipelineException Thrown if there is an error preparing to + * perform the task. */ - abstract void prepareForTask(T task) throws IngestTaskPipelineException; + abstract void prepareForTask(T task) throws IngestPipelineException; /** * Gets the currently running ingest module. * - * @return The module, possibly null if no module is currently running. + * @return The module, possibly null, if no module is currently running. 
*/ PipelineModule getCurrentlyRunningModule() { return currentModule; @@ -345,22 +343,19 @@ abstract class IngestTaskPipeline { } /** - * Does any task type specific clean up required after executing an ingest + * Does any task-type-specific clean up required after performing an ingest * task. * * @param task The task. * - * @throws IngestTaskPipelineException Thrown if there is an error cleaning - * up after performing the task. + * @throws IngestPipelineException Thrown if there is an error cleaning up + * after performing the task. */ - abstract void cleanUpAfterTask(T task) throws IngestTaskPipelineException; + abstract void cleanUpAfterTask(T task) throws IngestPipelineException; /** - * An abstract superclass for a decorator that adds ingest infrastructure - * operations to an ingest module. - * - * IMPORTANT: Subclasses of IngestTaskPipeline need to implement a - * specialization this class + * An abstract superclass for an ingest module decorator that adds ingest + * infrastructure operations to an ingest module. */ static abstract class PipelineModule implements IngestModule { @@ -369,16 +364,17 @@ abstract class IngestTaskPipeline { private volatile Date processingStartTime; /** - * Constructs an instance of an abstract superclass for a decorator that - * adds ingest infrastructure operations to an ingest module. + * Constructs an instance of an abstract superclass for an ingest module + * decorator that adds ingest infrastructure operations to an ingest + * module. * - * @param module The ingest module to be wrapped. + * @param module The ingest module to be decorated. * @param displayName The display name for the module. */ PipelineModule(IngestModule module, String displayName) { this.module = module; this.displayName = displayName; - this.processingStartTime = new Date(); + processingStartTime = new Date(); } /** @@ -410,8 +406,8 @@ abstract class IngestTaskPipeline { /** * Gets the the processing start time for the decorated module. 
* - * @return The start time, will be null if the module has not started - * processing the data source yet. + * @return The start time, not valid if setProcessingStartTime() has not + * been called first. */ Date getProcessingStartTime() { return new Date(processingStartTime.getTime()); @@ -423,17 +419,17 @@ abstract class IngestTaskPipeline { } /** - * Executes an ingest task using the process() method of the decorated + * Performs an ingest task using the process() method of the decorated * module. * - * @param ingestJobPipeline The ingest job pipeline that owns the ingest - * task pipeline this module belongs to. - * @param task The task to execute. + * @param ingestJobExecutor The ingest job executor that owns the ingest + * pipeline to which this module belongs. + * @param task The task to perform. * * @throws IngestModuleException Exception thrown if there is an error * performing the task. */ - abstract void executeTask(IngestJobPipeline ingestJobPipeline, T task) throws IngestModuleException; + abstract void process(IngestJobExecutor ingestJobExecutor, T task) throws IngestModuleException; @Override public void shutDown() { @@ -443,28 +439,28 @@ abstract class IngestTaskPipeline { } /** - * An exception thrown by an ingest task pipeline. + * An exception thrown by an ingest pipeline. */ - public static class IngestTaskPipelineException extends Exception { + static class IngestPipelineException extends Exception { private static final long serialVersionUID = 1L; /** - * Constructs an exception to be thrown by an ingest task pipeline. + * Constructs an exception to be thrown by an ingest pipeline. * * @param message The exception message. */ - public IngestTaskPipelineException(String message) { + IngestPipelineException(String message) { super(message); } /** - * Constructs an exception to be thrown by an ingest task pipeline. + * Constructs an exception to be thrown by an ingest pipeline. * * @param message The exception message. 
* @param cause The exception cause. */ - public IngestTaskPipelineException(String message, Throwable cause) { + IngestPipelineException(String message, Throwable cause) { super(message, cause); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java index e2dd585582..ede9a6ea83 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java @@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.SleuthkitCase; */ public final class IngestServices { - private static Logger logger = Logger.getLogger(IngestServices.class.getName()); + private final static Logger logger = Logger.getLogger(IngestServices.class.getName()); private static IngestServices instance = null; /** @@ -115,7 +115,7 @@ public final class IngestServices { public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) { try { Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName()); + blackboard.postArtifacts(moduleDataEvent.getArtifacts(), moduleDataEvent.getModuleName(), null); } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Failed to post artifacts", ex); } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java index 41c3736986..fcca0dc629 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTask.java @@ -22,37 +22,35 @@ import org.sleuthkit.datamodel.Content; /** * An ingest task that will be executed by an ingest thread using a given ingest - * job pipeline. Three examples of concrete types of ingest tasks are tasks to + * job executor. 
Three examples of concrete types of ingest tasks are tasks to * analyze a data source, tasks to analyze the files in a data source, and tasks - * that analyze data artifacts. + * to analyze data artifacts. */ abstract class IngestTask { private final static long NOT_SET = Long.MIN_VALUE; - private final IngestJobPipeline ingestJobPipeline; + private final IngestJobExecutor ingestJobExecutor; private long threadId; /** * Constructs an ingest task that will be executed by an ingest thread using - * a given ingest job pipeline. Three examples of concrete types of ingest - * tasks are tasks to analyze a data source, tasks to analyze the files in a - * data source, and tasks that analyze data artifacts. + * a given ingest job executor. * - * @param ingestJobPipeline The ingest job pipeline to use to execute the + * @param ingestJobExecutor The ingest job executor to use to execute the * task. */ - IngestTask(IngestJobPipeline ingestJobPipeline) { - this.ingestJobPipeline = ingestJobPipeline; + IngestTask(IngestJobExecutor ingestJobExecutor) { + this.ingestJobExecutor = ingestJobExecutor; threadId = NOT_SET; } /** - * Gets the ingest job pipeline used to complete this task. + * Gets the ingest job executor to use to execute this task. * - * @return The ingest job pipeline. + * @return The ingest job executor. */ - IngestJobPipeline getIngestJobPipeline() { - return ingestJobPipeline; + IngestJobExecutor getIngestJobExecutor() { + return ingestJobExecutor; } /** @@ -61,7 +59,7 @@ abstract class IngestTask { * @return The data source. */ Content getDataSource() { - return getIngestJobPipeline().getDataSource(); + return getIngestJobExecutor().getDataSource(); } /** @@ -84,8 +82,8 @@ abstract class IngestTask { /** * Records the ingest thread ID of the calling thread and executes this task - * using the ingest job pipeline specified when the task was created. 
The - * implementation of the method should simple call + * using the ingest job executor specified when the task was created. The + * implementation of the method should simply call * super.setThreadId(threadId) and getIngestJobPipeline().process(this). * * @param threadId The numeric ID of the ingest thread executing this task. diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java index fdec6e746b..63d45ae3e0 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestTasksScheduler.java @@ -138,7 +138,7 @@ final class IngestTasksScheduler { * task to the pipeline for processing by the * pipeline's ingest modules. */ - synchronized void scheduleIngestTasks(IngestJobPipeline ingestPipeline) { + synchronized void scheduleIngestTasks(IngestJobExecutor ingestPipeline) { if (!ingestPipeline.isCancelled()) { if (ingestPipeline.hasDataSourceIngestModules()) { scheduleDataSourceIngestTask(ingestPipeline); @@ -163,7 +163,7 @@ final class IngestTasksScheduler { * task to the pipeline for processing by the * pipeline's ingest modules. */ - synchronized void scheduleDataSourceIngestTask(IngestJobPipeline ingestPipeline) { + synchronized void scheduleDataSourceIngestTask(IngestJobExecutor ingestPipeline) { if (!ingestPipeline.isCancelled()) { DataSourceIngestTask task = new DataSourceIngestTask(ingestPipeline); try { @@ -190,7 +190,7 @@ final class IngestTasksScheduler { * empty, then all if the files from the data source * are candidates for scheduling. 
*/ - synchronized void scheduleFileIngestTasks(IngestJobPipeline ingestPipeline, Collection files) { + synchronized void scheduleFileIngestTasks(IngestJobExecutor ingestPipeline, Collection files) { if (!ingestPipeline.isCancelled()) { Collection candidateFiles; if (files.isEmpty()) { @@ -220,7 +220,7 @@ final class IngestTasksScheduler { * processing by the pipeline's ingest modules. * @param files A list of file object IDs for the streamed files. */ - synchronized void scheduleStreamedFileIngestTasks(IngestJobPipeline ingestPipeline, List fileIds) { + synchronized void scheduleStreamedFileIngestTasks(IngestJobExecutor ingestPipeline, List fileIds) { if (!ingestPipeline.isCancelled()) { for (long id : fileIds) { /* @@ -252,7 +252,7 @@ final class IngestTasksScheduler { * processing by the pipeline's ingest modules. * @param files The files. */ - synchronized void fastTrackFileIngestTasks(IngestJobPipeline ingestPipeline, Collection files) { + synchronized void fastTrackFileIngestTasks(IngestJobExecutor ingestPipeline, Collection files) { if (!ingestPipeline.isCancelled()) { /* * Put the files directly into the queue for the file ingest @@ -290,7 +290,7 @@ final class IngestTasksScheduler { * target Content of the task to the pipeline for * processing by the pipeline's ingest modules. */ - synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline) { + synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline) { if (!ingestPipeline.isCancelled()) { Blackboard blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard(); try { @@ -318,7 +318,7 @@ final class IngestTasksScheduler { * source; if empty, then all of the data artifacts * from the data source will be scheduled. 
*/ - synchronized void scheduleDataArtifactIngestTasks(IngestJobPipeline ingestPipeline, List artifacts) { + synchronized void scheduleDataArtifactIngestTasks(IngestJobExecutor ingestPipeline, List artifacts) { if (!ingestPipeline.isCancelled()) { for (DataArtifact artifact : artifacts) { DataArtifactIngestTask task = new DataArtifactIngestTask(ingestPipeline, artifact); @@ -373,7 +373,7 @@ final class IngestTasksScheduler { * * @return True or false. */ - synchronized boolean currentTasksAreCompleted(IngestJobPipeline ingestPipeline) { + synchronized boolean currentTasksAreCompleted(IngestJobExecutor ingestPipeline) { long pipelineId = ingestPipeline.getIngestJobId(); return !(dataSourceIngestTasksQueue.hasTasksForJob(pipelineId) || hasTasksForJob(topLevelFileIngestTasksQueue, pipelineId) @@ -402,7 +402,7 @@ final class IngestTasksScheduler { * * @param ingestJobPipeline The ingest pipeline for the job. */ - synchronized void cancelPendingFileTasksForIngestJob(IngestJobPipeline ingestJobPipeline) { + synchronized void cancelPendingFileTasksForIngestJob(IngestJobExecutor ingestJobPipeline) { long jobId = ingestJobPipeline.getIngestJobId(); removeTasksForJob(topLevelFileIngestTasksQueue, jobId); removeTasksForJob(batchedFileIngestTasksQueue, jobId); @@ -549,7 +549,7 @@ final class IngestTasksScheduler { for (Content child : file.getChildren()) { if (child instanceof AbstractFile) { AbstractFile childFile = (AbstractFile) child; - FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobPipeline(), childFile); + FileIngestTask childTask = new FileIngestTask(nextTask.getIngestJobExecutor(), childFile); if (childFile.hasChildren()) { batchedFileIngestTasksQueue.add(childTask); } else if (shouldEnqueueFileTask(childTask)) { @@ -668,7 +668,7 @@ final class IngestTasksScheduler { private static boolean shouldBeCarved(final FileIngestTask task) { try { AbstractFile file = task.getFile(); - return task.getIngestJobPipeline().shouldProcessUnallocatedSpace() && 
file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS); + return task.getIngestJobExecutor().shouldProcessUnallocatedSpace() && file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS); } catch (TskCoreException ex) { return false; } @@ -685,7 +685,7 @@ final class IngestTasksScheduler { private static boolean fileAcceptedByFilter(final FileIngestTask task) { try { AbstractFile file = task.getFile(); - return !(task.getIngestJobPipeline().getFileIngestFilter().fileIsMemberOf(file) == null); + return !(task.getIngestJobExecutor().getFileIngestFilter().fileIsMemberOf(file) == null); } catch (TskCoreException ex) { return false; } @@ -702,7 +702,7 @@ final class IngestTasksScheduler { */ synchronized private static boolean hasTasksForJob(Collection tasks, long pipelineId) { for (IngestTask task : tasks) { - if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { + if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) { return true; } } @@ -720,7 +720,7 @@ final class IngestTasksScheduler { Iterator iterator = tasks.iterator(); while (iterator.hasNext()) { IngestTask task = iterator.next(); - if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { + if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) { iterator.remove(); } } @@ -738,7 +738,7 @@ final class IngestTasksScheduler { private static int countTasksForJob(Collection tasks, long pipelineId) { int count = 0; for (IngestTask task : tasks) { - if (task.getIngestJobPipeline().getIngestJobId() == pipelineId) { + if (task.getIngestJobExecutor().getIngestJobId() == pipelineId) { count++; } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/events/BlackboardPostEvent.java b/Core/src/org/sleuthkit/autopsy/ingest/events/BlackboardPostEvent.java index 7d57b420ab..55400f6d63 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/events/BlackboardPostEvent.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/events/BlackboardPostEvent.java @@ -1,7 +1,7 @@ /* * 
Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2015-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,8 +35,9 @@ import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.TskCoreException; /** - * Event published when new data is posted to the blackboard of a case. The - * "old" value is a legacy ModuleDataEvent object. The "new" value is null. + * An event published when a new artifact (data artifact or analysis result) is + * posted to the blackboard. The "old" value is a legacy ModuleDataEvent object. + * The "new" value is null. */ public final class BlackboardPostEvent extends AutopsyEvent implements Serializable { @@ -45,14 +46,15 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa private transient ModuleDataEvent eventData; /** - * Constructs an event to be published when new content is added to a case - * or there is a change a recorded attribute of existing content. + * Constructs an event published when a new artifact (data artifact or + * analysis result) is posted to the blackboard. The "old" value is a legacy + * ModuleDataEvent object. The "new" value is null. * * @param eventData A ModuleDataEvent object containing the data associated * with the blackboard post. */ public BlackboardPostEvent(ModuleDataEvent eventData) { - /** + /* * Putting a serializable data holding object into oldValue to allow for * lazy loading of the ModuleDataEvent object for remote events. This * bypasses the issues related to the serialization and de-serialization @@ -63,9 +65,9 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa IngestManager.IngestModuleEvent.DATA_ADDED.toString(), new SerializableEventData(eventData.getModuleName(), eventData.getBlackboardArtifactType(), eventData.getArtifacts() != null ? 
eventData.getArtifacts() - .stream() - .map(BlackboardArtifact::getArtifactID) - .collect(Collectors.toList()) : Collections.emptyList()), + .stream() + .map(BlackboardArtifact::getArtifactID) + .collect(Collectors.toList()) : Collections.emptyList()), null ); this.eventData = eventData; @@ -78,13 +80,13 @@ public final class BlackboardPostEvent extends AutopsyEvent implements Serializa */ @Override public Object getOldValue() { - /** - * The eventData field is set in the constructor, but it is transient so - * it will become null when the event is serialized for publication over - * a network. Doing a lazy load of the ModuleDataEvent object bypasses - * the issues related to the serialization and de-serialization of - * BlackboardArtifact objects and may also save database round trips - * from other nodes since subscribers to this event are often not + /* + * The eventData field is set in the constructor, but it is transient, + * so it will become null when the event is serialized for publication + * over a network. Doing a lazy load of the ModuleDataEvent object + * bypasses the issues related to the serialization and de-serialization + * of BlackboardArtifact objects and may also save database round trips + * from other hosts since subscribers to this event are often not * interested in the event data. */ if (null != eventData) { diff --git a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java index eb52c88baf..ceb0ebbde6 100644 --- a/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java +++ b/Core/src/org/sleuthkit/autopsy/logicalimager/dsp/AddLogicalImageTask.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -513,8 +513,7 @@ final class AddLogicalImageTask implements Runnable { private void postArtifacts(List artifacts) { try { - // index the artifact for keyword search - blackboard.postArtifacts(artifacts, MODULE_NAME); + blackboard.postArtifacts(artifacts, MODULE_NAME, null); } catch (Blackboard.BlackboardException ex) { LOGGER.log(Level.SEVERE, "Unable to post artifacts to blackboard", ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java index ec614e56b0..2f83f3d827 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2013-2019 Basis Technology Corp. + * Copyright 2013-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -303,7 +303,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule { .getAnalysisResult(); Case.getCurrentCase().getServices().getArtifactsBlackboard() - .postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName()); + .postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName(), context.getJobId()); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error creating verification failed artifact", ex); } catch (Blackboard.BlackboardException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/modules/drones/DATExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/drones/DATExtractor.java index 3966317d15..dc468db858 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/drones/DATExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/drones/DATExtractor.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -114,7 +114,7 @@ final class DATExtractor extends DroneExtractor { GeoTrackPoints trackPoints = processCSVFile(context, DATFile, csvFilePath); if (trackPoints != null && !trackPoints.isEmpty()) { - (new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile)).addTrack(DATFile.getName(), trackPoints, null); + (new GeoArtifactsHelper(getSleuthkitCase(), getName(), "DatCon", DATFile, context.getJobId())).addTrack(DATFile.getName(), trackPoints, null); } else { logger.log(Level.INFO, String.format("No trackpoints with valid longitude or latitude found in %s", DATFile.getName())); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java index 75cef1a8f1..574b034ab4 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java @@ -341,7 +341,7 @@ class SevenZipExtractor { * keyword search, and fire an event to notify UI of this * new artifact */ - blackboard.postArtifact(artifact, MODULE_NAME); + blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId()); String msg = NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS @@ -870,7 +870,7 @@ class SevenZipExtractor { * keyword search, and fire an event to notify UI of this * new artifact */ - blackboard.postArtifact(artifact, MODULE_NAME); + blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error( diff --git 
a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java index 5eb6f91e12..95b3dc7132 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -172,7 +172,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges * post the artifact which will index the artifact for keyword * search, and fire an event to notify UI of this new artifact */ - blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); + blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java index 81e7c1877e..3d08163c9e 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2018 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -212,7 +212,7 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter * post the artifact which will index the artifact for keyword * search, and fire an event to notify UI of this new artifact */ - blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); + blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName(), context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java index 586543c003..0d3f40bd3e 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -155,7 +155,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule { * keyword search, and fire an event to notify UI of this * new artifact */ - blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName()); + blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName(), jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message()); diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 0288d5b463..734eedaf35 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2013-2018 Basis Technology Corp. + * Copyright 2013-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -206,7 +206,7 @@ public class FileTypeIdIngestModule implements FileIngestModule { * keyword search, and fire an event to notify UI of this * new artifact */ - tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName()); + tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName(), jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java index 8b6cc47d79..c89e7893c6 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.concurrent.atomic.AtomicLong; @@ -43,13 +42,11 @@ import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.HashHitInfo; import org.sleuthkit.datamodel.HashUtility; import org.sleuthkit.datamodel.Score; -import org.sleuthkit.datamodel.Score.Significance; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; @@ -552,7 +549,7 @@ public class HashDbIngestModule implements FileIngestModule { * post the artifact which will index the artifact for keyword * search, and fire an event to notify UI of this new artifact */ - blackboard.postArtifact(badFile, moduleName); + blackboard.postArtifact(badFile, moduleName, jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error( diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java index d447f204d5..ec259e933a 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java @@ 
-1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2014-2018 Basis Technology Corp. + * Copyright 2014-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -151,7 +151,7 @@ final class FilesIdentifierIngestModule implements FileIngestModule { try { // Post thet artifact to the blackboard. - blackboard.postArtifact(artifact, MODULE_NAME); + blackboard.postArtifact(artifact, MODULE_NAME, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName()); diff --git a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ALeappAnalyzerIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ALeappAnalyzerIngestModule.java index 3409098d95..06f367d691 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ALeappAnalyzerIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ALeappAnalyzerIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -101,7 +101,7 @@ public class ALeappAnalyzerIngestModule implements DataSourceIngestModule { } try { - aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName()); + aLeappFileProcessor = new LeappFileProcessor(XMLFILE, ALeappAnalyzerModuleFactory.getModuleName(), context); } catch (IOException | IngestModuleException | NoCurrentCaseException ex) { throw new IngestModuleException(Bundle.ALeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex); } diff --git a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ILeappAnalyzerIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ILeappAnalyzerIngestModule.java index 339be316a9..c3afd036d8 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ILeappAnalyzerIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/ILeappAnalyzerIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -101,7 +101,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule { } try { - iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName()); + iLeappFileProcessor = new LeappFileProcessor(XMLFILE, ILeappAnalyzerModuleFactory.getModuleName(), context); } catch (IOException | IngestModuleException | NoCurrentCaseException ex) { throw new IngestModuleException(Bundle.ILeappAnalyzerIngestModule_error_ileapp_file_processor_init(), ex); } @@ -333,7 +333,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule { * added to reports */ private void addILeappReportToReports(Path iLeappOutputDir, Case currentCase) { - List allIndexFiles = new ArrayList<>(); + List allIndexFiles; try (Stream walk = Files.walk(iLeappOutputDir)) { @@ -402,7 +402,7 @@ public class ILeappAnalyzerIngestModule implements DataSourceIngestModule { String fileName = FilenameUtils.getName(ffp); String filePath = FilenameUtils.getPath(ffp); - List iLeappFiles = new ArrayList<>(); + List iLeappFiles; try { if (filePath.isEmpty()) { iLeappFiles = fileManager.findFiles(dataSource, fileName); //NON-NLS diff --git a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/LeappFileProcessor.java b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/LeappFileProcessor.java index 06ee9efe4c..8b500820d2 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/LeappFileProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/leappanalyzers/LeappFileProcessor.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -61,6 +61,7 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.NetworkUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.datamodel.AbstractFile; @@ -108,10 +109,10 @@ public final class LeappFileProcessor { * Main constructor. * * @param attributeType The BlackboardAttribute type or null if not - * used. used. - * @param columnName The name of the column in the tsv file. - * @param required Whether or not this attribute is required to be - * present. + * used. used. + * @param columnName The name of the column in the tsv file. + * @param required Whether or not this attribute is required to be + * present. 
*/ TsvColumn(BlackboardAttribute.Type attributeType, String columnName, boolean required) { this.attributeType = attributeType; @@ -144,6 +145,7 @@ public final class LeappFileProcessor { private static final Logger logger = Logger.getLogger(LeappFileProcessor.class.getName()); private final String xmlFile; //NON-NLS private final String moduleName; + private final IngestJobContext context; private final Map tsvFiles; private final Map tsvFileArtifacts; @@ -192,15 +194,16 @@ public final class LeappFileProcessor { .put("call history.tsv", "calllog") .build(); - Blackboard blkBoard; + private final Blackboard blkBoard; - public LeappFileProcessor(String xmlFile, String moduleName) throws IOException, IngestModuleException, NoCurrentCaseException { + public LeappFileProcessor(String xmlFile, String moduleName, IngestJobContext context) throws IOException, IngestModuleException, NoCurrentCaseException { this.tsvFiles = new HashMap<>(); this.tsvFileArtifacts = new HashMap<>(); this.tsvFileArtifactComments = new HashMap<>(); this.tsvFileAttributes = new HashMap<>(); this.xmlFile = xmlFile; this.moduleName = moduleName; + this.context = context; blkBoard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); @@ -218,7 +221,8 @@ public final class LeappFileProcessor { "LeappFileProcessor.has.run=Leapp", "LeappFileProcessor.Leapp.cancelled=Leapp run was canceled", "LeappFileProcessor.completed=Leapp Processing Completed", - "LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory"}) + "LeappFileProcessor.error.reading.Leapp.directory=Error reading Leapp Output Directory" + }) public ProcessResult processFiles(Content dataSource, Path moduleOutputPath, AbstractFile LeappFile) { try { List LeappTsvOutputFiles = findTsvFiles(moduleOutputPath); @@ -249,7 +253,7 @@ public final class LeappFileProcessor { * we know we want to process and return the list to process those files. 
*/ private List findTsvFiles(Path LeappOutputDir) throws IngestModuleException { - List allTsvFiles = new ArrayList<>(); + List allTsvFiles; List foundTsvFiles = new ArrayList<>(); try (Stream walk = Files.walk(LeappOutputDir)) { @@ -275,7 +279,7 @@ public final class LeappFileProcessor { * Process the Leapp files that were found that match the xml mapping file * * @param LeappFilesToProcess List of files to process - * @param LeappImageFile Abstract file to create artifact for + * @param LeappImageFile Abstract file to create artifact for * * @throws FileNotFoundException * @throws IOException @@ -308,7 +312,7 @@ public final class LeappFileProcessor { * Process the Leapp files that were found that match the xml mapping file * * @param LeappFilesToProcess List of files to process - * @param dataSource The data source. + * @param dataSource The data source. * * @throws FileNotFoundException * @throws IOException @@ -318,7 +322,7 @@ public final class LeappFileProcessor { for (String LeappFileName : LeappFilesToProcess) { String fileName = FilenameUtils.getName(LeappFileName); - File LeappFile = new File(LeappFileName); + File LeappFile = new File(LeappFileName); if (tsvFileAttributes.containsKey(fileName)) { List attrList = tsvFileAttributes.get(fileName); BlackboardArtifact.Type artifactType = tsvFileArtifacts.get(fileName); @@ -345,12 +349,12 @@ public final class LeappFileProcessor { String trackpointSegmentName = null; GeoTrackPoints pointList = new GeoTrackPoints(); AbstractFile geoAbstractFile = null; - + if (LeappFile == null || !LeappFile.exists() || fileName == null) { - logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile == null ? LeappFile.toString() : "")); + logger.log(Level.WARNING, String.format("Leap file: %s is null or does not exist", LeappFile != null ? 
LeappFile.toString() : "")); return; } else if (attrList == null || artifactType == null || dataSource == null) { - logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile == null ? LeappFile.toString() : "")); + logger.log(Level.WARNING, String.format("attribute list, artifact type or dataSource not provided for %s", LeappFile.toString())); return; } @@ -405,11 +409,10 @@ public final class LeappFileProcessor { } } } - + try { - if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase() == "trackpoint") { - (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile)).addTrack(trackpointSegmentName, pointList, new ArrayList<>()); - + if (ACCOUNT_RELATIONSHIPS.getOrDefault(fileName.toLowerCase(), "norelationship").toLowerCase().equals("trackpoint")) { + (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, "", geoAbstractFile, context.getJobId())).addTrack(trackpointSegmentName, pointList, new ArrayList<>()); } } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS @@ -418,10 +421,9 @@ public final class LeappFileProcessor { } @NbBundle.Messages({ - "LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact.", + "LeappFileProcessor.cannot.create.waypoint.relationship=Cannot create TSK_WAYPOINT artifact." 
}) - - private void createRoute (Collection bbattributes, Content dataSource, String fileName) throws IngestModuleException { + private void createRoute(Collection bbattributes, Content dataSource, String fileName) throws IngestModuleException { Double startLatitude = Double.valueOf(0); Double startLongitude = Double.valueOf(0); @@ -433,9 +435,9 @@ public final class LeappFileProcessor { Long dateTime = Long.valueOf(0); Collection otherAttributes = new ArrayList<>(); String sourceFile = null; - AbstractFile absFile = null; + AbstractFile absFile; String comment = ""; - + try { for (BlackboardAttribute bba : bbattributes) { switch (bba.getAttributeType().getTypeName()) { @@ -478,19 +480,17 @@ public final class LeappFileProcessor { GeoWaypoints waypointList = new GeoWaypoints(); waypointList.addPoint(new Waypoint(startLatitude, startLongitude, zeroValue, "")); waypointList.addPoint(new Waypoint(endLatitude, endLongitude, zeroValue, locationName)); - (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addRoute(destinationName, dateTime, waypointList, new ArrayList<>()); - + (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addRoute(destinationName, dateTime, waypointList, new ArrayList<>()); + } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_waypoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS } - - - } - - @NbBundle.Messages({ - "LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact.", - }) + } + + @NbBundle.Messages({ + "LeappFileProcessor.cannot.create.trackpoint.relationship=Cannot create TSK_TRACK_POINT artifact." 
+ }) private AbstractFile createTrackpoint(Collection bbattributes, Content dataSource, String fileName, String trackpointSegmentName, GeoTrackPoints pointList) throws IngestModuleException { Double latitude = Double.valueOf(0); @@ -503,7 +503,7 @@ public final class LeappFileProcessor { String sourceFile = null; String comment = null; AbstractFile absFile = null; - + try { for (BlackboardAttribute bba : bbattributes) { switch (bba.getAttributeType().getTypeName()) { @@ -538,29 +538,24 @@ public final class LeappFileProcessor { if (absFile == null) { absFile = (AbstractFile) dataSource; } - if ((trackpointSegmentName == null) || (trackpointSegmentName == segmentName)) { - trackpointSegmentName = segmentName; - pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); + if ((trackpointSegmentName == null) || (trackpointSegmentName.equals(segmentName))) { + pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); } else { - (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile)).addTrack(segmentName, pointList, new ArrayList<>()); - trackpointSegmentName = segmentName; - pointList = new GeoTrackPoints(); - pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); - + (new GeoArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), moduleName, comment, absFile, context.getJobId())).addTrack(segmentName, pointList, new ArrayList<>()); + pointList.addPoint(new TrackPoint(latitude, longitude, altitude, segmentName, zeroValue, zeroValue, zeroValue, dateTime)); + } } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_trackpoint_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS } - - return absFile; - + + return absFile; + } 
- @NbBundle.Messages({ - "LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship.", + "LeappFileProcessor.cannot.create.message.relationship=Cannot create TSK_MESSAGE Relationship." }) - private void createMessageRelationship(Collection bbattributes, Content dataSource, String fileName) throws IngestModuleException { String messageType = null; @@ -577,7 +572,7 @@ public final class LeappFileProcessor { List otherAttributes = new ArrayList<>(); List fileAttachments = new ArrayList<>(); String sourceFile = null; - MessageAttachments messageAttachments = null; + MessageAttachments messageAttachments; try { for (BlackboardAttribute bba : bbattributes) { @@ -614,7 +609,7 @@ public final class LeappFileProcessor { sourceFile = bba.getValueString(); break; case "TSK_READ_STATUS": - if (bba.getValueInt() == 1 ) { + if (bba.getValueInt() == 1) { messageStatus = MessageReadStatus.READ; } else { messageStatus = MessageReadStatus.UNREAD; @@ -638,22 +633,22 @@ public final class LeappFileProcessor { AbstractFile absFile = findAbstractFile(dataSource, sourceFile); if (absFile == null) { absFile = (AbstractFile) dataSource; - } - CommunicationArtifactsHelper accountArtifact; - Account.Type accountType = getAccountType(fileName); - if (alternateId == null) { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType); - } else { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType, accountType, alternateId); } - BlackboardArtifact messageArtifact = accountArtifact.addMessage(messageType, communicationDirection, senderId, - receipentId, dateTime, messageStatus, subject, - messageText, threadId, otherAttributes); + CommunicationArtifactsHelper accountHelper; + Account.Type accountType = getAccountType(fileName); + if (alternateId == null) { + accountHelper = new 
CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, context.getJobId()); + } else { + accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, accountType, alternateId, context.getJobId()); + } + BlackboardArtifact messageArtifact = accountHelper.addMessage(messageType, communicationDirection, senderId, + receipentId, dateTime, messageStatus, subject, + messageText, threadId, otherAttributes); if (!fileAttachments.isEmpty()) { messageAttachments = new MessageAttachments(fileAttachments, new ArrayList<>()); - accountArtifact.addAttachments(messageArtifact, messageAttachments); + accountHelper.addAttachments(messageArtifact, messageAttachments); } } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_message_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS @@ -662,7 +657,7 @@ public final class LeappFileProcessor { } @NbBundle.Messages({ - "LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship.", + "LeappFileProcessor.cannot.create.contact.relationship=Cannot create TSK_CONTACT Relationship." 
}) private void createContactRelationship(Collection bbattributes, Content dataSource, String fileName) throws IngestModuleException { @@ -715,16 +710,16 @@ public final class LeappFileProcessor { } Account.Type accountType = getAccountType(fileName); if (accountType != null) { - - CommunicationArtifactsHelper accountArtifact; + + CommunicationArtifactsHelper accountHelper; if (alternateId == null) { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType); + accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, context.getJobId()); } else { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType, accountType, alternateId); + accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, accountType, alternateId, context.getJobId()); } - BlackboardArtifact messageArtifact = accountArtifact.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes); + BlackboardArtifact messageArtifact = accountHelper.addContact(contactName, phoneNumber, homePhoneNumber, mobilePhoneNumber, emailAddr, otherAttributes); } } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_contact_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS @@ -732,14 +727,13 @@ public final class LeappFileProcessor { } @NbBundle.Messages({ - "LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship.", + "LeappFileProcessor.cannot.create.calllog.relationship=Cannot create TSK_CALLLOG Relationship." 
}) - private void createCalllogRelationship(Collection bbattributes, Content dataSource, String fileName) throws IngestModuleException { String callerId = null; String alternateId = null; - List calleeId = Arrays.asList(); + List calleeId = Arrays.asList(); CommunicationDirection communicationDirection = CommunicationDirection.UNKNOWN; Long startDateTime = Long.valueOf(0); Long endDateTime = Long.valueOf(0); @@ -751,14 +745,14 @@ public final class LeappFileProcessor { for (BlackboardAttribute bba : bbattributes) { switch (bba.getAttributeType().getTypeName()) { case "TSK_TEXT_FILE": - sourceFile = bba.getValueString(); - break; + sourceFile = bba.getValueString(); + break; case "TSK_DATETIME_START": - startDateTime = bba.getValueLong(); - break; + startDateTime = bba.getValueLong(); + break; case "TSK_DATETIME_END": - startDateTime = bba.getValueLong(); - break; + startDateTime = bba.getValueLong(); + break; case "TSK_DIRECTION": if (bba.getValueString().toLowerCase().equals("outgoing")) { communicationDirection = CommunicationDirection.OUTGOING; @@ -773,8 +767,8 @@ public final class LeappFileProcessor { break; case "TSK_PHONE_NUMBER_TO": if (!bba.getValueString().isEmpty()) { - String [] calleeTempList = bba.getValueString().split(",", 0); - calleeId = Arrays.asList(calleeTempList); + String[] calleeTempList = bba.getValueString().split(",", 0); + calleeId = Arrays.asList(calleeTempList); } break; case "TSK_ID": @@ -786,32 +780,32 @@ public final class LeappFileProcessor { break; } } - - if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING) { - String [] calleeTempList = callerId.split(",", 0); - calleeId = Arrays.asList(calleeTempList); - callerId = null; - } + + if (calleeId.isEmpty() && communicationDirection == CommunicationDirection.OUTGOING && callerId != null) { + String[] calleeTempList = callerId.split(",", 0); + calleeId = Arrays.asList(calleeTempList); + callerId = null; + } AbstractFile absFile = 
findAbstractFile(dataSource, sourceFile); if (absFile == null) { absFile = (AbstractFile) dataSource; } Account.Type accountType = getAccountType(fileName); - CommunicationArtifactsHelper accountArtifact; + CommunicationArtifactsHelper accountHelper; if (accountType != null) { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType); + accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, context.getJobId()); } else { - accountArtifact = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), - moduleName, absFile, accountType, accountType, alternateId); + accountHelper = new CommunicationArtifactsHelper(Case.getCurrentCaseThrows().getSleuthkitCase(), + moduleName, absFile, accountType, accountType, alternateId, context.getJobId()); } - BlackboardArtifact callLogArtifact = accountArtifact.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes); + accountHelper.addCalllog(communicationDirection, callerId, calleeId, startDateTime, endDateTime, mediaType, otherAttributes); } catch (NoCurrentCaseException | TskCoreException | BlackboardException ex) { throw new IngestModuleException(Bundle.LeappFileProcessor_cannot_create_calllog_relationship() + ex.getLocalizedMessage(), ex); //NON-NLS } } - + private Account.Type getAccountType(String AccountTypeName) { switch (AccountTypeName.toLowerCase()) { case "zapya.tsv": @@ -849,7 +843,7 @@ public final class LeappFileProcessor { case "whatsapp - contacts.tsv": return Account.Type.WHATSAPP; case "tangomessages messages.tsv": - return Account.Type.TANGO; + return Account.Type.TANGO; case "shareit file transfer.tsv": return Account.Type.SHAREIT; case "line - calllogs.tsv": @@ -880,20 +874,22 @@ public final class LeappFileProcessor { return Account.Type.PHONE; } } - + /** * Process the line 
read and create the necessary attributes for it. * - * @param lineValues List of column values. + * @param lineValues List of column values. * @param columnIndexes Mapping of column headers (trimmed; to lower case) - * to column index. All header columns and only all header columns should be - * present. - * @param attrList The list of attributes as specified for the schema of - * this file. - * @param fileName The name of the file being processed. - * @param lineNum The line number in the file. + * to column index. All header columns and only all + * header columns should be present. + * @param attrList The list of attributes as specified for the schema + * of this file. + * @param fileName The name of the file being processed. + * @param lineNum The line number in the file. + * * @return The collection of blackboard attributes for the artifact created - * from this line. + * from this line. + * * @throws IngestModuleException */ private Collection processReadLine(List lineValues, Map columnIndexes, @@ -930,7 +926,7 @@ public final class LeappFileProcessor { String formattedValue = formatValueBasedOnAttrType(colAttr, value); - BlackboardAttribute attr = (value == null) ? null : getAttribute(colAttr.getAttributeType(), formattedValue, fileName); + BlackboardAttribute attr = getAttribute(colAttr.getAttributeType(), formattedValue, fileName); if (attr == null) { logger.log(Level.WARNING, String.format("Blackboard attribute could not be parsed column %s at line %d in file %s. Omitting row.", colAttr.getColumnName(), lineNum, fileName)); return Collections.emptyList(); @@ -949,9 +945,10 @@ public final class LeappFileProcessor { * Check type of attribute and possibly format string based on it. 
* * @param colAttr Column Attribute information - * @param value string to be formatted + * @param value string to be formatted + * * @return formatted string based on attribute type if no attribute type - * found then return original string + * found then return original string */ private String formatValueBasedOnAttrType(TsvColumn colAttr, String value) { if (colAttr.getAttributeType().getTypeName().equals("TSK_DOMAIN")) { @@ -971,9 +968,10 @@ public final class LeappFileProcessor { * value. * * @param attrType The attribute type. - * @param value The string value to be converted to the appropriate data - * type for the attribute type. + * @param value The string value to be converted to the appropriate data + * type for the attribute type. * @param fileName The file name that the value comes from. + * * @return The generated blackboard attribute. */ private BlackboardAttribute getAttribute(BlackboardAttribute.Type attrType, String value, String fileName) { @@ -998,7 +996,7 @@ public final class LeappFileProcessor { (v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v).longValue())); case DOUBLE: return parseAttrValue(value.trim(), attrType, fileName, true, false, - (v) -> new BlackboardAttribute(attrType, moduleName, (double) Double.valueOf(v))); + (v) -> new BlackboardAttribute(attrType, moduleName, Double.valueOf(v))); case BYTE: return parseAttrValue(value.trim(), attrType, fileName, true, false, (v) -> new BlackboardAttribute(attrType, moduleName, new byte[]{Byte.valueOf(v)})); @@ -1022,7 +1020,9 @@ public final class LeappFileProcessor { * Handles converting a string value to a blackboard attribute. * * @param orig The original string value. + * * @return The generated blackboard attribute. 
+ * * @throws ParseException * @throws NumberFormatException */ @@ -1033,36 +1033,41 @@ public final class LeappFileProcessor { * Runs parsing function on string value to convert to right data type and * generates a blackboard attribute for that converted data type. * - * @param value The string value. - * @param attrType The blackboard attribute type. - * @param fileName The name of the file from which the value comes. - * @param blankIsNull If string is blank return null attribute. - * @param zeroIsNull If string is some version of 0, return null attribute. + * @param value The string value. + * @param attrType The blackboard attribute type. + * @param fileName The name of the file from which the value comes. + * @param blankIsNull If string is blank return null attribute. + * @param zeroIsNull If string is some version of 0, return null + * attribute. * @param valueConverter The means of converting the string value to an - * appropriate blackboard attribute. + * appropriate blackboard attribute. + * * @return The generated blackboard attribute or null if not determined. 
*/ private BlackboardAttribute parseAttrValue(String value, BlackboardAttribute.Type attrType, String fileName, boolean blankIsNull, boolean zeroIsNull, ParseExceptionFunction valueConverter) { // remove non-printable characters from tsv input // https://stackoverflow.com/a/6199346 - value = value.replaceAll("\\p{C}", ""); + String sanitizedValue = value.replaceAll("\\p{C}", ""); - if (blankIsNull && StringUtils.isBlank(value)) { + if (blankIsNull && StringUtils.isBlank(sanitizedValue)) { return null; } - if (zeroIsNull && value.matches("^\\s*[0\\.]*\\s*$")) { + if (zeroIsNull && sanitizedValue.matches("^\\s*[0\\.]*\\s*$")) { return null; } try { - return valueConverter.apply(value); + return valueConverter.apply(sanitizedValue); } catch (NumberFormatException | ParseException ex) { - logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", value, attrType.getValueType().getLabel(), fileName), ex); + logger.log(Level.WARNING, String.format("Unable to format '%s' as value type %s while converting to attributes from %s.", sanitizedValue, attrType.getValueType().getLabel(), fileName), ex); return null; } } + /** + * Read the XML config file and load the mappings into maps + */ @NbBundle.Messages({ "LeappFileProcessor.cannot.load.artifact.xml=Cannot load xml artifact file.", "LeappFileProcessor.cannotBuildXmlParser=Cannot buld an XML parser.", @@ -1070,10 +1075,6 @@ public final class LeappFileProcessor { "LeappFileProcessor.postartifacts_error=Error posting Blackboard Artifact", "LeappFileProcessor.error.creating.new.artifacts=Error creating new artifacts." 
}) - - /** - * Read the XML config file and load the mappings into maps - */ private void loadConfigFile() throws IngestModuleException { Document xmlinput; try { @@ -1120,7 +1121,7 @@ public final class LeappFileProcessor { BlackboardArtifact.Type foundArtifactType = null; try { - foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getArtifactType(artifactName); + foundArtifactType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getArtifactType(artifactName); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch artifact type for %s.", artifactName), ex); } @@ -1157,7 +1158,7 @@ public final class LeappFileProcessor { for (int k = 0; k < attributeNlist.getLength(); k++) { NamedNodeMap nnm = attributeNlist.item(k).getAttributes(); String attributeName = nnm.getNamedItem("attributename").getNodeValue(); - + if (!attributeName.toLowerCase().matches("null")) { String columnName = nnm.getNamedItem("columnName").getNodeValue(); String required = nnm.getNamedItem("required").getNodeValue(); @@ -1165,7 +1166,7 @@ public final class LeappFileProcessor { BlackboardAttribute.Type foundAttrType = null; try { - foundAttrType = Case.getCurrentCase().getSleuthkitCase().getAttributeType(attributeName.toUpperCase()); + foundAttrType = Case.getCurrentCase().getSleuthkitCase().getBlackboard().getAttributeType(attributeName.toUpperCase()); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("There was an issue that arose while trying to fetch attribute type for %s.", attributeName), ex); } @@ -1181,10 +1182,13 @@ public final class LeappFileProcessor { if (columnName == null) { logger.log(Level.SEVERE, String.format("No column name provided for [%s]", getXmlAttrIdentifier(parentName, attributeName))); + continue; } else if (columnName.trim().length() != columnName.length()) { logger.log(Level.SEVERE, String.format("Column name '%s' starts or ends with whitespace for [%s]", 
columnName, getXmlAttrIdentifier(parentName, attributeName))); + continue; } else if (columnName.matches("[^ \\S]")) { logger.log(Level.SEVERE, String.format("Column name '%s' contains invalid characters [%s]", columnName, getXmlAttrIdentifier(parentName, attributeName))); + continue; } TsvColumn thisCol = new TsvColumn( @@ -1209,11 +1213,12 @@ public final class LeappFileProcessor { /** * Generic method for creating a blackboard artifact with attributes * - * @param artType The artifact type. - * @param dataSource is the Content object that needs to have the artifact - * added for it + * @param artType The artifact type. + * @param dataSource is the Content object that needs to have the artifact + * added for it * @param bbattributes is the collection of blackboard attributes that need - * to be added to the artifact after the artifact has been created + * to be added to the artifact after the artifact has + * been created * * @return The newly-created artifact, or null on error */ @@ -1225,7 +1230,7 @@ public final class LeappFileProcessor { case ANALYSIS_RESULT: return dataSource.newAnalysisResult(artType, Score.SCORE_UNKNOWN, null, null, null, bbattributes).getAnalysisResult(); default: - logger.log(Level.SEVERE, "Unknown category type: " + artType.getCategory().getDisplayName()); + logger.log(Level.SEVERE, String.format("Unknown category type: %s", artType.getCategory().getDisplayName())); return null; } } catch (TskException ex) { @@ -1238,7 +1243,7 @@ public final class LeappFileProcessor { * Method to post a list of BlackboardArtifacts to the blackboard. * * @param artifacts A list of artifacts. IF list is empty or null, the - * function will return. + * function will return. 
*/ void postArtifacts(Collection artifacts) { if (artifacts == null || artifacts.isEmpty()) { @@ -1246,7 +1251,7 @@ public final class LeappFileProcessor { } try { - Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName); + Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifacts(artifacts, moduleName, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, Bundle.LeappFileProcessor_postartifacts_error(), ex); //NON-NLS } @@ -1259,7 +1264,7 @@ public final class LeappFileProcessor { */ private void configExtractor() throws IOException { PlatformUtil.extractResourceToUserConfigDir(LeappFileProcessor.class, - xmlFile, true); + xmlFile, true); } private static final Set ALLOWED_EXTENSIONS = new HashSet<>(Arrays.asList("zip", "tar", "tgz")); @@ -1316,14 +1321,14 @@ public final class LeappFileProcessor { } } - + private AbstractFile findAbstractFile(Content dataSource, String fileNamePath) { if (fileNamePath == null) { return null; } - + List files; - + String fileName = FilenameUtils.getName(fileNamePath); String filePath = FilenameUtils.normalize(FilenameUtils.getPath(fileNamePath), true); @@ -1347,4 +1352,4 @@ public final class LeappFileProcessor { return null; } - } +} diff --git a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java index cb6e017067..2806931f69 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java +++ b/Core/src/org/sleuthkit/autopsy/modules/pictureanalyzer/impls/EXIFProcessor.java @@ -172,7 +172,7 @@ public class EXIFProcessor implements PictureProcessor { artifacts.add(userSuspectedArtifact); try { - blackboard.postArtifacts(artifacts, MODULE_NAME); + blackboard.postArtifacts(artifacts, MODULE_NAME, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, String.format("Error 
posting TSK_METADATA_EXIF and TSK_USER_CONTENT_SUSPECTED artifacts for %s (object ID = %d)", file.getName(), file.getId()), ex); //NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java index f2b7e5942f..5a7a1646ed 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2018-2019 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -352,7 +352,7 @@ public class PlasoIngestModule implements DataSourceIngestModule { * keyword search, and fire an event to notify UI of * this new artifact */ - blackboard.postArtifact(bbart, MODULE_NAME); + blackboard.postArtifact(bbart, MODULE_NAME, context.getJobId()); } catch (BlackboardException ex) { logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS } diff --git a/Core/src/org/sleuthkit/autopsy/modules/yara/YaraIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/yara/YaraIngestModule.java index 40d4c4abb7..b2980ddcbb 100755 --- a/Core/src/org/sleuthkit/autopsy/modules/yara/YaraIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/yara/YaraIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2020 Basis Technology Corp. + * Copyright 2020-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -159,7 +159,7 @@ public class YaraIngestModule extends FileIngestModuleAdapter { if(!artifacts.isEmpty()) { Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); - blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName()); + blackboard.postArtifacts(artifacts, YaraIngestModuleFactory.getModuleName(), context.getJobId()); } } catch (BlackboardException | NoCurrentCaseException | IngestModuleException | TskCoreException | YaraWrapperException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/report/infrastructure/TableReportGenerator.java b/Core/src/org/sleuthkit/autopsy/report/infrastructure/TableReportGenerator.java index e2c4d0d1eb..0ab92ddd8c 100644 --- a/Core/src/org/sleuthkit/autopsy/report/infrastructure/TableReportGenerator.java +++ b/Core/src/org/sleuthkit/autopsy/report/infrastructure/TableReportGenerator.java @@ -45,7 +45,6 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.services.TagsManager; import org.sleuthkit.autopsy.coreutils.ImageUtils; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.report.ReportProgressPanel; import static org.sleuthkit.autopsy.casemodule.services.TagsManager.getNotableTagLabel; import org.sleuthkit.autopsy.coreutils.TimeZoneUtils; @@ -358,7 +357,7 @@ class TableReportGenerator { // Give the modules the rows for the content tags. 
for (ContentTag tag : tags) { try { - if(shouldFilterFromReport(tag.getContent())) { + if (shouldFilterFromReport(tag.getContent())) { continue; } } catch (TskCoreException ex) { @@ -366,7 +365,7 @@ class TableReportGenerator { logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS return; } - + // skip tags that we are not reporting on String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? TagsManager.getNotableTagLabel() : ""; if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) { @@ -451,15 +450,15 @@ class TableReportGenerator { // Give the modules the rows for the content tags. for (BlackboardArtifactTag tag : tags) { try { - if(shouldFilterFromReport(tag.getContent())) { + if (shouldFilterFromReport(tag.getContent())) { continue; } - } catch (TskCoreException ex) { + } catch (TskCoreException ex) { errorList.add(NbBundle.getMessage(this.getClass(), "ReportGenerator.errList.failedGetBBArtifactTags")); logger.log(Level.SEVERE, "Failed to access content data from the case database.", ex); //NON-NLS return; } - + String notableString = tag.getName().getKnownStatus() == TskData.FileKnown.BAD ? 
TagsManager.getNotableTagLabel() : ""; if (passesTagNamesFilter(tag.getName().getDisplayName() + notableString) == false) { continue; @@ -813,7 +812,7 @@ class TableReportGenerator { AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId); if (f != null) { uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath(); - if(shouldFilterFromReport(f)) { + if (shouldFilterFromReport(f)) { continue; } } @@ -973,7 +972,7 @@ class TableReportGenerator { AbstractFile f = openCase.getSleuthkitCase().getAbstractFileById(objId); if (f != null) { uniquePath = openCase.getSleuthkitCase().getAbstractFileById(objId).getUniquePath(); - if(shouldFilterFromReport(f)) { + if (shouldFilterFromReport(f)) { continue; } } @@ -1217,11 +1216,11 @@ class TableReportGenerator { private List getFilteredArtifacts(BlackboardArtifact.Type type, HashSet tagNamesFilter) { List artifacts = new ArrayList<>(); try { - for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardArtifacts(type.getTypeID())) { - if(shouldFilterFromReport(artifact)) { + for (BlackboardArtifact artifact : Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getArtifacts(Collections.singletonList(type), settings.getSelectedDataSources())) { + if (shouldFilterFromReport(artifact)) { continue; } - + List tags = Case.getCurrentCaseThrows().getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact); HashSet uniqueTagNames = new HashSet<>(); for (BlackboardArtifactTag tag : tags) { @@ -1232,7 +1231,7 @@ class TableReportGenerator { continue; } try { - artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboardAttributes(artifact), uniqueTagNames)); + artifacts.add(new ArtifactData(artifact, Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().getBlackboardAttributes(artifact), uniqueTagNames)); } catch (TskCoreException ex) { errorList.add(NbBundle.getMessage(this.getClass(), 
"ReportGenerator.errList.failedGetBBAttribs")); logger.log(Level.SEVERE, "Failed to get Blackboard Attributes when generating report.", ex); //NON-NLS @@ -1339,7 +1338,7 @@ class TableReportGenerator { new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH))); columns.add(new AttributeColumn(NbBundle.getMessage(this.getClass(), "ReportGenerator.artTableColHdr.dateTime"), - new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED ))); + new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED))); attributeTypeSet.remove(new Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID)); } else if (BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG.getTypeID() == artifactTypeId) { @@ -1817,19 +1816,19 @@ class TableReportGenerator { return ""; } - + /** * Indicates if the content should be filtered from the report. */ private boolean shouldFilterFromReport(Content content) throws TskCoreException { - if(this.settings.getSelectedDataSources() == null) { + if (this.settings.getSelectedDataSources() == null) { return false; } - + if (content.getDataSource() == null) { return false; } - + long dataSourceId = content.getDataSource().getId(); return !this.settings.getSelectedDataSources().contains(dataSourceId); } diff --git a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java index f18f05f61f..830f706b7d 100644 --- a/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java +++ b/Core/src/org/sleuthkit/autopsy/report/modules/portablecase/PortableCaseReportModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2019-2020 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -354,7 +354,7 @@ public class PortableCaseReportModule implements ReportModule { } for (BlackboardAttribute.ATTRIBUTE_TYPE type : BlackboardAttribute.ATTRIBUTE_TYPE.values()) { try { - oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getAttributeType(type.getLabel())); + oldAttrTypeIdToNewAttrType.put(type.getTypeID(), portableSkCase.getBlackboard().getAttributeType(type.getLabel())); } catch (TskCoreException ex) { handleError("Error looking up attribute name " + type.getLabel(), Bundle.PortableCaseReportModule_generateReport_errorLookingUpAttrType(type.getLabel()), @@ -1084,7 +1084,7 @@ public class PortableCaseReportModule implements ReportModule { return oldArtTypeIdToNewArtTypeId.get(oldArtifact.getArtifactTypeID()); } - BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getArtifactType(oldArtifact.getArtifactTypeName()); + BlackboardArtifact.Type oldCustomType = currentCase.getSleuthkitCase().getBlackboard().getArtifactType(oldArtifact.getArtifactTypeName()); try { BlackboardArtifact.Type newCustomType = portableSkCase.getBlackboard().getOrAddArtifactType(oldCustomType.getTypeName(), oldCustomType.getDisplayName()); oldArtTypeIdToNewArtTypeId.put(oldArtifact.getArtifactTypeID(), newCustomType.getTypeID()); @@ -1424,7 +1424,7 @@ public class PortableCaseReportModule implements ReportModule { // Add the attachment. The account type specified in the constructor will not be used. 
CommunicationArtifactsHelper communicationArtifactsHelper = new CommunicationArtifactsHelper(currentCase.getSleuthkitCase(), - newSourceStr, newFile, Account.Type.EMAIL); + newSourceStr, newFile, Account.Type.EMAIL, null); communicationArtifactsHelper.addAttachments(newArtifact, new MessageAttachments(newFileAttachments, msgAttachments.getUrlAttachments())); } catch (BlackboardJsonAttrUtil.InvalidJsonException ex) { diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java index 834adc0a97..3fb49998a3 100644 --- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java +++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2020 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -23,7 +23,6 @@ import java.util.List; import javax.xml.bind.DatatypeConverter; import org.joda.time.DateTime; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.Blackboard.BlackboardException; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -87,6 +86,7 @@ final class CustomArtifactType { * blackboard. * * @param source The artifact source content. + * @param ingestJobId The ingest job ID. * * @return A BlackboardArtifact object. * @@ -95,7 +95,7 @@ final class CustomArtifactType { * @throws Blackboard.BlackboardException If there is an error posting the * artifact to the blackboard. 
*/ - static BlackboardArtifact createAndPostInstance(Content source) throws TskCoreException, Blackboard.BlackboardException { + static BlackboardArtifact createAndPostInstance(Content source, long ingestJobId) throws TskCoreException, Blackboard.BlackboardException { List attributes = new ArrayList<>(); attributes.add(new BlackboardAttribute(intAttrType, MODULE_NAME, 0)); attributes.add(new BlackboardAttribute(doubleAttrType, MODULE_NAME, 0.0)); @@ -131,7 +131,7 @@ final class CustomArtifactType { } Blackboard blackboard = Case.getCurrentCase().getServices().getArtifactsBlackboard(); - blackboard.postArtifact(artifact, MODULE_NAME); + blackboard.postArtifact(artifact, MODULE_NAME, ingestJobId); return artifact; } diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java index ffe9b595bf..8cbd01ef02 100644 --- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2020 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -38,7 +38,8 @@ import org.sleuthkit.datamodel.TskCoreException; public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceIngestModuleAdapter { private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorDataSourceIngestModule.class.getName()); - + private IngestJobContext context; + /** * Adds the custom artifact type this module uses to the case database of * the current case. 
@@ -51,6 +52,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge */ @Override public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; try { CustomArtifactType.addToCaseDatabase(); } catch (Blackboard.BlackboardException ex) { @@ -70,7 +72,7 @@ public class CustomArtifactsCreatorDataSourceIngestModule extends DataSourceInge @Override public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) { try { - CustomArtifactType.createAndPostInstance(dataSource); + CustomArtifactType.createAndPostInstance(dataSource, context.getJobId()); } catch (TskCoreException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, String.format("Failed to process data source (obj_id = %d)", dataSource.getId()), ex); return ProcessResult.ERROR; diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java index de015d8a21..235ef1f84b 100644 --- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2017-2020 Basis Technology Corp. + * Copyright 2017-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -37,6 +37,7 @@ import org.sleuthkit.datamodel.TskCoreException; final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapter { private static final Logger logger = Logger.getLogger(CustomArtifactsCreatorFileIngestModule.class.getName()); + private IngestJobContext context; /** * Adds the custom artifact type this module uses to the case database of @@ -50,6 +51,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt */ @Override public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; try { CustomArtifactType.addToCaseDatabase(); } catch (Blackboard.BlackboardException ex) { @@ -71,7 +73,7 @@ final class CustomArtifactsCreatorFileIngestModule extends FileIngestModuleAdapt return ProcessResult.OK; } try { - CustomArtifactType.createAndPostInstance(file); + CustomArtifactType.createAndPostInstance(file, context.getJobId()); } catch (TskCoreException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, String.format("Failed to process file (obj_id = %d)", file.getId()), ex); return ProcessResult.ERROR; diff --git a/Core/src/org/sleuthkit/autopsy/testutils/IngestJobRunner.java b/Core/src/org/sleuthkit/autopsy/testutils/IngestJobRunner.java index c424e057c2..59c05dd603 100755 --- a/Core/src/org/sleuthkit/autopsy/testutils/IngestJobRunner.java +++ b/Core/src/org/sleuthkit/autopsy/testutils/IngestJobRunner.java @@ -25,6 +25,7 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; import java.util.Set; +import javax.annotation.concurrent.GuardedBy; import org.sleuthkit.autopsy.events.AutopsyEvent; import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobStartResult; @@ -53,7 +54,7 @@ public final class IngestJobRunner { */ public static List runIngestJob(Collection dataSources, IngestJobSettings 
settings) throws InterruptedException { Object ingestMonitor = new Object(); - IngestJobCompletiontListener completiontListener = new IngestJobCompletiontListener(ingestMonitor); + IngestJobCompletionListener completiontListener = new IngestJobCompletionListener(ingestMonitor, dataSources.size()); IngestManager ingestManager = IngestManager.getInstance(); ingestManager.addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, completiontListener); try { @@ -81,9 +82,12 @@ public final class IngestJobRunner { * An ingest job event listener that allows IngestRunner.runIngestJob to * block until the specified ingest job is completed. */ - private static final class IngestJobCompletiontListener implements PropertyChangeListener { + private static final class IngestJobCompletionListener implements PropertyChangeListener { private final Object ingestMonitor; + + @GuardedBy("ingestMonitor") + private int remainingJobsCount; /** * Constructs an ingest job event listener that allows @@ -92,9 +96,11 @@ public final class IngestJobRunner { * * @param ingestMonitor A Java object to notify when the ingest job is * omcpleted. + * @param jobsCount The number of jobs to listen for before notifying monitor. 
*/ - IngestJobCompletiontListener(Object ingestMonitor) { + IngestJobCompletionListener(Object ingestMonitor, int jobsCount) { this.ingestMonitor = ingestMonitor; + this.remainingJobsCount = jobsCount; } /** @@ -109,7 +115,10 @@ public final class IngestJobRunner { String eventType = event.getPropertyName(); if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { synchronized (ingestMonitor) { - ingestMonitor.notify(); + this.remainingJobsCount--; + if (this.remainingJobsCount <= 0) { + ingestMonitor.notify(); + } } } } diff --git a/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java b/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java index 4b7f43cb8f..acae10c1e0 100755 --- a/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java +++ b/Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java @@ -157,7 +157,7 @@ public class AddManualEvent extends Action { BlackboardArtifact artifact = eventInfo.datasource.newDataArtifact(new BlackboardArtifact.Type(TSK_TL_EVENT), attributes, null); try { - sleuthkitCase.getBlackboard().postArtifact(artifact, source); + sleuthkitCase.getBlackboard().postArtifact(artifact, source, null); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait(); diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IngestedWithHashAndFileTypeIntraCaseTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IngestedWithHashAndFileTypeIntraCaseTest.java index 80ec639677..f5807509d0 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IngestedWithHashAndFileTypeIntraCaseTest.java +++ 
b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IngestedWithHashAndFileTypeIntraCaseTest.java @@ -20,6 +20,7 @@ package org.sleuthkit.autopsy.commonpropertiessearch; import java.sql.SQLException; +import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -88,6 +89,26 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { this.utils.tearDown(); } + /** + * Assert that the given file appears a precise number of times in the given + * data source. + * + * @param searchDomain search domain + * @param objectIdToDataSourceMap mapping of file ids to data source names + * @param fileName name of file to search for + * @param dataSource name of data source where file should + * appear + * @param instanceCount number of appearances of the given file + * + * Fails the test if the file does not appear the expected number of + * times in the given data source + */ + static void assertInstanceExistenceAndCount(List searchDomain, Map objectIdToDataSourceMap, String fileName, String dataSource, int instanceCount) { + int foundCount = IntraCaseTestUtils.getInstanceCount(searchDomain, objectIdToDataSourceMap, fileName, dataSource); + String errorMessage = MessageFormat.format("Expected to find {0} matches for {1} in {2} but found {3}.", instanceCount, fileName, dataSource, foundCount); + assertEquals(errorMessage, instanceCount, foundCount); + } + /** * Find all matches & all file types. Confirm file.jpg is found on all three * and file.docx is found on two. 
@@ -103,25 +124,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = IntraCaseTestUtils.getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, 
objectIdToDataSource, PDF, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -144,25 +165,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, 
SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ 
-185,25 +206,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, 
SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -227,25 +248,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - 
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -269,25 +290,25 @@ 
public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + 
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -311,25 +332,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - 
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -353,25 +374,25 @@ 
public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + 
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -394,25 +415,25 @@ public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 0)); - 
assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); @@ -435,25 +456,25 @@ 
public class IngestedWithHashAndFileTypeIntraCaseTest extends NbTestCase { List files = getFiles(objectIdToDataSource.keySet()); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET1, 2)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET2, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, IMG, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET1, 2); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET2, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, IMG, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET1, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET3, 1)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, DOC, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET1, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET3, 1); + assertInstanceExistenceAndCount(files, objectIdToDataSource, DOC, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, PDF, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET2, 0); + 
assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, PDF, SET4, 0); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0)); - assertTrue(verifyInstanceExistanceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0)); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET1, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET2, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET3, 0); + assertInstanceExistenceAndCount(files, objectIdToDataSource, EMPTY, SET4, 0); } catch (NoCurrentCaseException | TskCoreException | SQLException | CentralRepoException ex) { Exceptions.printStackTrace(ex); diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IntraCaseTestUtils.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IntraCaseTestUtils.java index dee73e46f7..aa78aa75ed 100644 --- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IntraCaseTestUtils.java +++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/commonpropertiessearch/IntraCaseTestUtils.java @@ -179,6 +179,37 @@ class IntraCaseTestUtils { return tally == instanceCount; } + /** + * Verify that the given file appears a precise number times in the given + * data source. + * + * @param searchDomain search domain + * @param objectIdToDataSourceMap mapping of file ids to data source names + * @param fileName name of file to search for + * @param dataSource name of data source where file should appear + * @param instanceCount number of appearances of the given file + * @return The count of items found. 
+ */ + static int getInstanceCount(List searchDomain, Map objectIdToDataSourceMap, String fileName, String dataSource) { + + int tally = 0; + + for (AbstractFile file : searchDomain) { + + Long objectId = file.getId(); + + String name = file.getName(); + + String dataSourceName = objectIdToDataSourceMap.get(objectId); + + if (name.equalsIgnoreCase(fileName) && dataSourceName.equalsIgnoreCase(dataSource)) { + tally++; + } + } + + return tally; + } + /** * Convenience method which verifies that a file exists within a given data * source exactly once. diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java index 656b98b278..fdecd191f3 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestControlPanel.java @@ -143,7 +143,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { private static final int COMPLETED_TIME_COL_PREFERRED_WIDTH = 280; private static final String UPDATE_TASKS_THREAD_NAME = "AID-update-tasks-%d"; private static final String LOCAL_HOST_NAME = NetworkUtils.getLocalHostName(); - private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice"; + private static final String RUNNING_AS_SERVICE_PROPERTY = "autoingest.runningasservice"; private static final Logger sysLogger = AutoIngestSystemLogger.getLogger(); private static AutoIngestControlPanel instance; private final DefaultTableModel pendingTableModel; @@ -160,7 +160,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * Maintain a mapping of each service to it's last status update. 
*/ private final ConcurrentHashMap statusByService; - + /* * The enum is used in conjunction with the DefaultTableModel class to * provide table models for the JTables used to display a view of the @@ -177,7 +177,8 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime=Job Completed", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage", - "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage", + "# {0} - unitSeparator", + "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss)", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder=Case Folder", "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob= Local Job?", @@ -193,7 +194,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { STARTED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime")), COMPLETED_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CompletedTime")), STAGE(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage")), - STAGE_TIME(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime")), + STAGE_TIME(Bundle.AutoIngestControlPanel_JobsTableModel_ColumnHeader_StageTime(DurationCellRenderer.getUnitSeperator())), STATUS(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status")), CASE_DIRECTORY_PATH(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.CaseFolder")), 
IS_LOCAL_JOB(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.JobsTableModel.ColumnHeader.LocalJob")), @@ -250,7 +251,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * controlling automated ingest for a single node within the cluster. */ private AutoIngestControlPanel() { - + this.statusByService = new ConcurrentHashMap<>(); //Disable the main window so they can only use the dashboard (if we used setVisible the taskBar icon would go away) @@ -290,10 +291,10 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { * Update status of the services on the dashboard */ private void displayServicesStatus() { - tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message", - statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()), - statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()), - statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()), + tbServicesStatusMessage.setText(NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message", + statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()), + statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()), + statusByService.get(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()), statusByService.get(ServicesMonitor.Service.MESSAGING.toString()))); String upStatus = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up"); if (statusByService.get(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()).compareTo(upStatus) != 0 @@ -304,7 +305,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { tbServicesStatusMessage.setForeground(Color.BLACK); } } - + /** * Queries the services monitor and sets the text for the services 
status * text box. @@ -411,7 +412,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { column.setMaxWidth(PRIORITY_COLUMN_MAX_WIDTH); column.setPreferredWidth(PRIORITY_COLUMN_PREFERRED_WIDTH); column.setWidth(PRIORITY_COLUMN_PREFERRED_WIDTH); - + column = pendingTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader()); column.setCellRenderer(new OcrIconCellRenderer()); column.setMaxWidth(OCR_COLUMN_MAX_WIDTH); @@ -469,7 +470,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader())); runningTable.removeColumn(runningTable.getColumn(JobsTableModelColumns.OCR.getColumnHeader())); - + /* * Set up a column to display the cases associated with the jobs. */ @@ -566,7 +567,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.CASE_DIRECTORY_PATH.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.MANIFEST_FILE_PATH.getColumnHeader())); completedTable.removeColumn(completedTable.getColumn(JobsTableModelColumns.PRIORITY.getColumnHeader())); - + /* * Set up a column to display the cases associated with the jobs. */ @@ -617,7 +618,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { column.setMaxWidth(STATUS_COL_MAX_WIDTH); column.setPreferredWidth(STATUS_COL_PREFERRED_WIDTH); column.setWidth(STATUS_COL_PREFERRED_WIDTH); - + /* * Set up a column to display OCR enabled/disabled flag. 
*/ @@ -732,30 +733,30 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { } PropertyChangeListener propChangeListener = (PropertyChangeEvent evt) -> { - + String serviceDisplayName = ServicesMonitor.Service.valueOf(evt.getPropertyName()).toString(); String status = evt.getNewValue().toString(); - + if (status.equals(ServicesMonitor.ServiceStatus.UP.toString())) { status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Up"); } else if (status.equals(ServicesMonitor.ServiceStatus.DOWN.toString())) { status = NbBundle.getMessage(AutoIngestControlPanel.class, "AutoIngestControlPanel.tbServicesStatusMessage.Message.Down"); sysLogger.log(Level.SEVERE, "Connection to {0} is down", serviceDisplayName); //NON-NLS } - + // if the status update is for an existing service who's status hasn't changed - do nothing. if (statusByService.containsKey(serviceDisplayName) && status.equals(statusByService.get(serviceDisplayName))) { return; } - + statusByService.put(serviceDisplayName, status); displayServicesStatus(); }; - + // Subscribe to all multi-user services in order to display their status Set servicesList = new HashSet<>(); servicesList.add(ServicesMonitor.Service.REMOTE_CASE_DATABASE.toString()); - servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()); + servicesList.add(ServicesMonitor.Service.REMOTE_KEYWORD_SEARCH.toString()); servicesList.add(ServicesMonitor.Service.MESSAGING.toString()); ServicesMonitor.getInstance().addSubscriber(servicesList, propChangeListener); @@ -879,7 +880,7 @@ public final class AutoIngestControlPanel extends JPanel implements Observer { case JOB_COMPLETED: case CASE_DELETED: case REPROCESS_JOB: - case OCR_STATE_CHANGE: + case OCR_STATE_CHANGE: updateExecutor.submit(new UpdateAllJobsTablesTask()); break; case PAUSED_BY_USER_REQUEST: diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java 
b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java index 24a1e57fb9..cf982d13a3 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsNode.java @@ -53,7 +53,8 @@ final class AutoIngestJobsNode extends AbstractNode { "AutoIngestJobsNode.dataSource.text=Data Source", "AutoIngestJobsNode.hostName.text=Host Name", "AutoIngestJobsNode.stage.text=Stage", - "AutoIngestJobsNode.stageTime.text=Time in Stage", + "# {0} - unitSeparator", + "AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss)", "AutoIngestJobsNode.jobCreated.text=Job Created", "AutoIngestJobsNode.jobCompleted.text=Job Completed", "AutoIngestJobsNode.priority.text=Prioritized", @@ -345,8 +346,10 @@ final class AutoIngestJobsNode extends AbstractNode { jobWrapper.getProcessingHostName())); ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), status.getDescription())); - ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text(), - DurationCellRenderer.longToDurationString((Date.from(Instant.now()).getTime()) - (status.getStartDate().getTime())))); + ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()), + Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()), + Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()), + DurationCellRenderer.longToDurationString(Date.from(Instant.now()).getTime() - status.getStartDate().getTime()))); break; case COMPLETED_JOB: ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), Bundle.AutoIngestJobsNode_jobCreated_text(), @@ -356,7 +359,7 @@ 
final class AutoIngestJobsNode extends AbstractNode { ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(), Bundle.AutoIngestJobsNode_status_text(), jobWrapper.getErrorsOccurred() ? StatusIconCellRenderer.Status.WARNING : StatusIconCellRenderer.Status.OK)); ss.put(new NodeProperty<>(Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(), Bundle.AutoIngestJobsNode_ocr_text(), - jobWrapper.getOcrEnabled())); + jobWrapper.getOcrEnabled())); break; default: } @@ -377,7 +380,7 @@ final class AutoIngestJobsNode extends AbstractNode { PrioritizationAction.DeprioritizeCaseAction deprioritizeCaseAction = new PrioritizationAction.DeprioritizeCaseAction(jobWrapper.getJob()); deprioritizeCaseAction.setEnabled(jobWrapper.getPriority() > 0); actions.add(deprioritizeCaseAction); - + actions.add(new AutoIngestAdminActions.EnableOCR(jobWrapper.getJob())); AutoIngestAdminActions.DisableOCR disableOCRAction = new AutoIngestAdminActions.DisableOCR(jobWrapper.getJob()); disableOCRAction.setEnabled(jobWrapper.getOcrEnabled() == true); diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsPanel.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsPanel.java index d335a35430..12c929d23d 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsPanel.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestJobsPanel.java @@ -31,6 +31,7 @@ import org.sleuthkit.autopsy.datamodel.EmptyNode; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.AutoIngestJobStatus; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJobsNode.JobNode; import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestNodeRefreshEvents.AutoIngestRefreshEvent; +import org.sleuthkit.autopsy.guiutils.DurationCellRenderer; import org.sleuthkit.autopsy.guiutils.StatusIconCellRenderer; /** @@ 
-64,6 +65,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa customize(); } + + /** * Set up the AutoIngestJobsPanel's so that its outlineView is displaying * the correct columns for the specified AutoIngestJobStatus @@ -99,7 +102,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa outlineView.setPropertyColumns(Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_dataSource_text(), Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_hostName_text(), Bundle.AutoIngestJobsNode_stage_text(), Bundle.AutoIngestJobsNode_stage_text(), - Bundle.AutoIngestJobsNode_stageTime_text(), Bundle.AutoIngestJobsNode_stageTime_text()); + Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator()), + Bundle.AutoIngestJobsNode_stageTime_text(DurationCellRenderer.getUnitSeperator())); indexOfColumn = getColumnIndexByName(Bundle.AutoIngestJobsNode_caseName_text()); if (indexOfColumn != INVALID_INDEX) { outline.setColumnSorted(indexOfColumn, true, 1); @@ -124,7 +128,7 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa if (indexOfColumn != INVALID_INDEX) { outline.getColumnModel().getColumn(indexOfColumn).setPreferredWidth(INITIAL_OCR_WIDTH); outline.getColumnModel().getColumn(indexOfColumn).setCellRenderer(new OcrIconCellRenderer()); - } + } break; default: } @@ -177,8 +181,8 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa * Update the contents of this AutoIngestJobsPanel while retaining currently * selected node. 
* - * @param refreshEvent - the AutoIngestRefreshEvent which will provide the new - * contents + * @param refreshEvent - the AutoIngestRefreshEvent which will provide the + * new contents */ void refresh(AutoIngestRefreshEvent refreshEvent) { synchronized (this) { @@ -191,7 +195,6 @@ final class AutoIngestJobsPanel extends javax.swing.JPanel implements ExplorerMa } outline.setRowSelectionAllowed(true); outline.setFocusable(true); - } } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED index 2a1e361537..33de996d54 100755 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/Bundle.properties-MERGED @@ -78,7 +78,8 @@ AutoIngestControlPanel.JobsTableModel.ColumnHeader.ManifestFilePath=\ Manifest F AutoIngestControlPanel.JobsTableModel.ColumnHeader.OCR=OCR AutoIngestControlPanel.JobsTableModel.ColumnHeader.Priority=Prioritized AutoIngestControlPanel.JobsTableModel.ColumnHeader.Stage=Stage -AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage +# {0} - unitSeparator +AutoIngestControlPanel.JobsTableModel.ColumnHeader.StageTime=Time in Stage (dd{0}hh{0}mm{0}ss) AutoIngestControlPanel.JobsTableModel.ColumnHeader.StartedTime=Stage Started AutoIngestControlPanel.JobsTableModel.ColumnHeader.Status=Status AutoIngestControlPanel.OK=OK @@ -140,7 +141,8 @@ AutoIngestJobsNode.prioritized.false=No AutoIngestJobsNode.prioritized.true=Yes AutoIngestJobsNode.priority.text=Prioritized AutoIngestJobsNode.stage.text=Stage -AutoIngestJobsNode.stageTime.text=Time in Stage +# {0} - unitSeparator +AutoIngestJobsNode.stageTime.text=Time in Stage (dd{0}hh{0}mm{0}ss) AutoIngestJobsNode.status.text=Status AutoIngestJobsPanel.waitNode.text=Please Wait... 
AutoIngestMetricsDialog.initReportText=Select a date above and click the 'Generate Metrics Report' button to generate\na metrics report. diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/FileExportRuleSet.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/FileExportRuleSet.java index 198dd8c5bd..3bc376621b 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/FileExportRuleSet.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/FileExportRuleSet.java @@ -1084,13 +1084,13 @@ final class FileExportRuleSet implements Serializable, Comparable sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -177,7 +177,7 @@ public class ObjectDetectectionFileIngestModule extends FileIngestModuleAdapter /* * Index the artifact for keyword search. */ - blackboard.postArtifact(artifact, MODULE_NAME); + blackboard.postArtifact(artifact, MODULE_NAME, jobId); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS } diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java index 5cd15156a1..0efa6cbfc4 100644 --- a/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java +++ b/Experimental/src/org/sleuthkit/autopsy/experimental/volatilityDSP/VolatilityProcessor.java @@ -1,7 +1,7 @@ /* * Autopsy * - * Copyright 2018 Basis Technology Corp. + * Copyright 2018-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -390,7 +390,7 @@ class VolatilityProcessor { try { // index the artifact for keyword search - blackboard.postArtifact(volArtifact, VOLATILITY); + blackboard.postArtifact(volArtifact, VOLATILITY, null); } catch (Blackboard.BlackboardException ex) { errorMsgs.add(Bundle.VolatilityProcessor_errorMessage_failedToIndexArtifact(pluginName)); /* diff --git a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py index 49e26483d0..375652b6c4 100644 --- a/InternalPythonModules/GPX_Module/GPX_Parser_Module.py +++ b/InternalPythonModules/GPX_Module/GPX_Parser_Module.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -134,7 +134,7 @@ class GPXParserFileIngestModule(FileIngestModule): # Create a GeoArtifactsHelper for this file. 
geoArtifactHelper = GeoArtifactsHelper( - self.skCase, self.moduleName, None, file) + self.skCase, self.moduleName, None, file, context.getJobId()) if self.writeDebugMsgs: self.log(Level.INFO, "Processing " + file.getUniquePath() + @@ -213,7 +213,7 @@ class GPXParserFileIngestModule(FileIngestModule): art = file.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) - self.blackboard.postArtifact(art, self.moduleName) + self.blackboard.postArtifact(art, self.moduleName, context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error posting GPS bookmark artifact for " + diff --git a/InternalPythonModules/android/browserlocation.py b/InternalPythonModules/android/browserlocation.py index faab73bcfa..ab0d52f709 100644 --- a/InternalPythonModules/android/browserlocation.py +++ b/InternalPythonModules/android/browserlocation.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2018 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -104,9 +104,8 @@ class BrowserLocationAnalyzer(general.AndroidComponentAnalyzer): # NOTE: originally commented out try: - # index the artifact for keyword search blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() - blackboard.postArtifact(artifact, general.MODULE_NAME) + blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId()) except Blackboard.BlackboardException as ex: self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactTypeName()), ex) self._logger.log(Level.SEVERE, traceback.format_exc()) diff --git a/InternalPythonModules/android/cachelocation.py b/InternalPythonModules/android/cachelocation.py index 599eb60ca1..15879e99b8 100644 --- a/InternalPythonModules/android/cachelocation.py +++ b/InternalPythonModules/android/cachelocation.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2018 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -102,9 +102,8 @@ class CacheLocationAnalyzer(general.AndroidComponentAnalyzer): # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE.getTypeID(), AndroidModuleFactorymodule.moduleName, accuracy)) # artifact.addAttribute(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID(), AndroidModuleFactorymodule.moduleName, confidence)) try: - # index the artifact for keyword search blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() - blackboard.postArtifact(artifact, general.MODULE_NAME) + blackboard.postArtifact(artifact, general.MODULE_NAME, context.getJobId()) except Blackboard.BlackboardException as ex: self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex) self._logger.log(Level.SEVERE, traceback.format_exc()) diff --git a/InternalPythonModules/android/calllog.py b/InternalPythonModules/android/calllog.py index 13775e80f7..2762f0b869 100644 --- a/InternalPythonModules/android/calllog.py +++ b/InternalPythonModules/android/calllog.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2020 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -83,12 +83,12 @@ class CallLogAnalyzer(general.AndroidComponentAnalyzer): callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, callLogDb.getDBFile(), - Account.Type.PHONE, Account.Type.PHONE, selfAccountId ) + Account.Type.PHONE, Account.Type.PHONE, selfAccountId, context.getJobId()) else: callLogDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, callLogDb.getDBFile(), - Account.Type.PHONE ) + Account.Type.PHONE, context.getJobId()) for tableName in CallLogAnalyzer._tableNames: try: diff --git a/InternalPythonModules/android/contact.py b/InternalPythonModules/android/contact.py index 8144890134..5d5e181fdd 100644 --- a/InternalPythonModules/android/contact.py +++ b/InternalPythonModules/android/contact.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2020 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -75,7 +75,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer): return for contactDb in contactsDbs: try: - self.__findContactsInDB(contactDb, dataSource) + self.__findContactsInDB(contactDb, dataSource, context) except Exception as ex: self._logger.log(Level.SEVERE, "Error parsing Contacts", ex) self._logger.log(Level.SEVERE, traceback.format_exc()) @@ -86,7 +86,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer): """ Queries the given contact database and adds Contacts to the case. 
""" - def __findContactsInDB(self, contactDb, dataSource): + def __findContactsInDB(self, contactDb, dataSource, context): if not contactDb: return @@ -97,7 +97,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer): contactDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, contactDb.getDBFile(), - Account.Type.PHONE ) + Account.Type.PHONE, context.getJobId()) # get display_name, mimetype(email or phone number) and data1 (phonenumber or email address depending on mimetype) # sorted by name, so phonenumber/email would be consecutive for a person if they exist. @@ -158,7 +158,7 @@ class ContactAnalyzer(general.AndroidComponentAnalyzer): phoneNumber, # phoneNumber, None, # homePhoneNumber, None, # mobilePhoneNumber, - emailAddr) # emailAddr + emailAddr, context.getJobId()) # emailAddr except SQLException as ex: self._logger.log(Level.WARNING, "Error processing query result for Android messages.", ex) diff --git a/InternalPythonModules/android/fbmessenger.py b/InternalPythonModules/android/fbmessenger.py index 86dc9cd474..05954e0e9f 100644 --- a/InternalPythonModules/android/fbmessenger.py +++ b/InternalPythonModules/android/fbmessenger.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -148,11 +148,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer): if self.selfAccountId is not None: contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), self._MODULE_NAME, contactsDb.getDBFile(), - Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId ) + Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId()) else: contactsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), self._MODULE_NAME, contactsDb.getDBFile(), - Account.Type.FACEBOOK) + Account.Type.FACEBOOK, context.getJobId()) ## get the other contacts/friends contactsResultSet = contactsDb.runQuery("SELECT fbid, display_name, added_time_ms FROM contacts WHERE added_time_ms <> 0") @@ -492,11 +492,11 @@ class FBMessengerAnalyzer(general.AndroidComponentAnalyzer): if self.selfAccountId is not None: threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), self._MODULE_NAME, threadsDb.getDBFile(), - Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId ) + Account.Type.FACEBOOK, Account.Type.FACEBOOK, self.selfAccountId, context.getJobId()) else: threadsDBHelper = CommunicationArtifactsHelper(self.current_case.getSleuthkitCase(), self._MODULE_NAME, threadsDb.getDBFile(), - Account.Type.FACEBOOK) + Account.Type.FACEBOOK, context.getJobId()) self.analyzeMessages(threadsDb, threadsDBHelper) self.analyzeCallLogs(threadsDb, threadsDBHelper) diff --git a/InternalPythonModules/android/googlemaplocation.py b/InternalPythonModules/android/googlemaplocation.py index 2c33146b21..277b8ef1f5 100644 --- a/InternalPythonModules/android/googlemaplocation.py +++ b/InternalPythonModules/android/googlemaplocation.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2018 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -76,7 +76,7 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer): try: jFile = File(self.current_case.getTempDirectory(), str(abstractFile.getId()) + abstractFile.getName()) ContentUtils.writeToFile(abstractFile, jFile, context.dataSourceIngestIsCancelled) - self.__findGeoLocationsInDB(jFile.toString(), abstractFile) + self.__findGeoLocationsInDB(jFile.toString(), abstractFile, context) except Exception as ex: self._logger.log(Level.SEVERE, "Error parsing Google map locations", ex) self._logger.log(Level.SEVERE, traceback.format_exc()) @@ -84,13 +84,13 @@ class GoogleMapLocationAnalyzer(general.AndroidComponentAnalyzer): # Error finding Google map locations. pass - def __findGeoLocationsInDB(self, databasePath, abstractFile): + def __findGeoLocationsInDB(self, databasePath, abstractFile, context): if not databasePath: return try: artifactHelper = GeoArtifactsHelper(self.current_case.getSleuthkitCase(), - general.MODULE_NAME, self.PROGRAM_NAME, abstractFile) + general.MODULE_NAME, self.PROGRAM_NAME, abstractFile, context.getJobId()) Class.forName("org.sqlite.JDBC") # load JDBC driver connection = DriverManager.getConnection("jdbc:sqlite:" + databasePath) statement = connection.createStatement() diff --git a/InternalPythonModules/android/imo.py b/InternalPythonModules/android/imo.py index 6898e3693f..7e308340ad 100644 --- a/InternalPythonModules/android/imo.py +++ b/InternalPythonModules/android/imo.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -109,12 +109,12 @@ class IMOAnalyzer(general.AndroidComponentAnalyzer): friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, friendsDb.getDBFile(), - Account.Type.IMO, Account.Type.IMO, selfAccountId ) + Account.Type.IMO, Account.Type.IMO, selfAccountId, context.getJobId()) else: friendsDBHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, friendsDb.getDBFile(), - Account.Type.IMO ) + Account.Type.IMO, context.getJobId()) contactsResultSet = friendsDb.runQuery("SELECT buid, name FROM friends") if contactsResultSet is not None: while contactsResultSet.next(): diff --git a/InternalPythonModules/android/installedapps.py b/InternalPythonModules/android/installedapps.py index ef09a5b5d0..fe80180522 100644 --- a/InternalPythonModules/android/installedapps.py +++ b/InternalPythonModules/android/installedapps.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -75,7 +75,7 @@ class InstalledApplicationsAnalyzer(general.AndroidComponentAnalyzer): try: current_case = Case.getCurrentCaseThrows() libraryDbHelper = ArtifactsHelper(current_case.getSleuthkitCase(), - self._MODULE_NAME, libraryDb.getDBFile()) + self._MODULE_NAME, libraryDb.getDBFile(), context.getJobId()) queryString = "SELECT doc_id, purchase_time FROM ownership" ownershipResultSet = libraryDb.runQuery(queryString) if ownershipResultSet is not None: diff --git a/InternalPythonModules/android/line.py b/InternalPythonModules/android/line.py index 7409f28945..fe39a434c1 100644 --- a/InternalPythonModules/android/line.py +++ b/InternalPythonModules/android/line.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. 
+Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -117,7 +117,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - contact_and_message_db.getDBFile(), Account.Type.LINE) + contact_and_message_db.getDBFile(), Account.Type.LINE, context.getJobId()) self.parse_contacts(contact_and_message_db, helper) self.parse_messages(contact_and_message_db, helper, current_case) @@ -125,7 +125,7 @@ class LineAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - calllog_db.getDBFile(), Account.Type.LINE) + calllog_db.getDBFile(), Account.Type.LINE, context.getJobId()) self.parse_calllogs(dataSource, calllog_db, helper) except NoCurrentCaseException as ex: diff --git a/InternalPythonModules/android/operabrowser.py b/InternalPythonModules/android/operabrowser.py index f1f90b9f04..d9e59defb7 100644 --- a/InternalPythonModules/android/operabrowser.py +++ b/InternalPythonModules/android/operabrowser.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -89,7 +89,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer): for cookiesDb in cookiesDbs: try: cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, cookiesDb.getDBFile()) + self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId()) cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies") if cookiesResultSet is not None: while cookiesResultSet.next(): @@ -119,7 +119,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer): for historyDb in historyDbs: try: historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, historyDb.getDBFile()) + self._MODULE_NAME, historyDb.getDBFile(), context.getJobId()) historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls") if historyResultSet is not None: while historyResultSet.next(): @@ -148,7 +148,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer): for downloadsDb in downloadsDbs: try: downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, downloadsDb.getDBFile()) + self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId()) queryString = "SELECT target_path, start_time, url FROM downloads"\ " INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id" downloadsResultSet = downloadsDb.runQuery(queryString) @@ -177,7 +177,7 @@ class OperaAnalyzer(general.AndroidComponentAnalyzer): for autofillDb in autofillDbs: try: autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, autofillDb.getDBFile()) + self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId()) autofillsResultSet = autofillDb.runQuery("SELECT name, value, count, date_created FROM autofill") if autofillsResultSet is not None: while autofillsResultSet.next(): @@ -205,7 +205,7 
@@ class OperaAnalyzer(general.AndroidComponentAnalyzer): for webFormAddressDb in webFormAddressDbs: try: webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, webFormAddressDb.getDBFile()) + self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId()) queryString = """ SELECT street_address, city, state, zipcode, country_code, date_modified, first_name, last_name, number, email diff --git a/InternalPythonModules/android/oruxmaps.py b/InternalPythonModules/android/oruxmaps.py index 677ea26eea..88dcb6f2f9 100644 --- a/InternalPythonModules/android/oruxmaps.py +++ b/InternalPythonModules/android/oruxmaps.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2018 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -72,7 +72,7 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() skCase = Case.getCurrentCase().getSleuthkitCase() - geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile()) + geoArtifactHelper = GeoArtifactsHelper(skCase, self._MODULE_NAME, self._PROGRAM_NAME, oruxMapsTrackpointsDb.getDBFile(), context.getJobId()) poiQueryString = "SELECT poilat, poilon, poialt, poitime, poiname FROM pois" poisResultSet = oruxMapsTrackpointsDb.runQuery(poiQueryString) @@ -96,9 +96,8 @@ class OruxMapsAnalyzer(general.AndroidComponentAnalyzer): artifact = abstractFile.newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_BOOKMARK), attributes) try: - # index the artifact for keyword search blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard() - blackboard.postArtifact(artifact, self._MODULE_NAME) + blackboard.postArtifact(artifact, self._MODULE_NAME, context.getJobId()) except Blackboard.BlackboardException as ex: 
self._logger.log(Level.SEVERE, "Unable to index blackboard artifact " + str(artifact.getArtifactID()), ex) self._logger.log(Level.SEVERE, traceback.format_exc()) diff --git a/InternalPythonModules/android/sbrowser.py b/InternalPythonModules/android/sbrowser.py index 41e9790c5b..75b56d5807 100644 --- a/InternalPythonModules/android/sbrowser.py +++ b/InternalPythonModules/android/sbrowser.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -87,7 +87,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for sbrowserDb in sbrowserDbs: try: sbrowserDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, sbrowserDb.getDBFile()) + self._MODULE_NAME, sbrowserDb.getDBFile(), context.getJobId()) bookmarkResultSet = sbrowserDb.runQuery("SELECT url, title, created FROM bookmarks WHERE url IS NOT NULL") if bookmarkResultSet is not None: while bookmarkResultSet.next(): @@ -115,7 +115,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for cookiesDb in cookiesDbs: try: cookiesDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, cookiesDb.getDBFile()) + self._MODULE_NAME, cookiesDb.getDBFile(), context.getJobId()) cookiesResultSet = cookiesDb.runQuery("SELECT host_key, name, value, creation_utc FROM cookies") if cookiesResultSet is not None: while cookiesResultSet.next(): @@ -145,7 +145,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for historyDb in historyDbs: try: historyDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, historyDb.getDBFile()) + self._MODULE_NAME, historyDb.getDBFile(), context.getJobId()) historyResultSet = historyDb.runQuery("SELECT url, title, last_visit_time FROM urls") if historyResultSet is not None: while 
historyResultSet.next(): @@ -174,7 +174,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for downloadsDb in downloadsDbs: try: downloadsDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, downloadsDb.getDBFile()) + self._MODULE_NAME, downloadsDb.getDBFile(), context.getJobId()) queryString = "SELECT target_path, start_time, url FROM downloads"\ " INNER JOIN downloads_url_chains ON downloads.id = downloads_url_chains.id" downloadsResultSet = downloadsDb.runQuery(queryString) @@ -203,7 +203,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for autofillDb in autofillDbs: try: autofillDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, autofillDb.getDBFile()) + self._MODULE_NAME, autofillDb.getDBFile(), context.getJobId()) queryString = """ SELECT name, value, count, date_created FROM autofill @@ -236,7 +236,7 @@ class SBrowserAnalyzer(general.AndroidComponentAnalyzer): for webFormAddressDb in webFormAddressDbs: try: webFormAddressDbHelper = WebBrowserArtifactsHelper(self.current_case.getSleuthkitCase(), - self._MODULE_NAME, webFormAddressDb.getDBFile()) + self._MODULE_NAME, webFormAddressDb.getDBFile(), context.getJobId()) """ Autofill form data is split across multiple tables. The quqery below joins the various tables. """ diff --git a/InternalPythonModules/android/shareit.py b/InternalPythonModules/android/shareit.py index dc9c549f23..b1f0af8314 100644 --- a/InternalPythonModules/android/shareit.py +++ b/InternalPythonModules/android/shareit.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -85,7 +85,7 @@ class ShareItAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() historyDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._MODULE_NAME, historyDb.getDBFile(), - Account.Type.SHAREIT) + Account.Type.SHAREIT, context.getJobId()) queryString = """ SELECT history_type, device_id, device_name, description, timestamp, file_path diff --git a/InternalPythonModules/android/skype.py b/InternalPythonModules/android/skype.py index 908a7da451..fbf185dfbe 100644 --- a/InternalPythonModules/android/skype.py +++ b/InternalPythonModules/android/skype.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -129,13 +129,13 @@ class SkypeAnalyzer(general.AndroidComponentAnalyzer): if user_account_instance is None: helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - skype_db.getDBFile(), Account.Type.SKYPE + skype_db.getDBFile(), Account.Type.SKYPE, context.getJobId() ) else: helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, skype_db.getDBFile(), Account.Type.SKYPE, - Account.Type.SKYPE, user_account_instance + Account.Type.SKYPE, user_account_instance, context.getJobId() ) self.parse_contacts(skype_db, helper) self.parse_calllogs(skype_db, helper) diff --git a/InternalPythonModules/android/tangomessage.py b/InternalPythonModules/android/tangomessage.py index a7b9cd888d..bb5256781b 100644 --- a/InternalPythonModules/android/tangomessage.py +++ b/InternalPythonModules/android/tangomessage.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2020 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -72,7 +72,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer): tangoDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "tc.db", True, self._PACKAGE_NAME) for tangoDbFile in tangoDbFiles: try: - self.__findTangoMessagesInDB(tangoDbFile, dataSource) + self.__findTangoMessagesInDB(tangoDbFile, dataSource, context) except Exception as ex: self._logger.log(Level.SEVERE, "Error parsing Tango messages", ex) self._logger.log(Level.SEVERE, traceback.format_exc()) @@ -80,7 +80,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer): # Error finding Tango messages. pass - def __findTangoMessagesInDB(self, tangoDb, dataSource): + def __findTangoMessagesInDB(self, tangoDb, dataSource, context): if not tangoDb: return @@ -91,7 +91,7 @@ class TangoMessageAnalyzer(general.AndroidComponentAnalyzer): tangoDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, tangoDb.getDBFile(), - Account.Type.TANGO ) + Account.Type.TANGO, context.getJobId()) resultSet = tangoDb.runQuery( "SELECT conv_id, create_time, direction, payload FROM messages ORDER BY create_time DESC;") diff --git a/InternalPythonModules/android/textmessage.py b/InternalPythonModules/android/textmessage.py index 3c46ea7d2d..05777d0c69 100644 --- a/InternalPythonModules/android/textmessage.py +++ b/InternalPythonModules/android/textmessage.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2020 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -80,12 +80,12 @@ class TextMessageAnalyzer(general.AndroidComponentAnalyzer): messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, messageDb.getDBFile(), - Account.Type.PHONE, Account.Type.IMO, selfAccountId ) + Account.Type.PHONE, Account.Type.IMO, selfAccountId, context.getJobId()) else: messageDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, messageDb.getDBFile(), - Account.Type.PHONE ) + Account.Type.PHONE, context.getJobId()) uuid = UUID.randomUUID().toString() messagesResultSet = messageDb.runQuery("SELECT address, date, read, type, subject, body, thread_id FROM sms;") diff --git a/InternalPythonModules/android/textnow.py b/InternalPythonModules/android/textnow.py index 005e1191dd..1043dab1a0 100644 --- a/InternalPythonModules/android/textnow.py +++ b/InternalPythonModules/android/textnow.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -91,7 +91,7 @@ class TextNowAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - textnow_db.getDBFile(), Account.Type.TEXTNOW + textnow_db.getDBFile(), Account.Type.TEXTNOW, context.getJobId() ) self.parse_contacts(textnow_db, helper) self.parse_calllogs(textnow_db, helper) diff --git a/InternalPythonModules/android/viber.py b/InternalPythonModules/android/viber.py index cd8fed0854..9626f5d285 100644 --- a/InternalPythonModules/android/viber.py +++ b/InternalPythonModules/android/viber.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -91,7 +91,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - contact_and_calllog_db.getDBFile(), Account.Type.VIBER) + contact_and_calllog_db.getDBFile(), Account.Type.VIBER, context.getJobId()) self.parse_contacts(contact_and_calllog_db, helper) self.parse_calllogs(contact_and_calllog_db, helper) @@ -100,7 +100,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - message_db.getDBFile(), Account.Type.VIBER) + message_db.getDBFile(), Account.Type.VIBER, context.getJobId()) self.parse_messages(message_db, helper, current_case) except NoCurrentCaseException as ex: @@ -131,9 +131,7 @@ class ViberAnalyzer(general.AndroidComponentAnalyzer): attributes = ArrayList() attributes.add(BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), self._PARSER_NAME, contacts_parser.get_contact_name())) artifact = contacts_db.getDBFile().newDataArtifact(BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes) - - # Post the artifact to blackboard - current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME) + current_case.getBlackboard().postArtifact(artifact, self._PARSER_NAME, context.getJobId()) contacts_parser.close() except SQLException as ex: diff --git a/InternalPythonModules/android/whatsapp.py b/InternalPythonModules/android/whatsapp.py index e392fdf24c..6d9e0b5ea7 100644 --- a/InternalPythonModules/android/whatsapp.py +++ b/InternalPythonModules/android/whatsapp.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -145,14 +145,14 @@ class WhatsAppAnalyzer(general.AndroidComponentAnalyzer): current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - contact_db.getDBFile(), Account.Type.WHATSAPP) + contact_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId()) self.parse_contacts(contact_db, helper) for calllog_and_message_db in calllog_and_message_dbs: current_case = Case.getCurrentCaseThrows() helper = CommunicationArtifactsHelper( current_case.getSleuthkitCase(), self._PARSER_NAME, - calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP) + calllog_and_message_db.getDBFile(), Account.Type.WHATSAPP, context.getJobId()) self.parse_calllogs(calllog_and_message_db, helper) self.parse_messages(dataSource, calllog_and_message_db, helper, current_case) diff --git a/InternalPythonModules/android/wwfmessage.py b/InternalPythonModules/android/wwfmessage.py index da3d343ad3..9cb95a411a 100644 --- a/InternalPythonModules/android/wwfmessage.py +++ b/InternalPythonModules/android/wwfmessage.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2016-2020 Basis Technology Corp. +Copyright 2016-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -78,7 +78,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer): wwfDbFiles = AppSQLiteDB.findAppDatabases(dataSource, "WordsFramework", True, self._PACKAGE_NAME) for wwfDbFile in wwfDbFiles: try: - self.__findWWFMessagesInDB(wwfDbFile, dataSource) + self.__findWWFMessagesInDB(wwfDbFile, dataSource, context) except Exception as ex: self._logger.log(Level.SEVERE, "Error parsing WWF messages", ex) self._logger.log(Level.SEVERE, traceback.format_exc()) @@ -88,7 +88,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer): self._logger.log(Level.SEVERE, traceback.format_exc()) pass - def __findWWFMessagesInDB(self, wwfDb, dataSource): + def __findWWFMessagesInDB(self, wwfDb, dataSource, context): if not wwfDb: return @@ -98,7 +98,7 @@ class WWFMessageAnalyzer(general.AndroidComponentAnalyzer): wwfDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._PARSER_NAME, wwfDb.getDBFile(), - wwfAccountType ) + wwfAccountType, context.getJobId()) uuid = UUID.randomUUID().toString() diff --git a/InternalPythonModules/android/xender.py b/InternalPythonModules/android/xender.py index 2ca86d2045..b1d1dcc8e3 100644 --- a/InternalPythonModules/android/xender.py +++ b/InternalPythonModules/android/xender.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -91,11 +91,11 @@ class XenderAnalyzer(general.AndroidComponentAnalyzer): if selfAccountId is not None: transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._MODULE_NAME, transactionDb.getDBFile(), - Account.Type.XENDER, Account.Type.XENDER, selfAccountId ) + Account.Type.XENDER, Account.Type.XENDER, selfAccountId, context.getJobId()) else: transactionDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._MODULE_NAME, transactionDb.getDBFile(), - Account.Type.XENDER) + Account.Type.XENDER, context.getJobId()) queryString = """ SELECT f_path, f_display_name, f_size_str, c_start_time, c_direction, c_session_id, diff --git a/InternalPythonModules/android/zapya.py b/InternalPythonModules/android/zapya.py index 2801b17b68..8f653f2531 100644 --- a/InternalPythonModules/android/zapya.py +++ b/InternalPythonModules/android/zapya.py @@ -1,7 +1,7 @@ """ Autopsy Forensic Browser -Copyright 2019-2020 Basis Technology Corp. +Copyright 2019-2021 Basis Technology Corp. 
Contact: carrier sleuthkit org Licensed under the Apache License, Version 2.0 (the "License"); @@ -81,7 +81,7 @@ class ZapyaAnalyzer(general.AndroidComponentAnalyzer): # transferDbHelper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), self._MODULE_NAME, transferDb.getDBFile(), - Account.Type.ZAPYA) + Account.Type.ZAPYA, context.getJobId()) queryString = "SELECT device, name, direction, createtime, path, title FROM transfer" transfersResultSet = transferDb.runQuery(queryString) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java index d01e8837c3..4dfc014598 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/AdHocSearchChildFactory.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -426,7 +426,7 @@ class AdHocSearchChildFactory extends ChildFactory { final String queryDisp = queryStr.length() > QUERY_DISPLAY_LEN ? queryStr.substring(0, QUERY_DISPLAY_LEN - 1) + " ..." 
: queryStr; try { progress = ProgressHandle.createHandle(NbBundle.getMessage(this.getClass(), "KeywordSearchResultFactory.progress.saving", queryDisp), () -> BlackboardResultWriter.this.cancel(true)); - hits.process(progress, null, this, false, saveResults); + hits.process(progress, null, this, false, saveResults, null); } finally { finalizeWorker(); } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java index 937c9567fd..383abbd3af 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/IngestSearchRunner.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2014 - 2017 Basis Technology Corp. + * Copyright 2014 - 2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -564,7 +564,7 @@ final class IngestSearchRunner { subProgresses[keywordsSearched].progress(keywordList.getName() + ": " + queryDisplayStr, unitProgress); // Create blackboard artifacts - newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true); + newResults.process(null, subProgresses[keywordsSearched], this, keywordList.getIngestMessages(), true, job.getJobId()); } //if has results diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index d350bfea6d..3140916f5e 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -648,7 +648,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule { } if (!bbartifacts.isEmpty()) { try { - 
Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName); + Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard().postArtifacts(bbartifacts, moduleName, jobId); } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { // Log error and return to continue processing logger.log(Level.WARNING, String.format("Unable to post blackboard artifacts for file $s.", aFile.getParentPath() + aFile.getName()), ex); //NON-NLS diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java index ce72c2cf69..40ed7db43d 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/QueryResults.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2018 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -35,7 +35,8 @@ import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.EscapeUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestMessage; -import org.sleuthkit.autopsy.ingest.IngestServices;; +import org.sleuthkit.autopsy.ingest.IngestServices; +; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -64,7 +65,7 @@ class QueryResults { * and publishing an event to notify subscribers of the blackboard posts. * * The KeywordSearchQuery is used to do the blackboard posts. - * + * * @param query The query. */ QueryResults(KeywordSearchQuery query) { @@ -141,9 +142,10 @@ class QueryResults { * messages inbox if there is a keyword hit in the text * exrtacted from the text source object. 
* @param saveResults Flag whether to save search results as KWS artifacts. - * + * @param ingestJobId The numeric identifier of the ingest job within which + * the artifacts are being created, may be null. */ - void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults) { + void process(ProgressHandle progress, ProgressContributor subProgress, SwingWorker worker, boolean notifyInbox, boolean saveResults, Long ingestJobId) { /* * Initialize the progress indicator to the number of keywords that will * be processed. @@ -218,15 +220,15 @@ class QueryResults { } catch (TskCoreException | NoCurrentCaseException tskCoreException) { logger.log(Level.SEVERE, "Failed to get text source object for keyword hit", tskCoreException); //NON-NLS } - + if ((content != null) && saveResults) { /* - * Post an artifact for the hit to the blackboard. + * Post an artifact for the hit to the blackboard. */ BlackboardArtifact artifact = query.createKeywordHitArtifact(content, keyword, hit, snippet, query.getKeywordList().getName()); /* - * Send an ingest inbox message for the hit. + * Send an ingest inbox message for the hit. 
*/ if (null != artifact) { hitArtifacts.add(artifact); @@ -253,7 +255,7 @@ class QueryResults { SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase(); Blackboard blackboard = tskCase.getBlackboard(); - blackboard.postArtifacts(hitArtifacts, MODULE_NAME); + blackboard.postArtifacts(hitArtifacts, MODULE_NAME, ingestJobId); } catch (NoCurrentCaseException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Failed to post KWH artifact to blackboard.", ex); //NON-NLS } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties index 27440f0cdf..602dc5b0a0 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties @@ -2,7 +2,7 @@ OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module -Chrome.moduleName=Chromium +Chrome.moduleName=Chromium Analyzer Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files. Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files. 
Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1} @@ -19,7 +19,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} -ExtractIE.moduleName.text=Internet Explorer +ExtractIE.moduleName.text=Internet Explorer Analyzer ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks. ExtractIE.parentModuleName.noSpace=RecentActivity ExtractIE.parentModuleName=Recent Activity @@ -35,7 +35,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1} ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1} ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry. -ExtractRegistry.moduleName.text=Registry +ExtractRegistry.moduleName.text=Windows Registry Analyzer ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0} ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1} ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1} @@ -43,7 +43,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity ExtractRegistry.programName=RegRipper ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} -Firefox.moduleName=FireFox +Firefox.moduleName=FireFox Analyzer Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. 
Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1} @@ -85,12 +85,12 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1} RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity RecentDocumentsByLnk.parentModuleName=Recent Activity -SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine +SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity -ExtractWebAccountType.moduleName.text=Web Account Type +ExtractWebAccountType.moduleName.text=Web Account Type Analyzer ExtractWebAccountType.parentModuleName=Recent Activity UsbDeviceIdMapper.parseAndLookup.text=Product: {0} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED index 3aadf1914b..c73bf456d5 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Bundle.properties-MERGED @@ -4,7 +4,6 @@ cannotParseXml=Unable to parse XML file: ChromeCacheExtract_adding_artifacts_msg=Chrome Cache: Adding %d artifacts for analysis. ChromeCacheExtract_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis. ChromeCacheExtract_loading_files_msg=Chrome Cache: Loading files from %s. 
-ChromeCacheExtractor.moduleName=ChromeCacheExtractor # {0} - module name # {1} - row number # {2} - table length @@ -13,25 +12,26 @@ ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries DataSourceUsage_AndroidMedia=Android Media Card DataSourceUsage_DJU_Drone_DAT=DJI Internal SD Card DataSourceUsage_FlashDrive=Flash Drive +# {0} - OS name DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0}) -DataSourceUsageAnalyzer.parentModuleName=Recent Activity +DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer DefaultPriorityDomainCategorizer_searchEngineCategory=Search Engine -DomainCategoryRunner_moduleName_text=DomainCategoryRunner +DomainCategoryRunner_moduleName_text=Domain Category Analyzer DomainCategoryRunner_parentModuleName=Recent Activity DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types -Extract.indexError.message=Failed to index artifact for keyword search. -Extract.noOpenCase.errMsg=No open case available. ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history -ExtractEdge_Module_Name=Microsoft Edge +ExtractEdge_Module_Name=Microsoft Edge Analyzer ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file ExtractEdge_process_errMsg_unableFindESEViewer=Unable to find ESEDatabaseViewer ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file +# {0} - sub module name ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history ExtractOs.androidOs.label=Android ExtractOs.androidVolume.label=OS Drive (Android) ExtractOs.debianLinuxOs.label=Linux (Debian) ExtractOs.debianLinuxVolume.label=OS Drive (Linux Debian) +ExtractOs.displayName=OS Info Analyzer ExtractOs.fedoraLinuxOs.label=Linux (Fedora) ExtractOs.fedoraLinuxVolume.label=OS Drive (Linux Fedora) 
ExtractOs.gentooLinuxOs.label=Linux (Gentoo) @@ -42,7 +42,6 @@ ExtractOs.novellSUSEOs.label=Linux (Novell SUSE) ExtractOs.novellSUSEVolume.label=OS Drive (Linux Novell SUSE) ExtractOs.osx.label=Mac OS X ExtractOs.osxVolume.label=OS Drive (OS X) -ExtractOs.parentModuleName=Recent Activity ExtractOs.redhatLinuxOs.label=Linux (Redhat) ExtractOs.redhatLinuxVolume.label=OS Drive (Linux Redhat) ExtractOs.slackwareLinuxOs.label=Linux (Slackware) @@ -59,16 +58,17 @@ ExtractOs.windowsVolume.label=OS Drive (Windows) ExtractOs.yellowDogLinuxOs.label=Linux (Yellow Dog) ExtractOs.yellowDogLinuxVolume.label=OS Drive (Linux Yellow Dog) ExtractOS_progressMessage=Checking for OS +# {0} - sub module name ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files -ExtractPrefetch_module_name=Windows Prefetch Extractor -ExtractRecycleBin_module_name=Recycle Bin +ExtractPrefetch_module_name=Windows Prefetch Analyzer +ExtractRecycleBin_module_name=Recycle Bin Analyzer ExtractRecycleBin_Recyle_Bin_Display_Name=Recycle Bin ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files. 
ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files -ExtractSafari_Module_Name=Safari +ExtractSafari_Module_Name=Safari Analyzer ExtractSru_error_finding_export_srudb_program=Error finding export_srudb program -ExtractSru_module_name=System Resource Usage Extractor +ExtractSru_module_name=System Resource Usage Analyzer ExtractSru_process_error_executing_export_srudb_program=Error running export_srudb program ExtractSru_process_errormsg_find_software_hive=Unable to find SOFTWARE HIVE file ExtractSru_process_errormsg_find_srudb_dat=Unable to find srudb.dat file @@ -77,6 +77,7 @@ ExtractSru_process_errormsg_write_srudb_dat=Unable to write srudb.dat file ExtractWebAccountType.role.admin=Administrator role ExtractWebAccountType.role.moderator=Moderator role ExtractWebAccountType.role.user=User role +ExtractZone_displayName=\ Zone Identifier Analyzer ExtractZone_Internet=Internet Zone ExtractZone_Local_Intranet=Local Intranet Zone ExtractZone_Local_Machine=Local Machine Zone @@ -86,12 +87,12 @@ ExtractZone_progress_Msg=Extracting :Zone.Identifer files ExtractZone_Restricted=Restricted Sites Zone ExtractZone_Trusted=Trusted Sites Zone Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis. 
-Jumplist_module_name=Windows Jumplist Extractor +Jumplist_module_name=Windows Jumplist Analyzer OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Long-Description=Recent Activity ingest module.\n\n The module extracts useful information about the recent user activity on the disk image being ingested, such as:\n\n- Recently open documents,\n- Web activity (sites visited, stored cookies, book marked sites, search engine queries, file downloads),\n- Recently attached devices,\n- Installed programs.\n\nThe module currently supports Windows only disk images.\nThe plugin is also fully functional when deployed on Windows version of Autopsy. OpenIDE-Module-Name=RecentActivity OpenIDE-Module-Short-Description=Recent Activity finder ingest module -Chrome.moduleName=Chromium +Chrome.moduleName=Chromium Analyzer Chrome.getHistory.errMsg.errGettingFiles=Error when trying to get Chrome history files. Chrome.getHistory.errMsg.couldntFindAnyFiles=Could not find any allocated Chrome history files. Chrome.getHistory.errMsg.errAnalyzingFile={0}: Error while trying to analyze file:{1} @@ -108,7 +109,7 @@ Chrome.getLogin.errMsg.errGettingFiles=Error when trying to get Chrome history f Chrome.getLogin.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} Chrome.getAutofill.errMsg.errGettingFiles=Error when trying to get Chrome Web Data files. Chrome.getAutofill.errMsg.errAnalyzingFiles={0}: Error while trying to analyze file:{1} -ExtractIE.moduleName.text=Internet Explorer +ExtractIE.moduleName.text=Internet Explorer Analyzer ExtractIE.getBookmark.errMsg.errGettingBookmarks={0}: Error getting Internet Explorer Bookmarks. 
ExtractIE.parentModuleName.noSpace=RecentActivity ExtractIE.parentModuleName=Recent Activity @@ -124,7 +125,7 @@ ExtractIE.getHistory.errMsg.errProcHist={0}: Error processing Internet Explorer ExtractIE.parsePascoOutput.errMsg.notFound={0}: Pasco output not found: {1} ExtractIE.parsePascoOutput.errMsg.errParsing={0}: Error parsing IE history entry {1} ExtractIE.parsePascoOutput.errMsg.errParsingEntry={0}: Error parsing Internet Explorer History entry. -ExtractRegistry.moduleName.text=Registry +ExtractRegistry.moduleName.text=Windows Registry Analyzer ExtractRegistry.findRegFiles.errMsg.errReadingFile=Error fetching registry file: {0} ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp={0}: Error analyzing registry file {1} ExtractRegistry.analyzeRegFiles.failedParsingResults={0}: Failed parsing registry file results {1} @@ -132,7 +133,7 @@ ExtractRegistry.parentModuleName.noSpace=RecentActivity ExtractRegistry.programName=RegRipper ExtractRegistry.analyzeRegFiles.errMsg.errReadingRegFile={0}: Error reading registry file - {1} ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile={0}: Failed to analyze registry file {1} -Firefox.moduleName=FireFox +Firefox.moduleName=FireFox Analyzer Firefox.getHistory.errMsg.errFetchingFiles=Error fetching internet history files for Firefox. Firefox.getHistory.errMsg.noFilesFound=No FireFox history files found. 
Firefox.getHistory.errMsg.errAnalyzeFile={0}: Error while trying to analyze file:{1} @@ -212,6 +213,7 @@ RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles={0}: Error getting lnk File RecentDocumentsByLnk.getRecDoc.errParsingFile={0}: Error parsing Recent File {1} RecentDocumentsByLnk.parentModuleName.noSpace=RecentActivity RecentDocumentsByLnk.parentModuleName=Recent Activity +RecentDocumentsByLnk_displayName=Recent Documents by Link Analyzer Recently_Used_Artifacts_Adobe=Recently opened according to Adobe MRU Recently_Used_Artifacts_Applets=Recently opened according to Applets registry key Recently_Used_Artifacts_ArcHistory=Recently opened by 7Zip @@ -223,14 +225,15 @@ Recently_Used_Artifacts_Winrar=Recently opened according to WinRAR MRU Registry_System_Bam=Recently Executed according to Background Activity Moderator (BAM) RegRipperFullNotFound=Full version RegRipper executable not found. RegRipperNotFound=Autopsy RegRipper executable not found. +# {0} - file name SearchEngineURLQueryAnalyzer.init.exception.msg=Unable to find {0}. 
-SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine +SearchEngineURLQueryAnalyzer.moduleName.text=Search Engine Query Analyzer SearchEngineURLQueryAnalyzer.engineName.none=NONE SearchEngineURLQueryAnalyzer.domainSubStr.none=NONE SearchEngineURLQueryAnalyzer.toString=Name: {0}\nDomain Substring: {1}\nCount: {2}\nSplit Tokens: \n{3} SearchEngineURLQueryAnalyzer.parentModuleName.noSpace=RecentActivity SearchEngineURLQueryAnalyzer.parentModuleName=Recent Activity -ExtractWebAccountType.moduleName.text=Web Account Type +ExtractWebAccountType.moduleName.text=Web Account Type Analyzer ExtractWebAccountType.parentModuleName=Recent Activity Shellbag_Artifact_Display_Name=Shell Bags Shellbag_Key_Attribute_Display_Name=Key diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java index 616b13d41f..e2cbcc7658 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ChromeCacheExtractor.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. 
* * Project Contact/Architect: carrier sleuthkit org * @@ -151,15 +151,14 @@ final class ChromeCacheExtractor { } @NbBundle.Messages({ - "ChromeCacheExtractor.moduleName=ChromeCacheExtractor", "# {0} - module name", "# {1} - row number", "# {2} - table length", "# {3} - cache path", "ChromeCacheExtractor.progressMsg={0}: Extracting cache entry {1} of {2} entries from {3}" }) - ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar ) { - moduleName = Bundle.ChromeCacheExtractor_moduleName(); + ChromeCacheExtractor(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + moduleName = NbBundle.getMessage(Chromium.class, "Chrome.moduleName"); this.dataSource = dataSource; this.context = context; this.progressBar = progressBar; @@ -415,7 +414,7 @@ final class ChromeCacheExtractor { progressBar.progress(String.format(Bundle.ChromeCacheExtract_adding_artifacts_msg(), artifactsAdded.size())); Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard(); try { - blackboard.postArtifacts(artifactsAdded, moduleName); + blackboard.postArtifacts(artifactsAdded, moduleName, context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.WARNING, String.format("Failed to post cacheIndex artifacts "), ex); //NON-NLS } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java index 371394ff10..357f10b8a7 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chromium.java @@ -54,8 +54,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; -import static 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -69,7 +67,7 @@ import org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper; * Chromium recent activity extraction */ class Chromium extends Extract { - + private static final String HISTORY_QUERY = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, " //NON-NLS + "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) AS from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url"; //NON-NLS private static final String COOKIE_QUERY = "SELECT name, value, host_key, expires_utc,last_access_utc, creation_utc FROM cookies"; //NON-NLS @@ -94,13 +92,13 @@ class Chromium extends Extract { private static final String WEB_DATA_FILE_NAME = "Web Data"; private static final String UC_BROWSER_NAME = "UC Browser"; private static final String ENCRYPTED_FIELD_MESSAGE = "The data was encrypted."; - + private Boolean databaseEncrypted = false; private Boolean fieldEncrypted = false; private final Logger logger = Logger.getLogger(this.getClass().getName()); private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; private static final Map BROWSERS_MAP = ImmutableMap.builder() .put("Microsoft Edge", "Microsoft/Edge/User Data/Default") @@ -127,20 +125,19 @@ class Chromium extends Extract { "Progress_Message_Chrome_Logins=Chrome Logins Browser {0}", "Progress_Message_Chrome_Cache=Chrome Cache",}) - Chromium() { - super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName")); + Chromium(IngestJobContext context) { + super(NbBundle.getMessage(Chromium.class, "Chrome.moduleName"), context); + this.context = context; } @Override - public void process(Content dataSource, IngestJobContext context, 
DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = context; dataFound = false; long ingestJobId = context.getJobId(); for (Map.Entry browser : BROWSERS_MAP.entrySet()) { String browserName = browser.getKey(); - String browserLocation = browser.getValue(); progressBar.progress(NbBundle.getMessage(this.getClass(), "Progress_Message_Chrome_History", browserName)); this.getHistory(browser.getKey(), browser.getValue(), ingestJobId); if (context.dataSourceIngestIsCancelled()) { @@ -181,14 +178,14 @@ class Chromium extends Extract { progressBar.progress(Bundle.Progress_Message_Chrome_Cache()); ChromeCacheExtractor chromeCacheExtractor = new ChromeCacheExtractor(dataSource, context, progressBar); chromeCacheExtractor.processCaches(); - } /** * Query for history databases and add artifacts - * @param browser - * @param browserLocation - * @param ingestJobId The ingest job id. + * + * @param browser + * @param browserLocation + * @param ingestJobId The ingest job id. 
*/ private void getHistory(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -202,7 +199,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -238,13 +235,13 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome web history artifacts file '%s' (id=%d).", historyFile.getName(), historyFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile", - this.getName(), historyFile.getName())); + this.getDisplayName(), historyFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome web history artifacts file '%s' (id=%d).", temps, historyFile.getName(), historyFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getHistory.errMsg.errAnalyzingFile", - this.getName(), historyFile.getName())); + this.getDisplayName(), historyFile.getName())); continue; } File dbFile = new File(temps); @@ -253,8 +250,8 @@ class Chromium extends Extract { break; } List> tempList; - tempList = this.dbConnect(temps, HISTORY_QUERY); - logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + tempList = this.querySQLiteDb(temps, HISTORY_QUERY); + logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, @@ -276,7 +273,7 @@ class Chromium extends Extract { (NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, historyFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create history artifact for file (%d)", historyFile.getId()), ex); } @@ -291,9 +288,10 @@ class Chromium extends Extract { /** * Search for bookmark files and make artifacts. + * * @param browser - * @param browserLocation - * @param ingestJobId The ingest job id. + * @param browserLocation + * @param ingestJobId The ingest job id. */ private void getBookmark(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -307,7 +305,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -319,7 +317,6 @@ class Chromium extends Extract { dataFound = true; Collection bbartifacts = new ArrayList<>(); int j = 0; - while (j < bookmarkFiles.size()) { AbstractFile bookmarkFile = bookmarkFiles.get(j++); if ((bookmarkFile.getSize() == 0) || (bookmarkFile.getName().toLowerCase().contains("-slack")) @@ -335,17 +332,17 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome bookmark artifacts file '%s' (id=%d).", bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile", - 
this.getName(), bookmarkFile.getName())); + this.getDisplayName(), bookmarkFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome bookmark artifacts file '%s' (id=%d).", temps, bookmarkFile.getName(), bookmarkFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile", - this.getName(), bookmarkFile.getName())); + this.getDisplayName(), bookmarkFile.getName())); continue; } - logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getDisplayName(), temps}); //NON-NLS File dbFile = new File(temps); if (context.dataSourceIngestIsCancelled()) { dbFile.delete(); @@ -374,7 +371,7 @@ class Chromium extends Extract { } catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) { logger.log(Level.WARNING, "Error parsing Json from Chrome Bookmark.", ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getBookmark.errMsg.errAnalyzingFile3", - this.getName(), bookmarkFile.getName())); + this.getDisplayName(), bookmarkFile.getName())); continue; } @@ -419,14 +416,14 @@ class Chromium extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); try { - bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create bookmark artifact for file (%d)", bookmarkFile.getId()), ex); } - + } - - if(!context.dataSourceIngestIsCancelled()) { + + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } bbartifacts.clear(); @@ -436,9 +433,10 @@ class Chromium extends Extract { /** * Queries 
for cookie files and adds artifacts + * * @param browser * @param browserLocation - * @param ingestJobId The ingest job id. + * @param ingestJobId The ingest job id. */ private void getCookie(String browser, String browserLocation, long ingestJobId) { @@ -455,7 +453,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -479,13 +477,13 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome cookie artifacts file '%s' (id=%d).", cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile", - this.getName(), cookiesFile.getName())); + this.getDisplayName(), cookiesFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome cookie artifacts file '%s' (id=%d).", temps, cookiesFile.getName(), cookiesFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getCookie.errMsg.errAnalyzeFile", - this.getName(), cookiesFile.getName())); + this.getDisplayName(), cookiesFile.getName())); continue; } File dbFile = new File(temps); @@ -494,8 +492,8 @@ class Chromium extends Extract { break; } - List> tempList = this.dbConnect(temps, COOKIE_QUERY); - logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, COOKIE_QUERY); + logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { 
Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, @@ -519,7 +517,7 @@ class Chromium extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), domain)); try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create cookie artifact for file (%d)", cookiesFile.getId()), ex); } @@ -535,9 +533,10 @@ class Chromium extends Extract { /** * Queries for download files and adds artifacts + * * @param browser * @param browserLocation - * @param ingestJobId The ingest job id. + * @param ingestJobId The ingest job id. */ private void getDownload(String browser, String browserLocation, long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); @@ -551,7 +550,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -577,13 +576,13 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome download artifacts file '%s' (id=%d).", downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1", - this.getName(), downloadFile.getName())); + this.getDisplayName(), downloadFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome download artifacts file '%s' (id=%d).", temps, downloadFile.getName(), downloadFile.getId()), ex); //NON-NLS 
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getDownload.errMsg.errAnalyzeFiles1", - this.getName(), downloadFile.getName())); + this.getDisplayName(), downloadFile.getName())); continue; } File dbFile = new File(temps); @@ -595,12 +594,12 @@ class Chromium extends Extract { List> tempList; if (isChromePreVersion30(temps)) { - tempList = this.dbConnect(temps, DOWNLOAD_QUERY); + tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY); } else { - tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V30); + tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY_V30); } - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); String fullPath = result.get("full_path").toString(); //NON-NLS @@ -628,9 +627,9 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, RecentActivityExtracterModuleFactory.getModuleName(), browser)); - // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. + // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. 
try { - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, downloadFile, bbattributes); bbartifacts.add(webDownloadArtifact); String normalizedFullPath = FilenameUtils.normalize(fullPath, true); for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(normalizedFullPath), FilenameUtils.getPath(normalizedFullPath))) { @@ -652,9 +651,10 @@ class Chromium extends Extract { /** * Gets user logins from Login Data sqlite database + * * @param browser * @param browserLocation - * @param ingestJobId The ingest job id. + * @param ingestJobId The ingest job id. */ private void getLogins(String browser, String browserLocation, long ingestJobId) { @@ -670,7 +670,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -694,13 +694,13 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome login artifacts file '%s' (id=%d).", loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles", - this.getName(), loginDataFile.getName())); + this.getDisplayName(), loginDataFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome login artifacts file '%s' (id=%d).", temps, loginDataFile.getName(), loginDataFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), 
"Chrome.getLogin.errMsg.errAnalyzingFiles", - this.getName(), loginDataFile.getName())); + this.getDisplayName(), loginDataFile.getName())); continue; } File dbFile = new File(temps); @@ -708,8 +708,8 @@ class Chromium extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, LOGIN_QUERY); - logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, LOGIN_QUERY); + logger.log(Level.INFO, "{0}- Now getting login information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); @@ -741,7 +741,7 @@ class Chromium extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), browser)); try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_SERVICE_ACCOUNT, loginDataFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create service account artifact for file (%d)", loginDataFile.getId()), ex); } @@ -758,9 +758,10 @@ class Chromium extends Extract { /** * Gets and parses Autofill data from 'Web Data' database, and creates * TSK_WEB_FORM_AUTOFILL, TSK_WEB_FORM_ADDRESS artifacts + * * @param browser * @param browserLocation - * @param ingestJobId The ingest job id. + * @param ingestJobId The ingest job id. 
*/ private void getAutofill(String browser, String browserLocation, long ingestJobId) { @@ -776,7 +777,7 @@ class Chromium extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Chrome.getAutofills.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -801,13 +802,13 @@ class Chromium extends Extract { logger.log(Level.WARNING, String.format("Error reading Chrome Autofill artifacts file '%s' (id=%d).", webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getAutofill.errMsg.errAnalyzingFiles", - this.getName(), webDataFile.getName())); + this.getDisplayName(), webDataFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Chrome Web data file '%s' (id=%d).", tempFilePath, webDataFile.getName(), webDataFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Chrome.getLogin.errMsg.errAnalyzingFiles", - this.getName(), webDataFile.getName())); + this.getDisplayName(), webDataFile.getName())); continue; } File dbFile = new File(tempFilePath); @@ -826,20 +827,20 @@ class Chromium extends Extract { getFormAddressArtifacts(webDataFile, tempFilePath, isSchemaV8X); if (databaseEncrypted) { String comment = String.format("%s Autofill Database Encryption Detected", browser); - Collection bbattributes = Arrays.asList( - new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, - RecentActivityExtracterModuleFactory.getModuleName(), comment)); + Collection bbattributes = Arrays.asList( + new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, + RecentActivityExtracterModuleFactory.getModuleName(), comment)); - bbartifacts.add( - webDataFile.newAnalysisResult( - BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, 
Score.SCORE_NOTABLE, - null, null, comment, bbattributes).getAnalysisResult()); + bbartifacts.add( + webDataFile.newAnalysisResult( + BlackboardArtifact.Type.TSK_ENCRYPTION_DETECTED, Score.SCORE_NOTABLE, + null, null, comment, bbattributes).getAnalysisResult()); } } catch (NoCurrentCaseException | TskCoreException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, String.format("Error adding artifacts to the case database " + "for chrome file %s [objId=%d]", webDataFile.getName(), webDataFile.getId()), ex); } - + dbFile.delete(); } @@ -852,8 +853,8 @@ class Chromium extends Extract { * Extracts and returns autofill artifacts from the given database file * * @param webDataFile - the database file in the data source - * @param dbFilePath - path to a temporary file where the DB file is - * extracted + * @param dbFilePath - path to a temporary file where the DB file is + * extracted * @param isSchemaV8X - indicates of the DB schema version is 8X or greater * * @return collection of TSK_WEB_FORM_AUTOFILL artifacts @@ -866,8 +867,8 @@ class Chromium extends Extract { String autoFillquery = (isSchemaV8X) ? 
AUTOFILL_QUERY_V8X : AUTOFILL_QUERY; - List> autofills = this.dbConnect(dbFilePath, autoFillquery); - logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, autofills.size()}); //NON-NLS + List> autofills = this.querySQLiteDb(dbFilePath, autoFillquery); + logger.log(Level.INFO, "{0}- Now getting Autofill information from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, autofills.size()}); //NON-NLS for (HashMap result : autofills) { Collection bbattributes = new ArrayList<>(); @@ -902,10 +903,10 @@ class Chromium extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE)); } - + // Add an artifact try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_FORM_AUTOFILL, webDataFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create web form autopfill artifact for file (%d)", webDataFile.getId()), ex); } @@ -920,8 +921,8 @@ class Chromium extends Extract { * database file * * @param webDataFile - the database file in the data source - * @param dbFilePath - path to a temporary file where the DB file is - * extracted + * @param dbFilePath - path to a temporary file where the DB file is + * extracted * @param isSchemaV8X - indicates of the DB schema version is 8X or greater * * @return collection of TSK_WEB_FORM_ADDRESS artifacts @@ -936,16 +937,16 @@ class Chromium extends Extract { WebBrowserArtifactsHelper helper = new WebBrowserArtifactsHelper( Case.getCurrentCaseThrows().getSleuthkitCase(), NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), - webDataFile + webDataFile, context.getJobId() ); // Get Web form addresses - List> addresses = 
this.dbConnect(dbFilePath, webformAddressQuery); - logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getName(), dbFilePath, addresses.size()}); //NON-NLS + List> addresses = this.querySQLiteDb(dbFilePath, webformAddressQuery); + logger.log(Level.INFO, "{0}- Now getting Web form addresses from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), dbFilePath, addresses.size()}); //NON-NLS for (HashMap result : addresses) { fieldEncrypted = false; - + String first_name = processFields(result.get("first_name")); String middle_name = processFields(result.get("middle_name")); String last_name = processFields(result.get("last_name")); @@ -968,7 +969,7 @@ class Chromium extends Extract { long use_date = 0; if (isSchemaV8X) { - + full_name = processFields(result.get("full_name")); street_address = processFields(result.get("street_address")); date_modified = result.get("date_modified").toString() != null ? Long.valueOf(result.get("date_modified").toString()) : 0; @@ -995,7 +996,7 @@ class Chromium extends Extract { if (fieldEncrypted) { otherAttributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, RecentActivityExtracterModuleFactory.getModuleName(), ENCRYPTED_FIELD_MESSAGE)); //NON-NLS - + } } @@ -1007,9 +1008,12 @@ class Chromium extends Extract { } /** - * Check the type of the object and if it is bytes then it is encrypted and return the string and - * set flag that field and file are encrypted - * @param dataValue Object to be checked, the object is from a database result set + * Check the type of the object and if it is bytes then it is encrypted and + * return the string and set flag that field and file are encrypted + * + * @param dataValue Object to be checked, the object is from a database + * result set + * * @return the actual string or an empty string */ private String processFields(Object dataValue) { @@ -1018,14 +1022,14 @@ class Chromium extends Extract { fieldEncrypted = 
true; databaseEncrypted = true; } - + return dataValue.toString() != null ? dataValue.toString() : ""; - + } - + private boolean isChromePreVersion30(String temps) { String query = "PRAGMA table_info(downloads)"; //NON-NLS - List> columns = this.dbConnect(temps, query); + List> columns = this.querySQLiteDb(temps, query); for (HashMap col : columns) { if (col.get("name").equals("url")) { //NON-NLS return true; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java index 0e4fa5ecf1..f9686076f2 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DataSourceUsageAnalyzer.java @@ -42,7 +42,7 @@ import org.sleuthkit.datamodel.TskData; * systems the images may have been used by. * */ -@Messages({"DataSourceUsageAnalyzer.parentModuleName=Recent Activity"}) +@Messages({"DataSourceUsageAnalyzer.displayName=Data Source Usage Analyzer"}) class DataSourceUsageAnalyzer extends Extract { private static final Logger logger = Logger.getLogger(DataSourceUsageAnalyzer.class.getName()); @@ -56,37 +56,38 @@ class DataSourceUsageAnalyzer extends Extract { {".android_secure", "android", "audio", "photos", "dcim", "music", "pictures", "videos"}; //NON-NLS private Content dataSource; + private final IngestJobContext context; + + DataSourceUsageAnalyzer(IngestJobContext context) { + super(Bundle.DataSourceUsageAnalyzer_displayName(), context); + this.context = context; + } @Messages({ "# {0} - OS name", "DataSourceUsageAnalyzer.customVolume.label=OS Drive ({0})", "Progress_Message_Analyze_Usage=Data Sources Usage Analysis",}) @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; try { 
progressBar.progress(Bundle.Progress_Message_Analyze_Usage()); - createDataSourceUsageArtifacts(context); + createDataSourceUsageArtifacts(); } catch (TskCoreException ex) { logger.log(Level.WARNING, "Failed to check if datasource contained a volume with operating system specific files", ex); } } - private void createDataSourceUsageArtifacts(IngestJobContext context) throws TskCoreException { - + private void createDataSourceUsageArtifacts() throws TskCoreException { createOSInfoDataSourceUsageArtifacts(); - if (context.dataSourceIngestIsCancelled()) { return; } - createAndroidMediaCardArtifacts(); - if (context.dataSourceIngestIsCancelled()) { return; } - createDJIDroneDATArtitifacts(); } @@ -146,9 +147,9 @@ class DataSourceUsageAnalyzer extends Extract { } Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION, - Bundle.DataSourceUsageAnalyzer_parentModuleName(), + getRAModuleName(), dataSourceUsageDescription)); //NON-NLS - postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes)); + postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_DATA_SOURCE_USAGE, dataSource, bbattributes)); } /** @@ -160,7 +161,7 @@ class DataSourceUsageAnalyzer extends Extract { */ private void checkIfOsSpecificVolume(ExtractOs.OS_TYPE osType) throws TskCoreException { for (String filePath : osType.getFilePaths()) { - for (AbstractFile file : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, + for (AbstractFile file : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(filePath), FilenameUtils.getPath(filePath))) { createDataSourceUsageArtifact(osType.getDsUsageLabel()); return; @@ -199,7 +200,7 @@ class DataSourceUsageAnalyzer extends Extract { return; } - if(hasAndroidMediaCardRootNames()) { + if 
(hasAndroidMediaCardRootNames()) { return; } @@ -214,12 +215,12 @@ class DataSourceUsageAnalyzer extends Extract { /** * Checks the data source for any android media card root files - * + * * @return True if root files were found - * - * @throws TskCoreException + * + * @throws TskCoreException */ - private boolean hasAndroidMediaCardRootNames() throws TskCoreException{ + private boolean hasAndroidMediaCardRootNames() throws TskCoreException { FileManager fileManager = currentCase.getServices().getFileManager(); for (String fileName : ANDROID_MEDIACARD_ROOT_FILENAMES) { for (AbstractFile file : fileManager.findFiles(dataSource, fileName, "/")) { // NON-NLS diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java index d24a031a48..084ac10f5c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/DomainCategoryRunner.java @@ -44,7 +44,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -59,7 +58,7 @@ import org.sleuthkit.autopsy.url.analytics.DomainCategory; * is created. 
*/ @Messages({ - "DomainCategoryRunner_moduleName_text=DomainCategoryRunner", + "DomainCategoryRunner_moduleName_text=Domain Category Analyzer", "DomainCategoryRunner_Progress_Message_Domain_Types=Finding Domain Types", "DomainCategoryRunner_parentModuleName=Recent Activity" }) @@ -98,13 +97,15 @@ class DomainCategoryRunner extends Extract { BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY) .map(BlackboardArtifact.Type::new) .collect(Collectors.toList()); + private final IngestJobContext context; /** * Get seconds from epoch from the mapping for the attribute type id. * - * @param attrMap A mapping of attribute type id to BlackboardAttribute for - * an artifact. + * @param attrMap A mapping of attribute type id to BlackboardAttribute + * for an artifact. * @param attrTypeId The attribute type id to fetch. + * * @return The time in seconds from epoch or 0 if cannot be found. */ private static long getTimeOrZero(Map attrMap, int attrTypeId) { @@ -119,9 +120,10 @@ class DomainCategoryRunner extends Extract { /** * Get string for attribute type id or "" if cannot be determined. * - * @param attrMap A mapping of attribute type id to BlackboardAttribute for - * an artifact. + * @param attrMap A mapping of attribute type id to BlackboardAttribute + * for an artifact. * @param attrTypeId The attribute type id to fetch. + * * @return The string value or "" if cannot be determined or null. */ private static String getStringOrEmpty(Map attrMap, int attrTypeId) { @@ -174,14 +176,14 @@ class DomainCategoryRunner extends Extract { }; private Content dataSource; - private IngestJobContext context; private List domainProviders = Collections.emptyList(); /** * Main constructor. */ - DomainCategoryRunner() { - + DomainCategoryRunner(IngestJobContext context) { + super(Bundle.DomainCategoryRunner_moduleName_text(), context); + this.context = context; } /** @@ -189,6 +191,7 @@ class DomainCategoryRunner extends Extract { * determined, returns null. 
* * @param urlString The url string. + * * @return The host or null if cannot be determined. */ private String getHost(String urlString) { @@ -218,7 +221,8 @@ class DomainCategoryRunner extends Extract { * Attempts to find the category for the given host/domain. * * @param domain The domain for the item. - * @param host The host for the item. + * @param host The host for the item. + * * @return The domain category result or null if none can be determined. */ private DomainCategory findCategory(String domain, String host) { @@ -252,8 +256,10 @@ class DomainCategoryRunner extends Extract { * Main constructor. * * @param abstractFile The parent file of the artifact. - * @param host The host of the artifact found in the url attribute. - * @param domain The domain of the artifact in the TSK_DOMAIN attribute. + * @param host The host of the artifact found in the url + * attribute. + * @param domain The domain of the artifact in the TSK_DOMAIN + * attribute. */ ArtifactHost(AbstractFile abstractFile, String host, String domain) { this.abstractFile = abstractFile; @@ -288,8 +294,10 @@ class DomainCategoryRunner extends Extract { * parent file. * * @param artifact The web artifact to parse. + * * @return The pertinent information or null if important information cannot - * be determined. + * be determined. + * * @throws TskCoreException */ private ArtifactHost getDomainAndHost(BlackboardArtifact artifact) throws TskCoreException { @@ -337,9 +345,10 @@ class DomainCategoryRunner extends Extract { * item is added to the set. * * @param items The set of items. - * @param item The item whose existence will be checked in the set. + * @param item The item whose existence will be checked in the set. + * * @return True if item is already contained in 'items'. False if the is - * null or if not contained in 'items'. + * null or if not contained in 'items'. 
*/ private static boolean isDuplicateOrAdd(Set items, String item) { if (StringUtils.isBlank(item)) { @@ -428,8 +437,8 @@ class DomainCategoryRunner extends Extract { /** * Adds a TSK_WEB_CATEGORIZATION artifact for the given information. * - * @param artHost Pertinent details for the artifact (i.e. host, domain, - * parent file). + * @param artHost Pertinent details for the artifact (i.e. host, + * domain, parent file). * @param domainCategory The category for this host/domain. */ private void addCategoryArtifact(ArtifactHost artHost, String domainCategory) throws TskCoreException { @@ -439,60 +448,58 @@ class DomainCategoryRunner extends Extract { new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_HOST, moduleName, artHost.getHost()), new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, moduleName, domainCategory) ); - postArtifact(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes)); + postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_CATEGORIZATION, artHost.getAbstractFile(), bbattributes)); } @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = context; - progressBar.progress(Bundle.DomainCategoryRunner_Progress_Message_Domain_Types()); this.findDomainTypes(); } @Override - void configExtractor() throws IngestModule.IngestModuleException { + void startUp() throws IngestModule.IngestModuleException { // lookup all providers, filter null providers, and sort providers Collection lookupCollection = Lookup.getDefault().lookupAll(DomainCategorizer.class); - Collection lookupList = (lookupCollection == null) ? - Collections.emptyList() : - lookupCollection; - + Collection lookupList = (lookupCollection == null) + ? 
Collections.emptyList() + : lookupCollection; + // this will be the class instance of the foundProviders List foundProviders = new ArrayList<>(); - + // find the custom domain categories provider if present and add it first to the list lookupList.stream() .filter(categorizer -> categorizer.getClass().getName().contains(CUSTOM_CATEGORIZER_PATH)) .findFirst() .ifPresent((provider) -> foundProviders.add(provider)); - + // add the default priority categorizer foundProviders.add(new DefaultPriorityDomainCategorizer()); - + // add all others except for the custom web domain categorizer, the default priority // categorizer and the default categorizer lookupList.stream() .filter(categorizer -> categorizer != null) .filter(categorizer -> { String className = categorizer.getClass().getName(); - return !className.contains(CUSTOM_CATEGORIZER_PATH) && - !className.equals(DefaultPriorityDomainCategorizer.class.getName()) && - !className.equals(DefaultDomainCategorizer.class.getName()); + return !className.contains(CUSTOM_CATEGORIZER_PATH) + && !className.equals(DefaultPriorityDomainCategorizer.class.getName()) + && !className.equals(DefaultDomainCategorizer.class.getName()); }) .sorted((a, b) -> a.getClass().getName().compareToIgnoreCase(b.getClass().getName())) .forEach(foundProviders::add); - + // add the default categorizer last foundProviders.add(new DefaultDomainCategorizer()); - + for (DomainCategorizer provider : foundProviders) { try { provider.initialize(); } catch (DomainCategorizerException ex) { - throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " + - provider.getClass().getSimpleName(), ex); + throw new IngestModule.IngestModuleException("There was an error instantiating the provider: " + + provider.getClass().getSimpleName(), ex); } } @@ -500,7 +507,7 @@ class DomainCategoryRunner extends Extract { } @Override - public void complete() { + public void shutDown() { if (this.domainProviders != null) { for (DomainCategorizer 
provider : this.domainProviders) { try { @@ -510,7 +517,6 @@ class DomainCategoryRunner extends Extract { } } } - - logger.info("Domain categorization completed."); //NON-NLS + super.shutDown(); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index fc8cb67c65..8eb649b93d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -1,19 +1,19 @@ /* * * Autopsy Forensic Browser - * + * * Copyright 2012-2021 Basis Technology Corp. - * + * * Copyright 2012 42six Solutions. * Contact: aebadirad 42six com * Project Contact/Architect: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,9 +35,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.logging.Level; -import org.openide.util.NbBundle.Messages; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect; import org.sleuthkit.autopsy.datamodel.ContentUtils; @@ -47,222 +45,195 @@ import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT; +import org.sleuthkit.datamodel.BlackboardArtifact.Category; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Score; import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.TskCoreException; - abstract class Extract { - protected Case currentCase; - protected SleuthkitCase tskCase; - protected Blackboard blackboard; - private final Logger logger = Logger.getLogger(this.getClass().getName()); + protected final Case currentCase; + protected final SleuthkitCase tskCase; + private static final Logger logger = Logger.getLogger(Extract.class.getName()); private final ArrayList errorMessages = new ArrayList<>(); - private String moduleName = ""; - boolean dataFound = false; - private RAOsAccountCache osAccountCache = null; - - Extract() { - this(""); - } - - Extract(String moduleName) { - this.moduleName = moduleName; - } - - final void init() throws IngestModuleException { - try { - currentCase = Case.getCurrentCaseThrows(); - tskCase = currentCase.getSleuthkitCase(); - blackboard = tskCase.getBlackboard(); - } catch (NoCurrentCaseException ex) { - throw new IngestModuleException(Bundle.Extract_indexError_message(), ex); - } 
- configExtractor(); - } - - /** - * Override to add any module-specific configuration - * - * @throws IngestModuleException - */ - void configExtractor() throws IngestModuleException { - } + private final String displayName; + protected boolean dataFound = false; + private final IngestJobContext context; /** - * Extractor process method intended to mirror the Ingest process method. - * - * Subclasses should overload just the abstract version of the method. - * - * @param dataSource The data source object to ingest. - * @param context The the context for the current job. - * @param progressBar A handle to the progressBar for the module to update with status. - * @param osAccountCache The OsAccountCache. - */ - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar, RAOsAccountCache osAccountCache) { - this.osAccountCache = osAccountCache; - process(dataSource, context, progressBar); - } - - abstract void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar); - - void complete() { - } - - /** - * Returns a List of string error messages from the inheriting class + * Constructs the super class part of an extractor used by the Recent + * Activity ingest module to do its analysis for an ingest job. * - * @return errorMessages returns all error messages logged + * @param displayName The display name of the extractor. + * @param context The ingest job context. + */ + Extract(String displayName, IngestJobContext context) { + this.displayName = displayName; + this.context = context; + currentCase = Case.getCurrentCase(); + tskCase = currentCase.getSleuthkitCase(); + } + + /** + * Starts up this extractor. Called by the Recent Activity ingest module in + * its startUp() method. + * + * @throws IngestModuleException The exception is thrown if there is an + * error starting up the extractor. 
+ */ + void startUp() throws IngestModuleException { + } + + /** + * Analyzes the given data source. Called by the Recent Activity ingest + * module in its process() method. + * + * @param dataSource The data source to be analyzed. + * @param progressBar A progress object that can be used to report analysis + * progress. + */ + abstract void process(Content dataSource, DataSourceIngestModuleProgress progressBar); + + /** + * Shuts down this extractor. Called by the Recent Activity ingest module in + * its shutDown() method. + */ + void shutDown() { + } + + /** + * Gets any error messages generated by the extractor during processing. + * + * @return errorMessages The error message strings. */ List getErrorMessages() { - return errorMessages; + return Collections.unmodifiableList(errorMessages); } /** - * Adds a string to the error message list + * Adds an error message to the collection of error messages generated by + * the extractor during processing. * - * @param message is an error message represented as a string + * @param message The error message. */ protected void addErrorMessage(String message) { errorMessages.add(message); } - + /** - * Generic method for creating artifacts. + * Creates an artifact with the given attributes. * - * @param type The type of artifact. - * @param file The file the artifact originated from. - * @param attributes A list of the attributes to associate with the - * artifact. - * - * @return The newly created artifact. - */ - BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE type, Content content, Collection attributes) throws TskCoreException { - return createArtifactWithAttributes(new BlackboardArtifact.Type(type), content, attributes); - } - - /** - * Generic method for creating artifacts. - * - * @param type The type of artifact. - * @param content The file the artifact originated from. - * @param attributes A list of the attributes to associate with the - * artifact. 
+ * @param type The artifact type. + * @param content The artifact source/parent. + * @param attributes The attributes. * * @return The newly created artifact. * - * @throws TskCoreException + * @throws TskCoreException This exception is thrown if there is an issue + * creating the artifact. */ BlackboardArtifact createArtifactWithAttributes(BlackboardArtifact.Type type, Content content, Collection attributes) throws TskCoreException { - switch (type.getCategory()) { - case DATA_ARTIFACT: - return content.newDataArtifact(type, attributes); - case ANALYSIS_RESULT: - return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult(); - default: - throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName()); + if (type.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT) { + return content.newDataArtifact(type, attributes); + } else if (type.getCategory() == BlackboardArtifact.Category.ANALYSIS_RESULT) { + return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, attributes).getAnalysisResult(); + } else { + throw new TskCoreException("Unknown category type: " + type.getCategory().getDisplayName()); } } /** - * Returns and associated artifact for the given artifact. + * Creates an associated artifact for a given artifact. * - * @param content The content to create the artifact from. - * @param artifact The artifact to associate the new artifact with. + * @param content The artifact source/parent. + * @param artifact The artifact with which to associate the new artifact. * * @return The newly created artifact. * - * @throws TskCoreException + * @throws TskCoreException This exception is thrown if there is an issue + * creating the artifact. 
*/ BlackboardArtifact createAssociatedArtifact(Content content, BlackboardArtifact artifact) throws TskCoreException { - return createArtifactWithAttributes(TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, - RecentActivityExtracterModuleFactory.getModuleName(), artifact.getArtifactID()))); + BlackboardAttribute attribute = new BlackboardAttribute(BlackboardAttribute.Type.TSK_ASSOCIATED_ARTIFACT, getRAModuleName(), artifact.getArtifactID()); + return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_ASSOCIATED_OBJECT, content, Collections.singletonList(attribute)); } - + /** - * Method to post a blackboard artifact to the blackboard. + * Posts an artifact to the blackboard. * - * @param bbart Blackboard artifact to be indexed. Nothing will occure if a null object is passed in. + * @param artifact The artifact. */ - @Messages({"Extract.indexError.message=Failed to index artifact for keyword search.", - "Extract.noOpenCase.errMsg=No open case available."}) - void postArtifact(BlackboardArtifact bbart) { - if(bbart == null) { - return; - } - - try { - // index the artifact for keyword search - blackboard.postArtifact(bbart, getName()); - } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bbart.getDisplayName(), ex); //NON-NLS - } - } - - /** - * Method to post a list of BlackboardArtifacts to the blackboard. - * - * @param artifacts A list of artifacts. IF list is empty or null, the function will return. 
- */ - void postArtifacts(Collection artifacts) { - if(artifacts == null || artifacts.isEmpty()) { - return; - } - - try{ - blackboard.postArtifacts(artifacts, getName()); - } catch (Blackboard.BlackboardException ex) { - logger.log(Level.SEVERE, "Unable to post blackboard artifacts", ex); //NON-NLS + void postArtifact(BlackboardArtifact artifact) { + if (artifact != null && !context.dataArtifactIngestIsCancelled()) { + postArtifacts(Collections.singleton(artifact)); } } /** + * Posts a collection of artifacts to the blackboard. + * + * @param artifacts The artifacts. + */ + void postArtifacts(Collection artifacts) { + if (artifacts != null && !artifacts.isEmpty() && !context.dataArtifactIngestIsCancelled()) { + try { + tskCase.getBlackboard().postArtifacts(artifacts, RecentActivityExtracterModuleFactory.getModuleName(), context.getJobId()); + } catch (Blackboard.BlackboardException ex) { + logger.log(Level.SEVERE, "Failed to post artifacts", ex); //NON-NLS + } + } + } + + /** + * Connects to a SQLite database file (e.g., an application database) and + * executes a query. + * * Returns a List from a result set based on sql query. This is used to * query sqlite databases storing user recent activity data, such as in * firefox sqlite db * - * @param path is the string path to the sqlite db file - * @param query is a sql string query that is to be run + * @param path The path to the SQLite database file + * @param query The SQL query to be executed. * - * @return list is the ArrayList that contains the resultset information in - * it that the query obtained + * @return A list of maps that represents the query results. Each map entry + * consists of a column name as a key and an Object as a column + * value, with empty strings substituted for nulls. 
*/ - protected List> dbConnect(String path, String query) { - ResultSet temprs; + protected List> querySQLiteDb(String path, String query) { + ResultSet resultSet; List> list; String connectionString = "jdbc:sqlite:" + path; //NON-NLS - SQLiteDBConnect tempdbconnect = null; + SQLiteDBConnect dbConnection = null; try { - tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS - temprs = tempdbconnect.executeQry(query); - list = this.resultSetToArrayList(temprs); + dbConnection = new SQLiteDBConnect("org.sqlite.JDBC", connectionString); //NON-NLS + resultSet = dbConnection.executeQry(query); + list = resultSetToArrayList(resultSet); } catch (SQLException ex) { logger.log(Level.WARNING, "Error while trying to read into a sqlite db." + connectionString, ex); //NON-NLS return Collections.>emptyList(); - } - finally { - if (tempdbconnect != null) { - tempdbconnect.closeConnection(); + } finally { + if (dbConnection != null) { + dbConnection.closeConnection(); } } return list; } /** - * Returns a List of AbstractFile objects from TSK based on sql query. + * Converts a JDBC result set to a list of maps. Each map entry consists of + * a column name as a key and an Object as a column value, with empty + * strings substituted for nulls. * - * @param rs is the resultset that needs to be converted to an arraylist + * @param rs The result set. * - * @return list returns the arraylist built from the converted resultset + * @return The list of maps. 
*/ private List> resultSetToArrayList(ResultSet rs) throws SQLException { ResultSetMetaData md = rs.getMetaData(); int columns = md.getColumnCount(); - List> list = new ArrayList<>(50); + List> results = new ArrayList<>(50); while (rs.next()) { HashMap row = new HashMap<>(columns); for (int i = 1; i <= columns; ++i) { @@ -272,63 +243,76 @@ abstract class Extract { row.put(md.getColumnName(i), rs.getObject(i)); } } - list.add(row); + results.add(row); } - - return list; + return results; } /** - * Returns the name of the inheriting class + * Gets the display name of this extractor. * - * @return Gets the moduleName set in the moduleName data member + * @return The display name. */ - protected String getName() { - return moduleName; + protected String getDisplayName() { + return displayName; } - + + /** + * Get the display name of the Recent Activity module. + * + * @return The display name. + */ protected String getRAModuleName() { return RecentActivityExtracterModuleFactory.getModuleName(); } /** - * Returns the state of foundData - * @return + * Gets the value of a flag indicating whether or not this extractor found + * any data. + * + * @return True or false. */ public boolean foundData() { return dataFound; } - + /** - * Sets the value of foundData - * @param foundData + * Sets the value of a flag indicating whether or not this extractor found + * any data. + * + * @param foundData True or false. */ - protected void setFoundData(boolean foundData){ + protected void setFoundData(boolean foundData) { dataFound = foundData; } - + /** - * Returns the current case instance - * @return Current case instance + * Gets the current case. + * + * @return The current case. */ - protected Case getCurrentCase(){ + protected Case getCurrentCase() { return this.currentCase; } - + /** - * Creates a list of attributes for a history artifact. + * Creates a list of attributes for a web history artifact. 
* - * @param url - * @param accessTime Time url was accessed - * @param referrer referred url - * @param title title of the page - * @param programName module name - * @param domain domain of the url - * @param user user that accessed url - * @return List of BlackboardAttributes for giving attributes - * @throws TskCoreException + * @param url The URL, may be null. + * @param accessTime The time the URL was accessed, may be null. + * @param referrer The referring URL, may be null. + * @param title Title of the returned resource, may be null. + * @param programName The program that executed the request, may be the + * empty string, may be null. + * @param domain The domain of the URL, may be null. + * @param user The user that accessed URL, may be null. + * + * @return The list of attributes. + * + * @throws TskCoreException The exception is thrown if there is an issue + * creating the attributes. */ - protected Collection createHistoryAttribute(String url, Long accessTime, + protected Collection createHistoryAttributes(String url, Long accessTime, String referrer, String title, String programName, String domain, String user) throws TskCoreException { Collection bbattributes = new ArrayList<>(); @@ -363,17 +347,18 @@ abstract class Extract { return bbattributes; } - + /** - * Creates a list of attributes for a cookie. + * Creates a list of attributes for a web cookie artifact. * - * @param url cookie url - * @param creationTime cookie creation time - * @param name cookie name - * @param value cookie value - * @param programName Name of the module creating the attribute - * @param domain Domain of the URL - * @return List of BlackboarAttributes for the passed in attributes + * @param url The cookie url, may be null. + * @param creationTime The cookie creation time, may be null. + * @param name The cookie name, may be null. + * @param value The cookie value, may be null. + * @param programName The program that created the cookie, may be null. 
+ * @param domain The domain of the cookie URL, may be null. + * + * @return The list of attributes. */ protected Collection createCookieAttributes(String url, Long creationTime, Long accessTime, Long endTime, String name, String value, String programName, String domain) { @@ -387,13 +372,13 @@ abstract class Extract { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, RecentActivityExtracterModuleFactory.getModuleName(), creationTime)); } - + if (accessTime != null && accessTime != 0) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, RecentActivityExtracterModuleFactory.getModuleName(), accessTime)); } - - if(endTime != null && endTime != 0) { + + if (endTime != null && endTime != 0) { bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_END, RecentActivityExtracterModuleFactory.getModuleName(), endTime)); } @@ -418,14 +403,16 @@ abstract class Extract { } /** - * Creates a list of bookmark attributes from the passed in parameters. + * Creates a list of attributes for a web bookmark artifact. * - * @param url Bookmark url - * @param title Title of the bookmarked page - * @param creationTime Date & time at which the bookmark was created - * @param programName Name of the module creating the attribute - * @param domain The domain of the bookmark's url - * @return A collection of bookmark attributes + * @param url The bookmark URL, may be null. + * @param title The title of the bookmarked page, may be null. + * @param creationTime The date and time at which the bookmark was created, + * may be null. + * @param programName The program that created the bookmark, may be null. + * @param domain The domain of the bookmark's URL, may be null. + * + * @return The list of attributes. 
*/ protected Collection createBookmarkAttributes(String url, String title, Long creationTime, String programName, String domain) { Collection bbattributes = new ArrayList<>(); @@ -454,15 +441,16 @@ abstract class Extract { return bbattributes; } - /** - * Creates a list of the attributes of a downloaded file + /** + * Creates a list of attributes for a web download artifact. * - * @param path - * @param url URL of the downloaded file - * @param accessTime Time the download occurred - * @param domain Domain of the URL - * @param programName Name of the module creating the attribute - * @return A collection of attributes of a downloaded file + * @param path The path of the downloaded file, may be null. + * @param url The URL of the downloaded file, may be null. + * @param accessTime The time the download occurred, may be null. + * @param domain The domain of the URL, may be null. + * @param programName The program that downloaded the file, may be null. + * + * @return The list of attributes. */ protected Collection createDownloadAttributes(String path, Long pathID, String url, Long accessTime, String domain, String programName) { Collection bbattributes = new ArrayList<>(); @@ -496,44 +484,24 @@ abstract class Extract { return bbattributes; } - + /** - * Creates a list of the attributes for source of a downloaded file + * Writes a file to disk in this extractor's dedicated temp directory within + * the Recent Activity ingest modules temp directory. The object ID of the + * file is appended to the file name for uniqueness. * - * @param url source URL of the downloaded file - * @return A collection of attributes for source of a downloaded file + * @param file The file. + * + * @return A File object that represents the file on disk. + * + * @throws IOException Exception thrown if there is a problem writing the + * file to disk. 
*/ - protected Collection createDownloadSourceAttributes(String url) { - Collection bbattributes = new ArrayList<>(); - - bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, - RecentActivityExtracterModuleFactory.getModuleName(), - (url != null) ? url : "")); //NON-NLS - - return bbattributes; - } - - /** - * Create temporary file for the given AbstractFile. The new file will be - * created in the temp directory for the module with a unique file name. - * - * @param context - * @param file - * @param IngestJobId The ingest job id. - * @return Newly created copy of the AbstractFile - * @throws IOException - */ - protected File createTemporaryFile(IngestJobContext context, AbstractFile file, long ingestJobId) throws IOException{ - Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath( - getCurrentCase(), getName(), ingestJobId), file.getName() + file.getId() + file.getNameExtension()); + protected File createTemporaryFile(AbstractFile file) throws IOException { + Path tempFilePath = Paths.get(RAImageIngestModule.getRATempPath(getCurrentCase(), getDisplayName(), context.getJobId()), file.getName() + file.getId() + file.getNameExtension()); java.io.File tempFile = tempFilePath.toFile(); - - try { - ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled); - } catch (IOException ex) { - throw new IOException("Error writingToFile: " + file, ex); //NON-NLS - } - + ContentUtils.writeToFile(file, tempFile, context::dataSourceIngestIsCancelled); return tempFile; } + } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java index 68f280a5fa..a47bdd184d 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractEdge.java @@ -39,8 +39,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import 
org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.ExecUtil; import org.sleuthkit.autopsy.coreutils.FileUtil; import org.sleuthkit.autopsy.coreutils.Logger; @@ -65,7 +63,7 @@ final class ExtractEdge extends Extract { private static final Logger LOG = Logger.getLogger(ExtractEdge.class.getName()); private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; private HashMap> containersTable; private static final String EDGE = "Edge"; //NON-NLS @@ -114,32 +112,31 @@ final class ExtractEdge extends Extract { "ExtractEdge_process_errMsg_errGettingWebCacheFiles=Error trying to retrieving Edge WebCacheV01 file", "ExtractEdge_process_errMsg_webcacheFail=Failure processing Microsoft Edge WebCacheV01.dat file", "ExtractEdge_process_errMsg_spartanFail=Failure processing Microsoft Edge spartan.edb file", - "ExtractEdge_Module_Name=Microsoft Edge", + "ExtractEdge_Module_Name=Microsoft Edge Analyzer", "ExtractEdge_getHistory_containerFileNotFound=Error while trying to analyze Edge history", "Progress_Message_Edge_History=Microsoft Edge History", "Progress_Message_Edge_Bookmarks=Microsoft Edge Bookmarks", - "Progress_Message_Edge_Cookies=Microsoft Edge Cookies", - }) + "Progress_Message_Edge_Cookies=Microsoft Edge Cookies",}) /** - * Extract the bookmarks, cookies, downloads and history from Microsoft Edge - */ - ExtractEdge() { - super(Bundle.ExtractEdge_Module_Name()); + * Extract the bookmarks, cookies, downloads and history from Microsoft Edge + */ + ExtractEdge(IngestJobContext context) { + super(Bundle.ExtractEdge_Module_Name(), context); + this.context = context; } @Override - protected String getName() { + protected String getDisplayName() { return Bundle.ExtractEdge_Module_Name(); } @Override - void process(Content dataSource, IngestJobContext 
context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), EDGE, context.getJobId()); String moduleTempResultDir = Paths.get(moduleTempDir, EDGE_RESULT_FOLDER_NAME).toString(); - + this.dataSource = dataSource; - this.context = context; this.setFoundData(false); List webCacheFiles = null; @@ -151,7 +148,7 @@ final class ExtractEdge extends Extract { this.addErrorMessage(Bundle.ExtractEdge_process_errMsg_errGettingWebCacheFiles()); LOG.log(Level.SEVERE, "Error fetching 'WebCacheV01.dat' files for Microsoft Edge", ex); //NON-NLS } - + if (context.dataSourceIngestIsCancelled()) { return; } @@ -174,7 +171,7 @@ final class ExtractEdge extends Extract { LOG.log(Level.WARNING, "Microsoft Edge files found, unable to parse on Non-Windows system"); //NON-NLS return; } - + if (context.dataSourceIngestIsCancelled()) { return; } @@ -206,22 +203,22 @@ final class ExtractEdge extends Extract { * Process WebCacheV01.dat ese database file creating artifacts for cookies, * and history contained within. * - * @param eseDumperPath Path to ESEDatabaseView.exe - * @param webCacheFiles List of case WebCacheV01.dat files - * @param moduleTempDir The temp directory for this module. + * @param eseDumperPath Path to ESEDatabaseView.exe + * @param webCacheFiles List of case WebCacheV01.dat files + * @param moduleTempDir The temp directory for this module. * @param moduleTempResultDir The temp results directory for this module. 
+ * * @throws IOException * @throws TskCoreException */ - void processWebCacheDbFile(String eseDumperPath, List webCacheFiles, DataSourceIngestModuleProgress progressBar, + void processWebCacheDbFile(String eseDumperPath, List webCacheFiles, DataSourceIngestModuleProgress progressBar, String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { - for (AbstractFile webCacheFile : webCacheFiles) { if (context.dataSourceIngestIsCancelled()) { return; } - + clearContainerTable(); //Run the dumper @@ -245,9 +242,9 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Edge_History()); - + this.getHistory(webCacheFile, resultsDir); if (context.dataSourceIngestIsCancelled()) { @@ -255,7 +252,7 @@ final class ExtractEdge extends Extract { } progressBar.progress(Bundle.Progress_Message_Edge_Cookies()); - + this.getCookies(webCacheFile, resultsDir); } finally { @@ -266,19 +263,18 @@ final class ExtractEdge extends Extract { } /** - * Process spartan.edb ese database file creating artifacts for the bookmarks - * contained within. + * Process spartan.edb ese database file creating artifacts for the + * bookmarks contained within. * - * @param eseDumperPath Path to ESEDatabaseViewer - * @param spartanFiles List of the case spartan.edb files - * @param moduleTempDir The temp directory for this module. + * @param eseDumperPath Path to ESEDatabaseViewer + * @param spartanFiles List of the case spartan.edb files + * @param moduleTempDir The temp directory for this module. * @param moduleTempResultDir The temp results directory for this module. 
+ * * @throws IOException * @throws TskCoreException */ - void processSpartanDbFile(String eseDumperPath, List spartanFiles, - String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { - + void processSpartanDbFile(String eseDumperPath, List spartanFiles, String moduleTempDir, String moduleTempResultDir) throws IOException, TskCoreException { for (AbstractFile spartanFile : spartanFiles) { if (context.dataSourceIngestIsCancelled()) { @@ -287,7 +283,7 @@ final class ExtractEdge extends Extract { //Run the dumper String tempSpartanFileName = EDGE_WEBCACHE_PREFIX - + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; + + Integer.toString((int) spartanFile.getId()) + EDGE_WEBCACHE_EXT; File tempSpartanFile = new File(moduleTempDir, tempSpartanFileName); try { @@ -320,14 +316,15 @@ final class ExtractEdge extends Extract { * getHistory searches the files with "container" in the file name for lines * with the text "Visited" in them. Note that not all of the container * files, if fact most of them do not, have the browser history in them. 
- * @param origFile Original case file + * + * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer + * * @throws TskCoreException - * @throws FileNotFoundException + * @throws FileNotFoundException */ private void getHistory(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { ArrayList historyFiles = getHistoryFiles(resultDir); - if (historyFiles == null) { return; } @@ -336,7 +333,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + Scanner fileScanner; try { fileScanner = new Scanner(new FileInputStream(file.toString())); @@ -353,7 +350,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + String line = fileScanner.nextLine(); if (headers == null) { headers = Arrays.asList(line.toLowerCase().split(",")); @@ -380,8 +377,9 @@ final class ExtractEdge extends Extract { /** * Search for bookmark files and make artifacts. * - * @param origFile Original case file + * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer + * * @throws TskCoreException * @throws FileNotFoundException */ @@ -425,8 +423,9 @@ final class ExtractEdge extends Extract { /** * Queries for cookie files and adds artifacts. 
* - * @param origFile Original case file + * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer + * * @throws TskCoreException */ private void getCookies(AbstractFile origFile, File resultDir) throws TskCoreException { @@ -440,7 +439,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + Scanner fileScanner; try { fileScanner = new Scanner(new FileInputStream(file.toString())); @@ -457,7 +456,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + String line = fileScanner.nextLine(); if (headers == null) { headers = Arrays.asList(line.toLowerCase().split(",")); @@ -481,13 +480,14 @@ final class ExtractEdge extends Extract { /** * Queries for download files and adds artifacts. - * + * * Leaving for future use. - * - * @param origFile Original case file + * + * @param origFile Original case file * @param resultDir Output directory of ESEDatabaseViewer + * * @throws TskCoreException - * @throws FileNotFoundException + * @throws FileNotFoundException */ private void getDownloads(AbstractFile origFile, File resultDir) throws TskCoreException, FileNotFoundException { ArrayList downloadFiles = getDownloadFiles(resultDir); @@ -500,7 +500,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + Scanner fileScanner; try { fileScanner = new Scanner(new FileInputStream(file.toString())); @@ -516,7 +516,7 @@ final class ExtractEdge extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + String line = fileScanner.nextLine(); if (headers == null) { headers = Arrays.asList(line.toLowerCase().split(",")); @@ -535,7 +535,7 @@ final class ExtractEdge extends Extract { fileScanner.close(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } @@ -544,7 +544,8 @@ final class ExtractEdge extends Extract 
{ /** * Find the location of ESEDatabaseViewer.exe * - * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not found + * @return Absolute path to ESEDatabaseViewer.exe or null if the file is not + * found */ private String getPathForESEDumper() { Path path = Paths.get(ESE_TOOL_FOLDER, ESE_TOOL_NAME); @@ -561,6 +562,7 @@ final class ExtractEdge extends Extract { * Finds all of the WebCacheV01.dat files in the case * * @return A list of WebCacheV01.dat files, possibly empty if none are found + * * @throws TskCoreException */ private List fetchWebCacheDBFiles() throws TskCoreException { @@ -573,6 +575,7 @@ final class ExtractEdge extends Extract { * Finds all of the spartan.edb files in the case * * @return A list of spartan files, possibly empty if none are found + * * @throws TskCoreException */ private List fetchSpartanDBFiles() throws TskCoreException { @@ -587,9 +590,10 @@ final class ExtractEdge extends Extract { * Each table in the ese database will be dumped as a comma separated file * named .csv * - * @param dumperPath Path to ESEDatabaseView.exe + * @param dumperPath Path to ESEDatabaseView.exe * @param inputFilePath Path to ese database file to be dumped - * @param outputDir Output directory for dumper + * @param outputDir Output directory for dumper + * * @throws IOException */ private void executeDumper(String dumperPath, String inputFilePath, @@ -619,9 +623,11 @@ final class ExtractEdge extends Extract { * table. 
* * @param origFile Original case file - * @param headers List of table headers - * @param line CSV string representing a row of history table + * @param headers List of table headers + * @param line CSV string representing a row of history table + * * @return BlackboardArtifact representing one history table entry + * * @throws TskCoreException */ private BlackboardArtifact getHistoryArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { @@ -638,9 +644,9 @@ final class ExtractEdge extends Extract { String accessTime = rowSplit[index].trim(); Long ftime = parseTimestamp(accessTime); - return createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, createHistoryAttribute(url, ftime, + return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, createHistoryAttributes(url, ftime, null, null, - this.getName(), + this.getDisplayName(), NetworkUtils.extractDomain(url), user)); } @@ -648,9 +654,11 @@ final class ExtractEdge extends Extract { * Create a BlackboardArtifact for the given row from the Edge cookie table. 
* * @param origFile Original case file - * @param headers List of table headers - * @param line CSV string representing a row of cookie table + * @param headers List of table headers + * @param line CSV string representing a row of cookie table + * * @return BlackboardArtifact representing one cookie table entry + * * @throws TskCoreException */ private BlackboardArtifact getCookieArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { @@ -664,7 +672,7 @@ final class ExtractEdge extends Extract { String value = hexToChar(lineSplit[headers.indexOf(EDGE_HEAD_VALUE)].trim()); String url = flipDomain(domain); - return createArtifactWithAttributes(TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getName(), NetworkUtils.extractDomain(url))); + return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, origFile, createCookieAttributes(url, null, ftime, null, name, value, this.getDisplayName(), NetworkUtils.extractDomain(url))); } /** @@ -675,17 +683,19 @@ final class ExtractEdge extends Extract { * it apart. 
* * @param origFile Original case file - * @param headers List of table headers - * @param line CSV string representing a row of download table + * @param headers List of table headers + * @param line CSV string representing a row of download table + * * @return BlackboardArtifact representing one download table entry + * * @throws TskCoreException */ private BlackboardArtifact getDownloadArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { BlackboardArtifact bbart = null; - + String[] lineSplit = line.split(","); // NON-NLS String rheader = lineSplit[headers.indexOf(EDGE_HEAD_RESPONSEHEAD)]; - + return bbart; } @@ -696,9 +706,12 @@ final class ExtractEdge extends Extract { * Note: The "Favorites" table does not have a "Creation Time" * * @param origFile File the table came from ie spartan.edb - * @param headers List of table column headers - * @param line The line or row of the table to parse - * @return BlackboardArtifact representation of the passed in line\table row or null if no Bookmark is found + * @param headers List of table column headers + * @param line The line or row of the table to parse + * + * @return BlackboardArtifact representation of the passed in line\table row + * or null if no Bookmark is found + * * @throws TskCoreException */ private BlackboardArtifact getBookmarkArtifact(AbstractFile origFile, List headers, String line) throws TskCoreException { @@ -711,28 +724,27 @@ final class ExtractEdge extends Extract { if (url.isEmpty()) { return null; } - - return createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null, - this.getName(), NetworkUtils.extractDomain(url))); + + return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, origFile, createBookmarkAttributes(url, title, null, + this.getDisplayName(), NetworkUtils.extractDomain(url))); } - /** * Attempt to parse the timestamp. 
- * + * * ESEDatabaseView makes timestamps based on the locale of the machine so * they will not always be in the expected format. Additionally, the format * used in the database output does not appear to match the default format * using DateFormat.SHORT. Therefore, if the default US format doesn't work, * we will attempt to determine the correct pattern to use and save any * working pattern for the next attempt. - * + * * @param timeStr The date/time string to parse - * + * * @return The epoch time as a Long or null if it could not be parsed. */ private Long parseTimestamp(String timeStr) { - + // If we had a pattern that worked on the last date, use it again. if (previouslyValidDateFormat != null) { try { @@ -741,7 +753,7 @@ final class ExtractEdge extends Extract { // Continue on to format detection } } - + // Try the default US pattern try { SimpleDateFormat usDateFormat = new SimpleDateFormat("MM/dd/yyyy hh:mm:ss a"); //NON-NLS @@ -752,12 +764,12 @@ final class ExtractEdge extends Extract { } catch (ParseException ex) { // Continue on to format detection } - + // This generally doesn't match the data in the file but can give information on whether // the month or day is first. 
boolean monthFirstFromLocale = true; String localeDatePattern = ((SimpleDateFormat) DateFormat.getDateInstance( - DateFormat.SHORT, Locale.getDefault())).toPattern(); + DateFormat.SHORT, Locale.getDefault())).toPattern(); if (localeDatePattern.startsWith("d")) { monthFirstFromLocale = false; } @@ -770,27 +782,27 @@ final class ExtractEdge extends Extract { if (matcher.find()) { int firstVal = Integer.parseInt(matcher.group(1)); int secondVal = Integer.parseInt(matcher.group(2)); - + if (firstVal > 12) { - monthFirst = false; + monthFirst = false; } else if (secondVal > 12) { monthFirst = true; - } + } // Otherwise keep the setting from the locale } - + // See if the time has AM/PM attached boolean hasAmPm = false; if (timeStr.endsWith("M") || timeStr.endsWith("m")) { hasAmPm = true; } - + // See if the date appears to use forward slashes. If not, assume '.' is being used. boolean hasSlashes = false; if (timeStr.contains("/")) { hasSlashes = true; } - + // Make our best guess at the pattern String dateFormatPattern; if (monthFirst) { @@ -800,19 +812,19 @@ final class ExtractEdge extends Extract { dateFormatPattern = "MM.dd.yyyy "; } } else { - if (hasSlashes) { + if (hasSlashes) { dateFormatPattern = "dd/MM/yyyy "; } else { dateFormatPattern = "dd.MM.yyyy "; - } + } } - + if (hasAmPm) { dateFormatPattern += "hh:mm:ss a"; } else { dateFormatPattern += "HH:mm:ss"; } - + try { SimpleDateFormat dateFormat = new SimpleDateFormat(dateFormatPattern); //NON-NLS dateFormat.setLenient(false); // Fail if month or day are out of range @@ -829,6 +841,7 @@ final class ExtractEdge extends Extract { * Converts a space separated string of hex values to ascii characters. * * @param hexString + * * @return "decoded" string or null if a non-hex value was found */ private String hexToChar(String hexString) { @@ -858,6 +871,7 @@ final class ExtractEdge extends Extract { * there to weed out the "junk". 
* * @param domain + * * @return Correct domain string */ private String flipDomain(String domain) { @@ -888,6 +902,7 @@ final class ExtractEdge extends Extract { * them. * * @param resultDir Path to ESEDatabaseViewer output + * * @return List of download table files */ private ArrayList getDownloadFiles(File resultDir) throws FileNotFoundException { @@ -898,7 +913,9 @@ final class ExtractEdge extends Extract { * Returns a list the container files that have history information in them. * * @param resultDir Path to ESEDatabaseViewer output + * * @return List of history table files + * * @throws FileNotFoundException */ private ArrayList getHistoryFiles(File resultDir) throws FileNotFoundException { @@ -909,8 +926,11 @@ final class ExtractEdge extends Extract { * Returns a list of the containers files that are of the given type string * * @param resultDir Path to ESEDatabaseViewer output - * @param type Type of table files - * @return List of table files returns null if no files of that type are found + * @param type Type of table files + * + * @return List of table files returns null if no files of that type are + * found + * * @throws FileNotFoundException */ private ArrayList getContainerFiles(File resultDir, String type) throws FileNotFoundException { @@ -938,7 +958,9 @@ final class ExtractEdge extends Extract { * files. 
* * @param resultDir Path to ESEDatabaseViewer output - * @return Hashmap with Key representing the table type, the value is a list of table ids for that type + * + * @return Hashmap with Key representing the table type, the value is a list + * of table ids for that type */ private HashMap> getContainerIDTable(File resultDir) throws FileNotFoundException { @@ -975,11 +997,11 @@ final class ExtractEdge extends Extract { return containersTable; } - + /** * Clears the containerTable */ - private void clearContainerTable(){ + private void clearContainerTable() { containersTable = null; } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index 29754fcdcb..41e5e0ceb3 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -43,7 +43,6 @@ import java.util.Scanner; import java.util.stream.Collectors; import org.openide.modules.InstalledFileLocator; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -72,7 +71,7 @@ class ExtractIE extends Extract { private static final String RESOURCE_URL_PREFIX = "res://"; private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; @Messages({ "Progress_Message_IE_History=IE History", @@ -83,30 +82,30 @@ class ExtractIE extends Extract { "Progress_Message_IE_AutoFill=IE Auto Fill", "Progress_Message_IE_Logins=IE Logins",}) - ExtractIE() { - super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text")); + ExtractIE(IngestJobContext context) { + 
super(NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"), context); JAVA_PATH = PlatformUtil.getJavaPath(); + this.context = context; } @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { String moduleTempDir = RAImageIngestModule.getRATempPath(getCurrentCase(), "IE", context.getJobId()); String moduleTempResultsDir = Paths.get(moduleTempDir, "results").toString(); - + this.dataSource = dataSource; - this.context = context; dataFound = false; progressBar.progress(Bundle.Progress_Message_IE_Bookmarks()); this.getBookmark(); - + if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_IE_Cookies()); this.getCookie(); - + if (context.dataSourceIngestIsCancelled()) { return; } @@ -127,7 +126,7 @@ class ExtractIE extends Extract { logger.log(Level.WARNING, "Error fetching 'url' files for Internet Explorer bookmarks.", ex); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractIE.getBookmark.errMsg.errGettingBookmarks", - this.getName())); + this.getDisplayName())); return; } @@ -171,13 +170,13 @@ class ExtractIE extends Extract { } try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, fav, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, fav, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId() ), ex); + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getDisplayName(), fav.getId()), ex); } } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } @@ -199,12 +198,12 @@ class ExtractIE extends 
Extract { } catch (IOException ex) { logger.log(Level.WARNING, "Failed to read from content: " + fav.getName(), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getName(), + NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg", this.getDisplayName(), fav.getName())); } catch (IndexOutOfBoundsException ex) { logger.log(Level.WARNING, "Failed while getting URL of IE bookmark. Unexpected format of the bookmark file: " + fav.getName(), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getName(), + NbBundle.getMessage(this.getClass(), "ExtractIE.getURLFromIEBmkFile.errMsg2", this.getDisplayName(), fav.getName())); } finally { try { @@ -228,7 +227,7 @@ class ExtractIE extends Extract { } catch (TskCoreException ex) { logger.log(Level.WARNING, "Error getting cookie files for IE"); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getName())); + NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errGettingFile", this.getDisplayName())); return; } @@ -254,7 +253,7 @@ class ExtractIE extends Extract { logger.log(Level.WARNING, "Error reading bytes of Internet Explorer cookie.", ex); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractIE.getCookie.errMsg.errReadingIECookie", - this.getName(), cookiesFile.getName())); + this.getDisplayName(), cookiesFile.getName())); continue; } String cookieString = new String(t); @@ -285,21 +284,23 @@ class ExtractIE extends Extract { } try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create %s for file 
%d",ARTIFACT_TYPE.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId() ), ex); + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_COOKIE.getDisplayName(), cookiesFile.getId()), ex); } } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } /** * Locates index.dat files, runs Pasco on them, and creates artifacts. - * @param moduleTempDir The path to the module temp directory. - * @param moduleTempResultsDir The path to the module temp results directory. + * + * @param moduleTempDir The path to the module temp directory. + * @param moduleTempResultsDir The path to the module temp results + * directory. */ private void getHistory(String moduleTempDir, String moduleTempResultsDir) { logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); //NON-NLS @@ -308,7 +309,7 @@ class ExtractIE extends Extract { final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); //NON-NLS if (pascoRoot == null) { this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getName())); + NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.unableToGetHist", this.getDisplayName())); logger.log(Level.SEVERE, "Error finding pasco program "); //NON-NLS return; } @@ -329,7 +330,7 @@ class ExtractIE extends Extract { indexFiles = fileManager.findFiles(dataSource, "index.dat"); //NON-NLS } catch (TskCoreException ex) { this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errGettingHistFiles", - this.getName())); + this.getDisplayName())); logger.log(Level.WARNING, "Error fetching 'index.data' files for Internet Explorer history."); //NON-NLS return; } @@ -363,7 +364,7 @@ class ExtractIE extends Extract { } catch (IOException e) { logger.log(Level.WARNING, "Error while trying to write index.dat 
file " + datFile.getAbsolutePath(), e); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getName(), + NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errWriteFile", this.getDisplayName(), datFile.getAbsolutePath())); continue; } @@ -391,11 +392,11 @@ class ExtractIE extends Extract { } else { logger.log(Level.WARNING, "pasco execution failed on: {0}", filename); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getName())); + NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getDisplayName())); } } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } @@ -403,16 +404,15 @@ class ExtractIE extends Extract { /** * Execute pasco on a single file that has been saved to disk. * - * @param indexFilePath Path to local index.dat file to analyze - * @param outputFileName Name of file to save output to + * @param indexFilePath Path to local index.dat file to analyze + * @param outputFileName Name of file to save output to * @param moduleTempResultsDir the path to the module temp directory. * * @return false on error */ @Messages({ - "# {0} - sub module name", - "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history", - }) + "# {0} - sub module name", + "ExtractIE_executePasco_errMsg_errorRunningPasco={0}: Error analyzing Internet Explorer web history",}) private boolean executePasco(String indexFilePath, String outputFileName, String moduleTempResultsDir) { boolean success = true; try { @@ -443,7 +443,7 @@ class ExtractIE extends Extract { // @@@ Investigate use of history versus cache as type. 
} catch (IOException ex) { logger.log(Level.SEVERE, "Error executing Pasco to process Internet Explorer web history", ex); //NON-NLS - addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getName())); + addErrorMessage(Bundle.ExtractIE_executePasco_errMsg_errorRunningPasco(getDisplayName())); success = false; } return success; @@ -452,9 +452,9 @@ class ExtractIE extends Extract { /** * parse Pasco output and create artifacts * - * @param origFile Original index.dat file that was analyzed to - * get this output - * @param pascoOutputFileName name of pasco output file + * @param origFile Original index.dat file that was analyzed to + * get this output + * @param pascoOutputFileName name of pasco output file * @param moduleTempResultsDir the path to the module temp directory. * * @return A collection of created artifacts @@ -467,7 +467,7 @@ class ExtractIE extends Extract { File file = new File(fnAbs); if (file.exists() == false) { this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getName(), + NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getDisplayName(), file.getName())); logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath()); //NON-NLS return bbartifacts; @@ -484,7 +484,7 @@ class ExtractIE extends Extract { fileScanner = new Scanner(new FileInputStream(file.toString())); } catch (FileNotFoundException ex) { this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getName(), + NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.errParsing", this.getDisplayName(), file.getName())); logger.log(Level.WARNING, "Unable to find the Pasco file at " + file.getPath(), ex); //NON-NLS return bbartifacts; @@ -562,7 +562,7 @@ class ExtractIE extends Extract { } catch (ParseException e) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), 
"ExtractIE.parsePascoOutput.errMsg.errParsingEntry", - this.getName())); + this.getDisplayName())); logger.log(Level.WARNING, String.format("Error parsing Pasco results, may have partial processing of corrupt file (id=%d)", origFile.getId()), e); //NON-NLS } } @@ -589,9 +589,9 @@ class ExtractIE extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), user)); try { - bbartifacts.add(createArtifactWithAttributes(TSK_WEB_HISTORY, origFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, origFile, bbattributes)); } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create %s for file %d",ARTIFACT_TYPE.TSK_WEB_HISTORY.getDisplayName(), origFile.getId() ), ex); + logger.log(Level.SEVERE, String.format("Failed to create %s for file %d", BlackboardArtifact.Type.TSK_WEB_HISTORY.getDisplayName(), origFile.getId()), ex); } } fileScanner.close(); diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractJumpLists.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractJumpLists.java index 25ac8acd11..02c9c7d70e 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractJumpLists.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractJumpLists.java @@ -53,16 +53,13 @@ import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; /** - * Extract the LNK files from the jumplists and save them to ModuleOutput\RecentActivity\Jumplists - * and then add them back into the case as a dervived file. + * Extract the LNK files from the jumplists and save them to + * ModuleOutput\RecentActivity\Jumplists and then add them back into the case as + * a dervived file. 
*/ final class ExtractJumpLists extends Extract { private static final Logger logger = Logger.getLogger(ExtractJumpLists.class.getName()); - - private IngestJobContext context; - - private static final String JUMPLIST_TSK_COMMENT = "Jumplist File"; private static final String RA_DIR_NAME = "RecentActivity"; //NON-NLS private static final String AUTOMATIC_DESTINATIONS_FILE_DIRECTORY = "%/AppData/Roaming/Microsoft/Windows/Recent/AutomaticDestinations/"; private static final String JUMPLIST_DIR_NAME = "jumplists"; //NON-NLS @@ -70,26 +67,25 @@ final class ExtractJumpLists extends Extract { private String moduleName; private FileManager fileManager; private final IngestServices services = IngestServices.getInstance(); + private final IngestJobContext context; @Messages({ - "Jumplist_module_name=Windows Jumplist Extractor", + "Jumplist_module_name=Windows Jumplist Analyzer", "Jumplist_adding_extracted_files_msg=Chrome Cache: Adding %d extracted files for analysis." }) - ExtractJumpLists() { - super(Bundle.Jumplist_module_name()); + ExtractJumpLists(IngestJobContext context) { + super(Bundle.Jumplist_module_name(), context); + this.context = context; } @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { - - this.context = context; + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { moduleName = Bundle.Jumplist_module_name(); - fileManager = currentCase.getServices().getFileManager(); + fileManager = currentCase.getServices().getFileManager(); long ingestJobId = context.getJobId(); - String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME , ingestJobId); + String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId); List jumpListFiles = extractJumplistFiles(dataSource, ingestJobId, baseRaTempPath); - if (jumpListFiles.isEmpty()) { return; } @@ -102,26 +98,28 @@ final class 
ExtractJumpLists extends Extract { String derivedPath = null; String baseRaModPath = RAImageIngestModule.getRAOutputPath(Case.getCurrentCase(), JUMPLIST_DIR_NAME, ingestJobId); for (AbstractFile jumplistFile : jumpListFiles) { - if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..") && - !jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) { - String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString(); - String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); - derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); + if (!jumplistFile.getName().toLowerCase().contains("-slack") && !jumplistFile.getName().equals("..") + && !jumplistFile.getName().equals(".") && jumplistFile.getSize() > 0) { + String jlFile = Paths.get(baseRaTempPath, jumplistFile.getName() + "_" + jumplistFile.getId()).toString(); + String moduleOutPath = baseRaModPath + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); + derivedPath = RA_DIR_NAME + File.separator + JUMPLIST_DIR_NAME + "_" + ingestJobId + File.separator + jumplistFile.getName() + "_" + jumplistFile.getId(); File jlDir = new File(moduleOutPath); if (jlDir.exists() == false) { boolean dirMade = jlDir.mkdirs(); if (!dirMade) { logger.log(Level.WARNING, "Error creating directory to store Jumplist LNK files %s", moduleOutPath); //NON-NLS - continue; + continue; } } derivedFiles.addAll(extractLnkFiles(jlFile, moduleOutPath, jumplistFile, derivedPath)); } } - + // notify listeners of new files and schedule for analysis progressBar.progress(String.format(Bundle.Jumplist_adding_extracted_files_msg(), derivedFiles.size())); - derivedFiles.forEach((derived) -> { services.fireModuleContentEvent(new ModuleContentEvent(derived)); }); + derivedFiles.forEach((derived) -> { + 
services.fireModuleContentEvent(new ModuleContentEvent(derived)); + }); context.addFilesToJob(derivedFiles); } @@ -132,8 +130,8 @@ final class ExtractJumpLists extends Extract { * @return - list of jumplist abstractfiles or empty list */ private List extractJumplistFiles(Content dataSource, Long ingestJobId, String baseRaTempPath) { - List jumpListFiles = new ArrayList<>();; - List tempJumpListFiles = new ArrayList<>();; + List jumpListFiles = new ArrayList<>();; + List tempJumpListFiles = new ArrayList<>();; FileManager fileManager = Case.getCurrentCase().getServices().getFileManager(); @@ -141,22 +139,22 @@ final class ExtractJumpLists extends Extract { tempJumpListFiles = fileManager.findFiles(dataSource, "%", AUTOMATIC_DESTINATIONS_FILE_DIRECTORY); //NON-NLS if (!tempJumpListFiles.isEmpty()) { jumpListFiles.addAll(tempJumpListFiles); - } + } } catch (TskCoreException ex) { logger.log(Level.WARNING, "Unable to find jumplist files.", ex); //NON-NLS return jumpListFiles; // No need to continue } - + for (AbstractFile jumpListFile : jumpListFiles) { if (context.dataSourceIngestIsCancelled()) { return jumpListFiles; } - - if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..") && - !jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) { + + if (!jumpListFile.getName().toLowerCase().contains("-slack") && !jumpListFile.getName().equals("..") + && !jumpListFile.getName().equals(".") && jumpListFile.getSize() > 0) { String fileName = jumpListFile.getName() + "_" + jumpListFile.getId(); - String jlFile = Paths.get(baseRaTempPath, fileName).toString(); + String jlFile = Paths.get(baseRaTempPath, fileName).toString(); try { ContentUtils.writeToFile(jumpListFile, new File(jlFile)); } catch (IOException ex) { @@ -168,81 +166,80 @@ final class ExtractJumpLists extends Extract { return jumpListFiles; } - + /* - * Read each jumplist file and extract the lnk files to moduleoutput - */ + * Read each jumplist file and 
extract the lnk files to moduleoutput + */ private List extractLnkFiles(String jumpListFile, String moduleOutPath, AbstractFile jumpListAbsFile, String derivedPath) { - + List derivedFiles = new ArrayList<>(); DerivedFile derivedFile; String lnkFileName = ""; - + try (POIFSFileSystem fs = new POIFSFileSystem(new File(jumpListFile))) { - DirectoryEntry root = fs.getRoot(); - for (Entry entry : root) { - if (entry instanceof DirectoryEntry) { - //If this data structure needed to recurse this is where it would do it but jumplists do not need to at this time - continue; - } else if (entry instanceof DocumentEntry) { - String jmpListFileName = entry.getName(); - int fileSize = ((DocumentEntry) entry).getSize(); - - if (fileSize > 0) { - try (DocumentInputStream stream = fs.createDocumentInputStream(jmpListFileName)) { - byte[] buffer = new byte[stream.available()]; - stream.read(buffer); + DirectoryEntry root = fs.getRoot(); + for (Entry entry : root) { + if (entry instanceof DirectoryEntry) { + //If this data structure needed to recurse this is where it would do it but jumplists do not need to at this time + continue; + } else if (entry instanceof DocumentEntry) { + String jmpListFileName = entry.getName(); + int fileSize = ((DocumentEntry) entry).getSize(); - JLnkParser lnkParser = new JLnkParser(fs.createDocumentInputStream(jmpListFileName), fileSize); - JLNK lnk = lnkParser.parse(); - lnkFileName = lnk.getBestName() + ".lnk"; - File targetFile = new File(moduleOutPath + File.separator + entry.getName() + "-" + lnkFileName); - String relativePath = Case.getCurrentCase().getModuleOutputDirectoryRelativePath(); - String derivedFileName = Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + File.separator + derivedPath + File.separator + entry.getName() + "-" + lnkFileName; - OutputStream outStream = new FileOutputStream(targetFile); - outStream.write(buffer); - outStream.close(); - derivedFile = fileManager.addDerivedFile(lnkFileName, derivedFileName, - 
fileSize, - 0, - 0, - 0, - 0, // TBD - true, - jumpListAbsFile, - "", - moduleName, - VERSION_NUMBER, - "", - TskData.EncodingType.NONE); - derivedFiles.add(derivedFile); + if (fileSize > 0) { + try (DocumentInputStream stream = fs.createDocumentInputStream(jmpListFileName)) { + byte[] buffer = new byte[stream.available()]; + stream.read(buffer); - } catch (IOException | JLnkParserException ex) { - logger.log(Level.WARNING, String.format("No such document, or the Entry represented by documentName is not a DocumentEntry link file is %s", jumpListFile), ex); //NON-NLS - } catch (TskCoreException ex) { - logger.log(Level.WARNING, String.format("Error trying to add dervived file %s", lnkFileName), ex); //NON-NLS - } catch (IndexOutOfBoundsException ex) { - // There is some type of corruption within the file that cannot be handled, ignoring it and moving on to next file - // in the jumplist. - logger.log(Level.WARNING, String.format("Error parsing the the jumplist file %s", jumpListFile), ex); //NON-NLS - } + JLnkParser lnkParser = new JLnkParser(fs.createDocumentInputStream(jmpListFileName), fileSize); + JLNK lnk = lnkParser.parse(); + lnkFileName = lnk.getBestName() + ".lnk"; + File targetFile = new File(moduleOutPath + File.separator + entry.getName() + "-" + lnkFileName); + String relativePath = Case.getCurrentCase().getModuleOutputDirectoryRelativePath(); + String derivedFileName = Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + File.separator + derivedPath + File.separator + entry.getName() + "-" + lnkFileName; + OutputStream outStream = new FileOutputStream(targetFile); + outStream.write(buffer); + outStream.close(); + derivedFile = fileManager.addDerivedFile(lnkFileName, derivedFileName, + fileSize, + 0, + 0, + 0, + 0, // TBD + true, + jumpListAbsFile, + "", + moduleName, + VERSION_NUMBER, + "", + TskData.EncodingType.NONE); + derivedFiles.add(derivedFile); + + } catch (IOException | JLnkParserException ex) { + logger.log(Level.WARNING, 
String.format("No such document, or the Entry represented by documentName is not a DocumentEntry link file is %s", jumpListFile), ex); //NON-NLS + } catch (TskCoreException ex) { + logger.log(Level.WARNING, String.format("Error trying to add dervived file %s", lnkFileName), ex); //NON-NLS + } catch (IndexOutOfBoundsException ex) { + // There is some type of corruption within the file that cannot be handled, ignoring it and moving on to next file + // in the jumplist. + logger.log(Level.WARNING, String.format("Error parsing the the jumplist file %s", jumpListFile), ex); //NON-NLS } - } else { + } + } else { // currently, either an Entry is a DirectoryEntry or a DocumentEntry, // but in the future, there may be other entry subinterfaces. // The internal data structure certainly allows for a lot more entry types. continue; - } } - } catch (NotOLE2FileException | EmptyFileException ex1) { - logger.log(Level.WARNING, String.format("Error file not a valid OLE2 Document $s", jumpListFile)); //NON-NLS - } catch (IOException ex) { - logger.log(Level.WARNING, String.format("Error lnk parsing the file to get recent files $s", jumpListFile), ex); //NON-NLS - } - - return derivedFiles; - + } + } catch (NotOLE2FileException | EmptyFileException ex1) { + logger.log(Level.WARNING, String.format("Error file not a valid OLE2 Document $s", jumpListFile)); //NON-NLS + } catch (IOException ex) { + logger.log(Level.WARNING, String.format("Error lnk parsing the file to get recent files $s", jumpListFile), ex); //NON-NLS + } + + return derivedFiles; + } - + } - \ No newline at end of file diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java index 6bc00ada63..bf42434934 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractOs.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * 
Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -26,7 +26,6 @@ import java.util.List; import java.util.logging.Level; import org.apache.commons.io.FilenameUtils; import org.openide.util.NbBundle.Messages; -import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; @@ -40,8 +39,8 @@ import org.sleuthkit.datamodel.TskCoreException; * Create OS INFO artifacts for the Operating Systems believed to be present on * the data source. */ -@Messages({"ExtractOs.parentModuleName=Recent Activity", - "ExtractOS_progressMessage=Checking for OS"}) +@Messages({"ExtractOs.displayName=OS Info Analyzer", + "ExtractOS_progressMessage=Checking for OS"}) class ExtractOs extends Extract { private static final Logger logger = Logger.getLogger(ExtractOs.class.getName()); @@ -64,16 +63,22 @@ class ExtractOs extends Extract { private static final String LINUX_UBUNTU_PATH = "/etc/lsb-release"; private Content dataSource; - + private final IngestJobContext context; + + ExtractOs(IngestJobContext context) { + super(Bundle.ExtractOs_displayName(), context); + this.context = context; + } + @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; try { progressBar.progress(Bundle.ExtractOS_progressMessage()); for (OS_TYPE value : OS_TYPE.values()) { if (context.dataSourceIngestIsCancelled()) { return; - } + } checkForOSFiles(value); } @@ -100,9 +105,9 @@ class ExtractOs extends Extract { //if the os info program name is not empty create an os info artifact on the first of the files found Collection bbattributes = new 
ArrayList<>(); bbattributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, - Bundle.ExtractOs_parentModuleName(), + getRAModuleName(), osType.getOsInfoLabel())); //NON-NLS - postArtifact(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_OS_INFO, file, bbattributes)); + postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, file, bbattributes)); } } @@ -116,7 +121,7 @@ class ExtractOs extends Extract { * @return the first AbstractFile found which matched a specified path to * search for */ - private AbstractFile getFirstFileFound(List pathsToSearchFor) throws TskCoreException{ + private AbstractFile getFirstFileFound(List pathsToSearchFor) throws TskCoreException { for (String filePath : pathsToSearchFor) { List files = currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(filePath), FilenameUtils.getPath(filePath)); if (!files.isEmpty()) { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java index c63adccd5a..18e823f5b3 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractPrefetch.java @@ -65,8 +65,7 @@ final class ExtractPrefetch extends Extract { private static final Logger logger = Logger.getLogger(ExtractPrefetch.class.getName()); - private IngestJobContext context; - + private final IngestJobContext context; private static final String PREFETCH_TSK_COMMENT = "Prefetch File"; private static final String PREFETCH_FILE_LOCATION = "/windows/prefetch"; private static final String PREFETCH_TOOL_FOLDER = "markmckinnon"; //NON-NLS @@ -80,28 +79,29 @@ final class ExtractPrefetch extends Extract { private static final String PREFETCH_DIR_NAME = "prefetch"; //NON-NLS @Messages({ - "ExtractPrefetch_module_name=Windows Prefetch 
Extractor", - "# {0} - sub module name", + "ExtractPrefetch_module_name=Windows Prefetch Analyzer", + "# {0} - sub module name", "ExtractPrefetch_errMsg_prefetchParsingFailed={0}: Error analyzing prefetch files" }) - ExtractPrefetch() { - super(Bundle.ExtractPrefetch_module_name()); + ExtractPrefetch(IngestJobContext context) { + super(Bundle.ExtractPrefetch_module_name(), context); + this.context = context; } /** * Get the temp folder name. - * + * * @param dataSource Current data source + * * @return The folder name */ private String getPrefetchTempFolder(Content dataSource) { return dataSource.getId() + "-" + PREFETCH_PARSER_DB_FILE; } - - @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { - this.context = context; + @Override + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { + long ingestJobId = context.getJobId(); String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + PREFETCH_DIR_NAME; @@ -170,7 +170,7 @@ final class ExtractPrefetch extends Extract { String baseName = FilenameUtils.getBaseName(origFileName); String fileName = escapeFileName(String.format("%s_%d.%s", baseName, pFile.getId(), ext)); String baseRaTempPath = RAImageIngestModule.getRATempPath(Case.getCurrentCase(), getPrefetchTempFolder(dataSource), ingestJobId); - String prefetchFile = Paths.get(baseRaTempPath, fileName).toString(); + String prefetchFile = Paths.get(baseRaTempPath, fileName).toString(); try { ContentUtils.writeToFile(pFile, new File(prefetchFile)); } catch (IOException ex) { @@ -289,13 +289,14 @@ final class ExtractPrefetch extends Extract { logger.log(Level.WARNING, "Invalid format for PF file: " + prefetchFileName);//NON-NLS continue; } - - + /** - * A prefetch file is created when a program is run and the superfetch service collected data about the first 10 - * seconds of the run, the trace data is then written to a new prefetch file or merged with an 
existing prefetch file. - * If the prefetch file gets deleted for some reason then a new one will be created. See 7500 in JIRA for more - * information. + * A prefetch file is created when a program is run and the + * superfetch service collected data about the first 10 seconds + * of the run, the trace data is then written to a new prefetch + * file or merged with an existing prefetch file. If the + * prefetch file gets deleted for some reason then a new one + * will be created. See 7500 in JIRA for more information. */ AbstractFile pfAbstractFile = null; try { @@ -303,30 +304,30 @@ final class ExtractPrefetch extends Extract { if (c instanceof AbstractFile) { pfAbstractFile = (AbstractFile) c; } - } catch (NoCurrentCaseException | TskCoreException | NumberFormatException ex ) { + } catch (NoCurrentCaseException | TskCoreException | NumberFormatException ex) { logger.log(Level.SEVERE, "Unable to find content for: " + prefetchFileName, ex); } - + if (pfAbstractFile != null) { for (Long executionTime : prefetchExecutionTimes) { // only add prefetch file entries that have an actual date associated with them Collection blkBrdAttributes = Arrays.asList( new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getDisplayName(), applicationName),//NON-NLS new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getName(), filePath), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getDisplayName(), filePath), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getDisplayName(), executionTime), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, getName(), Integer.valueOf(timesProgramRun)), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, getDisplayName(), Integer.valueOf(timesProgramRun)), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, 
getName(), PREFETCH_TSK_COMMENT)); + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getDisplayName(), PREFETCH_TSK_COMMENT)); try { - BlackboardArtifact blkBrdArt = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, pfAbstractFile, blkBrdAttributes); + BlackboardArtifact blkBrdArt = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_PROG_RUN, pfAbstractFile, blkBrdAttributes); blkBrdArtList.add(blkBrdArt); BlackboardArtifact associatedBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), filePath, blkBrdArt, dataSource); if (associatedBbArtifact != null) { @@ -350,7 +351,7 @@ final class ExtractPrefetch extends Extract { postArtifacts(blkBrdArtList); } } - + /** * Create associated artifacts using file path name and the artifact it * associates with @@ -365,7 +366,7 @@ final class ExtractPrefetch extends Extract { private BlackboardArtifact createAssociatedArtifact(String fileName, String filePathName, BlackboardArtifact bba, Content dataSource) throws TskCoreException { AbstractFile sourceFile = getAbstractFile(fileName, filePathName, dataSource); if (sourceFile != null) { - return createAssociatedArtifact(sourceFile, bba); + return createAssociatedArtifact(sourceFile, bba); } return null; } @@ -401,7 +402,7 @@ final class ExtractPrefetch extends Extract { return null; - } + } /** * Cycle thru the execution times list and only return a new list of times diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java index 5d727f5b7d..c282fc49f4 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRecycleBin.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2019 Basis Technology Corp. + * Copyright 2019-2021 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -72,21 +72,23 @@ final class ExtractRecycleBin extends Extract { private static final Logger logger = Logger.getLogger(ExtractRecycleBin.class.getName()); private static final String RECYCLE_BIN_ARTIFACT_NAME = "TSK_RECYCLE_BIN"; //NON-NLS - + private static final String RECYCLE_BIN_DIR_NAME = "$RECYCLE.BIN"; //NON-NLS private static final int V1_FILE_NAME_OFFSET = 24; private static final int V2_FILE_NAME_OFFSET = 28; + private final IngestJobContext context; @Messages({ - "ExtractRecycleBin_module_name=Recycle Bin" + "ExtractRecycleBin_module_name=Recycle Bin Analyzer" }) - ExtractRecycleBin() { - super(Bundle.ExtractRecycleBin_module_name()); + ExtractRecycleBin(IngestJobContext context) { + super(Bundle.ExtractRecycleBin_module_name(), context); + this.context = context; } @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { // At this time it was decided that we would not include TSK_RECYCLE_BIN // in the default list of BlackboardArtifact types. try { @@ -98,13 +100,13 @@ final class ExtractRecycleBin extends Extract { BlackboardArtifact.Type recycleBinArtifactType; try { - recycleBinArtifactType = tskCase.getArtifactType(RECYCLE_BIN_ARTIFACT_NAME); + recycleBinArtifactType = tskCase.getBlackboard().getArtifactType(RECYCLE_BIN_ARTIFACT_NAME); } catch (TskCoreException ex) { logger.log(Level.WARNING, String.format("Unable to retrive custom artifact type %s", RECYCLE_BIN_ARTIFACT_NAME), ex); // NON-NLS // If this doesn't work bail. return; } - + // map SIDs to user names so that we can include that in the artifact Map userNameMap; try { @@ -152,8 +154,9 @@ final class ExtractRecycleBin extends Extract { } /** - * Process each individual iFile. Each iFile ($I) contains metadata about files that have been deleted. 
- * Each $I file should have a corresponding $R file which is the actuall deleted file. + * Process each individual iFile. Each iFile ($I) contains metadata about + * files that have been deleted. Each $I file should have a corresponding $R + * file which is the actuall deleted file. * * @param context * @param recycleBinArtifactType Module created artifact type @@ -223,7 +226,7 @@ final class ExtractRecycleBin extends Extract { } else { AbstractFile folder = getOrMakeFolder(Case.getCurrentCase().getSleuthkitCase(), (FsContent) rFile.getParent(), Paths.get(metaData.getFullWindowsPath()).getParent().toString()); - addFileSystemFile(skCase, (FsContent)rFile, folder, Paths.get(metaData.getFullWindowsPath()).getFileName().toString(), metaData.getDeletedTimeStamp()); + addFileSystemFile(skCase, (FsContent) rFile, folder, Paths.get(metaData.getFullWindowsPath()).getFileName().toString(), metaData.getDeletedTimeStamp()); } } } catch (TskCoreException ex) { @@ -270,25 +273,34 @@ final class ExtractRecycleBin extends Extract { } /** - * Parse the $I file. This file contains metadata information about deleted files + * Parse the $I file. This file contains metadata information about deleted + * files. * * File format prior to Windows 10: - * Offset Size Description - * 0 8 Header - * 8 8 File Size - * 16 8 Deleted Timestamp - * 24 520 File Name - * + * + * Offset Size Description + * + * 0 8 Header + * + * 8 8 File Size + * + * 16 8 Deleted Timestamp + * + * 24 520 File Name + * * File format Windows 10+ - * Offset Size Description - * 0 8 Header - * 8 8 File Size - * 16 8 Deleted TimeStamp - * 24 4 File Name Length - * 28 var File Name - * - * For versions of Windows prior to 10, header = 0x01. 
Windows 10+ header == - * 0x02 + * + * Offset Size Description + * + * 0 8 Header + * + * 8 8 File Size + * + * 16 8 Deleted TimeStamp + * + * 24 4 File Name Length + * + * 28 var File Name * * @param iFilePath Path to local copy of file in temp folder * @@ -297,35 +309,34 @@ final class ExtractRecycleBin extends Extract { private RecycledFileMetaData parseIFile(String iFilePath) throws IOException { try { byte[] allBytes = Files.readAllBytes(Paths.get(iFilePath)); - - ByteBuffer byteBuffer = ByteBuffer.wrap(allBytes); - byteBuffer.order(ByteOrder.LITTLE_ENDIAN); + ByteBuffer byteBuffer = ByteBuffer.wrap(allBytes); + byteBuffer.order(ByteOrder.LITTLE_ENDIAN); - long version = byteBuffer.getLong(); - long fileSize = byteBuffer.getLong(); - long timestamp = byteBuffer.getLong(); + long version = byteBuffer.getLong(); + long fileSize = byteBuffer.getLong(); + long timestamp = byteBuffer.getLong(); - // Convert from windows FILETIME to Unix Epoch seconds - timestamp = Util.filetimeToMillis(timestamp) / 1000; + // Convert from windows FILETIME to Unix Epoch seconds + timestamp = Util.filetimeToMillis(timestamp) / 1000; - byte[] stringBytes; + byte[] stringBytes; - if (version == 1) { - stringBytes = Arrays.copyOfRange(allBytes, V1_FILE_NAME_OFFSET, allBytes.length); - } else { - int fileNameLength = byteBuffer.getInt() * 2; //Twice the bytes for unicode - stringBytes = Arrays.copyOfRange(allBytes, V2_FILE_NAME_OFFSET, V2_FILE_NAME_OFFSET + fileNameLength); - } + if (version == 1) { + stringBytes = Arrays.copyOfRange(allBytes, V1_FILE_NAME_OFFSET, allBytes.length); + } else { + int fileNameLength = byteBuffer.getInt() * 2; //Twice the bytes for unicode + stringBytes = Arrays.copyOfRange(allBytes, V2_FILE_NAME_OFFSET, V2_FILE_NAME_OFFSET + fileNameLength); + } - String fileName = new String(stringBytes, "UTF-16LE"); //NON-NLS + String fileName = new String(stringBytes, "UTF-16LE"); //NON-NLS - return new RecycledFileMetaData(fileSize, timestamp, fileName); + return new 
RecycledFileMetaData(fileSize, timestamp, fileName); } catch (IOException | BufferUnderflowException | IllegalArgumentException | ArrayIndexOutOfBoundsException ex) { throw new IOException("Error parsing $I File, file is corrupt or not a valid I$ file", ex); } } - + /** * Create a map of userids to usernames from the OS Accounts. * @@ -337,8 +348,8 @@ final class ExtractRecycleBin extends Extract { */ private Map makeUserNameMap(Content dataSource) throws TskCoreException { Map userNameMap = new HashMap<>(); - - for(OsAccount account: tskCase.getOsAccountManager().getOsAccounts(((DataSource)dataSource).getHost())) { + + for (OsAccount account : tskCase.getOsAccountManager().getOsAccounts(((DataSource) dataSource).getHost())) { Optional userName = account.getLoginName(); userNameMap.put(account.getName(), userName.isPresent() ? userName.get() : ""); } @@ -438,16 +449,16 @@ final class ExtractRecycleBin extends Extract { */ private BlackboardArtifact createArtifact(AbstractFile rFile, BlackboardArtifact.Type type, String fileName, String userName, long dateTime) throws TskCoreException { List attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(TSK_DATETIME_DELETED, getName(), dateTime)); - attributes.add(new BlackboardAttribute(TSK_USER_NAME, getName(), userName == null || userName.isEmpty() ? "" : userName)); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(TSK_DATETIME_DELETED, getDisplayName(), dateTime)); + attributes.add(new BlackboardAttribute(TSK_USER_NAME, getDisplayName(), userName == null || userName.isEmpty() ? "" : userName)); return createArtifactWithAttributes(type, rFile, attributes); } /** - * Returns a folder for the given path. If the path does not exist the - * the folder is created. Recursively makes as many parent folders as needed. - * + * Returns a folder for the given path. 
If the path does not exist the the + * folder is created. Recursively makes as many parent folders as needed. + * * @param skCase * @param dataSource * @param path @@ -466,8 +477,8 @@ final class ExtractRecycleBin extends Extract { if (!parentPath.equals("/")) { parentPath = parentPath + "/"; } - - files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='%s' AND name='%s'", + + files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='%s' AND name='%s'", dataSource.getFileSystemId(), SleuthkitCase.escapeSingleQuotes(parentPath), folderName != null ? SleuthkitCase.escapeSingleQuotes(folderName) : "")); } else { files = skCase.findAllFilesWhere(String.format("fs_obj_id=%s AND parent_path='/' AND name=''", dataSource.getFileSystemId())); @@ -482,9 +493,9 @@ final class ExtractRecycleBin extends Extract { } /** - * Adds a new file system file that is unallocated and maps to the original + * Adds a new file system file that is unallocated and maps to the original * file in recycle bin directory. - * + * * @param skCase The current case. * @param recycleBinFile The file from the recycle bin. * @param parentDir The directory that the recycled file was deleted. @@ -524,7 +535,7 @@ final class ExtractRecycleBin extends Extract { Path path = Paths.get(pathString); int nameCount = path.getNameCount(); - if(nameCount > 0) { + if (nameCount > 0) { String rootless = "/" + path.subpath(0, nameCount); return rootless.replace("\\", "/"); } else { @@ -533,13 +544,13 @@ final class ExtractRecycleBin extends Extract { } /** - * Helper function get from the given path either the file name or - * the last directory in the path. - * + * Helper function get from the given path either the file name or the last + * directory in the path. + * * @param filePath The file\directory path * - * @return If file path, returns the file name. If directory path the - * The last directory in the path is returned. + * @return If file path, returns the file name. 
If directory path the The + * last directory in the path is returned. */ String getFileName(String filePath) { Path fileNamePath = Paths.get(filePath).getFileName(); @@ -551,10 +562,10 @@ final class ExtractRecycleBin extends Extract { /** * Returns the parent path for the given path. - * + * * @param path Path string - * - * @return The parent path for the given path. + * + * @return The parent path for the given path. */ String getParentPath(String path) { Path parentPath = Paths.get(path).getParent(); @@ -605,12 +616,12 @@ final class ExtractRecycleBin extends Extract { } /** - * Returns the full path to the deleted file or folder. This path will + * Returns the full path to the deleted file or folder. This path will * include the drive letter, ie C:\ * * @return String name of the deleted file */ - String getFullWindowsPath() { + String getFullWindowsPath() { return fileName.trim(); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 83a07cf76c..5efab762a9 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -61,7 +61,6 @@ import java.util.AbstractMap; import java.util.ArrayList; import java.util.List; import java.util.Collection; -import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -92,7 +91,6 @@ import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAM import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH; import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HOME_DIR; import org.sleuthkit.datamodel.Content; -import org.sleuthkit.datamodel.DataArtifact; import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.Host; import org.sleuthkit.datamodel.HostManager; @@ -176,29 
+174,30 @@ class ExtractRegistry extends Extract { private final Path rrHome; // Path to the Autopsy version of RegRipper private final Path rrFullHome; // Path to the full version of RegRipper private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; private Map userNameMap; private final List samDomainIDsList = new ArrayList<>(); private String compName = ""; private String domainName = ""; - + private static final String SHELLBAG_ARTIFACT_NAME = "RA_SHELL_BAG"; //NON-NLS private static final String SHELLBAG_ATTRIBUTE_LAST_WRITE = "RA_SHELL_BAG_LAST_WRITE"; //NON-NLS private static final String SHELLBAG_ATTRIBUTE_KEY = "RA_SHELL_BAG_KEY"; //NON-NLS - + private static final SimpleDateFormat REG_RIPPER_TIME_FORMAT = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'", US); - + private BlackboardArtifact.Type shellBagArtifactType = null; private BlackboardAttribute.Type shellBagKeyAttributeType = null; private BlackboardAttribute.Type shellBagLastWriteAttributeType = null; - + static { REG_RIPPER_TIME_FORMAT.setTimeZone(getTimeZone("GMT")); } - ExtractRegistry() throws IngestModuleException { - super(NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text")); + ExtractRegistry(IngestJobContext context) throws IngestModuleException { + super(NbBundle.getMessage(ExtractRegistry.class, "ExtractRegistry.moduleName.text"), context); + this.context = context; final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); //NON-NLS if (rrRoot == null) { @@ -264,7 +263,7 @@ class ExtractRegistry extends Extract { String msg = NbBundle.getMessage(this.getClass(), "ExtractRegistry.findRegFiles.errMsg.errReadingFile", "sam"); logger.log(Level.WARNING, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); } // find the user-specific ntuser-dat files @@ -290,7 +289,7 @@ class ExtractRegistry 
extends Extract { String msg = NbBundle.getMessage(this.getClass(), "ExtractRegistry.findRegFiles.errMsg.errReadingFile", regFileName); logger.log(Level.WARNING, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); } } return allRegistryFiles; @@ -299,6 +298,7 @@ class ExtractRegistry extends Extract { /** * Identifies registry files in the database by mtimeItem, runs regripper on * them, and parses the output. + * * @param ingestJobId The ingest job id. */ private void analyzeRegistryFiles(long ingestJobId) { @@ -316,7 +316,7 @@ class ExtractRegistry extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + String regFileName = regFile.getName(); long regFileId = regFile.getId(); String regFileNameLocal = RAImageIngestModule.getRATempPath(currentCase, "reg", ingestJobId) + File.separator + regFileName; @@ -329,14 +329,14 @@ class ExtractRegistry extends Extract { regFile.getName(), regFileId), ex); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp", - this.getName(), regFileName)); + this.getDisplayName(), regFileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp registry file '%s' for registry file '%s' (id=%d).", regFileNameLocal, regFile.getName(), regFileId), ex); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.errMsg.errWritingTemp", - this.getName(), regFileName)); + this.getDisplayName(), regFileName)); continue; } @@ -352,7 +352,7 @@ class ExtractRegistry extends Extract { logger.log(Level.SEVERE, null, ex); } - logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{getName(), regFileNameLocal}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{getDisplayName(), regFileNameLocal}); //NON-NLS RegOutputFiles 
regOutputFiles = ripRegistryFile(regFileNameLocal, outputPathBase); if (context.dataSourceIngestIsCancelled()) { break; @@ -362,9 +362,9 @@ class ExtractRegistry extends Extract { if (regOutputFiles.autopsyPlugins.isEmpty() == false && parseAutopsyPluginOutput(regOutputFiles.autopsyPlugins, regFile) == false) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.failedParsingResults", - this.getName(), regFileName)); + this.getDisplayName(), regFileName)); } - + if (context.dataSourceIngestIsCancelled()) { return; } @@ -375,7 +375,7 @@ class ExtractRegistry extends Extract { if (regFileNameLocal.toLowerCase().contains("sam") && parseSamPluginOutput(regOutputFiles.fullPlugins, regFile, ingestJobId) == false) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.failedParsingResults", - this.getName(), regFileName)); + this.getDisplayName(), regFileName)); } else if (regFileNameLocal.toLowerCase().contains("ntuser") || regFileNameLocal.toLowerCase().contains("usrclass")) { try { List shellbags = ShellBagParser.parseShellbagOutput(regOutputFiles.fullPlugins); @@ -387,13 +387,13 @@ class ExtractRegistry extends Extract { } else if (regFileNameLocal.toLowerCase().contains("system") && parseSystemPluginOutput(regOutputFiles.fullPlugins, regFile) == false) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractRegistry.analyzeRegFiles.failedParsingResults", - this.getName(), regFileName)); + this.getDisplayName(), regFileName)); } - + if (context.dataSourceIngestIsCancelled()) { return; } - + try { Report report = currentCase.addReport(regOutputFiles.fullPlugins, NbBundle.getMessage(this.getClass(), "ExtractRegistry.parentModuleName.noSpace"), @@ -478,7 +478,7 @@ class ExtractRegistry extends Extract { scanErrorLogs(errFilePath); } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Unable to run RegRipper on %s", regFilePath), ex); //NON-NLS - 
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName(), regFilePath)); + this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getDisplayName(), regFilePath)); } } return regOutputFiles; @@ -517,7 +517,7 @@ class ExtractRegistry extends Extract { ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context, true)); } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error running RegRipper on %s", hiveFilePath), ex); //NON-NLS - this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName(), hiveFilePath)); + this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getDisplayName(), hiveFilePath)); } } @@ -567,7 +567,7 @@ class ExtractRegistry extends Extract { Long mtime = null; if (timenodes.getLength() > 0) { Element timenode = (Element) timenodes.item(0); - String etime = timenode.getTextContent(); + String etime = timenode.getTextContent().trim(); //sometimes etime will be an empty string and therefore can not be parsed into a date if (etime != null && !etime.isEmpty()) { try { @@ -636,7 +636,7 @@ class ExtractRegistry extends Extract { case "InstallDate": //NON-NLS if (value != null && !value.isEmpty()) { try { - installtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyyZ", US).parse(value+"+0000").getTime(); + installtime = new SimpleDateFormat("EEE MMM d HH:mm:ss yyyyZ", US).parse(value + "+0000").getTime(); String Tempdate = installtime.toString(); installtime = Long.valueOf(Tempdate) / MS_IN_SEC; } catch (ParseException e) { @@ -663,7 +663,7 @@ class ExtractRegistry extends Extract { // Check if there is already an OS_INFO artifact for this file, and add to that if possible. 
ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); + newArtifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } @@ -710,7 +710,7 @@ class ExtractRegistry extends Extract { // Check if there is already an OS_INFO artifact for this file and add to that if possible ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); + newArtifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } @@ -743,18 +743,18 @@ class ExtractRegistry extends Extract { // Check if there is already an OS_INFO artifact for this file and add to that if possible ArrayList results = tskCase.getBlackboardArtifacts(ARTIFACT_TYPE.TSK_OS_INFO, regFile.getId()); if (results.isEmpty()) { - newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_OS_INFO, regFile, bbattributes)); + newArtifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_OS_INFO, regFile, bbattributes)); } else { results.get(0).addAttributes(bbattributes); } - for (Map.Entry userMap : getUserNameMap().entrySet()) { + for (Map.Entry userMap : getUserNameMap().entrySet()) { String sid = ""; - try{ + try { sid = userMap.getKey(); String userName = userMap.getValue(); // Accounts in the SAM are all local accounts createOrUpdateOsAccount(regFile, sid, userName, null, null, OsAccountRealm.RealmScope.LOCAL); - } catch(TskCoreException | TskDataException | NotUserSIDException ex) { + } catch (TskCoreException | TskDataException | NotUserSIDException ex) { logger.log(Level.WARNING, 
String.format("Failed to update Domain for existing OsAccount: %s, sid: %s", regFile.getId(), sid), ex); } } @@ -783,29 +783,34 @@ class ExtractRegistry extends Extract { break; case "usb": //NON-NLS try { - Long usbMtime = Long.parseLong(artnode.getAttribute("mtime")); //NON-NLS - usbMtime = Long.valueOf(usbMtime.toString()); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, parentModuleName, usbMtime)); - String dev = artnode.getAttribute("dev"); //NON-NLS - String make = ""; - String model = dev; - if (dev.toLowerCase().contains("vid")) { //NON-NLS - USBInfo info = USB_MAPPER.parseAndLookup(dev); - if (info.getVendor() != null) { - make = info.getVendor(); + Long usbMtime = Long.valueOf("0"); + if (!artnode.getAttribute("mtime").isEmpty()) { + usbMtime = Long.parseLong(artnode.getAttribute("mtime")); //NON-NLS + } + usbMtime = Long.valueOf(usbMtime.toString()); + if (usbMtime > 0) { + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, parentModuleName, usbMtime)); } - if (info.getProduct() != null) { - model = info.getProduct(); + String dev = artnode.getAttribute("dev"); //NON-NLS + String make = ""; + String model = dev; + if (dev.toLowerCase().contains("vid")) { //NON-NLS + USBInfo info = USB_MAPPER.parseAndLookup(dev); + if (info.getVendor() != null) { + make = info.getVendor(); + } + if (info.getProduct() != null) { + model = info.getProduct(); + } } + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, parentModuleName, make)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, parentModuleName, model)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID, parentModuleName, value)); + newArtifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_DEVICE_ATTACHED, regFile, bbattributes)); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Error adding device_attached artifact to blackboard for file %d.", 
regFile.getId()), ex); //NON-NLS } - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, parentModuleName, make)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, parentModuleName, model)); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_ID, parentModuleName, value)); - newArtifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED, regFile, bbattributes)); - } catch (TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Error adding device_attached artifact to blackboard for file %d.", regFile.getId()), ex); //NON-NLS - } - break; + break; case "uninstall": //NON-NLS Long itemMtime = null; try { @@ -839,7 +844,7 @@ class ExtractRegistry extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, parentModuleName, value)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, parentModuleName, artnode.getNodeName())); BlackboardArtifact bbart = regFile.newDataArtifact(new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_RECENT_OBJECT), bbattributes); - + newArtifacts.add(bbart); } catch (TskCoreException ex) { logger.log(Level.SEVERE, "Error adding recent object artifact to blackboard.", ex); //NON-NLS @@ -866,18 +871,18 @@ class ExtractRegistry extends Extract { String sid = artnode.getAttribute("sid"); //NON-NLS String username = artnode.getAttribute("username"); //NON-NLS String domName = domainName; - + // accounts in profileList can be either domain or local // Assume domain unless the SID was seen before in the SAM (which is only local). 
OsAccountRealm.RealmScope scope = OsAccountRealm.RealmScope.DOMAIN; - if(isDomainIdInSAMList(sid)) { + if (isDomainIdInSAMList(sid)) { domName = null; scope = OsAccountRealm.RealmScope.LOCAL; } - - try{ + + try { createOrUpdateOsAccount(regFile, sid, username, homeDir, domName, scope); - } catch(TskCoreException | TskDataException | NotUserSIDException ex) { + } catch (TskCoreException | TskDataException | NotUserSIDException ex) { logger.log(Level.SEVERE, String.format("Failed to create OsAccount for file: %s, sid: %s", regFile.getId(), sid), ex); } break; @@ -886,7 +891,7 @@ class ExtractRegistry extends Extract { try { String localPath = artnode.getAttribute("localPath"); //NON-NLS String remoteName = value; - + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LOCAL_PATH, parentModuleName, localPath)); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_REMOTE_PATH, @@ -942,7 +947,7 @@ class ExtractRegistry extends Extract { } } catch (IOException ex) { } - + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(newArtifacts); } @@ -1013,9 +1018,9 @@ class ExtractRegistry extends Extract { addBlueToothAttribute(line, attributes, TSK_DATETIME); line = reader.readLine(); addBlueToothAttribute(line, attributes, TSK_DATETIME_ACCESSED); - + try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING, regFile, attributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_BLUETOOTH_PAIRING, regFile, attributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create bluetooth_pairing artifact for file %d", regFile.getId()), ex); } @@ -1029,7 +1034,7 @@ class ExtractRegistry extends Extract { } } } - + if (!bbartifacts.isEmpty() && !context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } @@ -1056,9 +1061,9 @@ class ExtractRegistry extends Extract { // we set the timestamp to 0 and continue on processing logger.log(Level.WARNING, 
String.format("Failed to parse date/time %s for Bluetooth Last Seen attribute.", dateString), ex); //NON-NLS } - attributes.add(new BlackboardAttribute(attributeType, getName(), dateLong)); + attributes.add(new BlackboardAttribute(attributeType, getDisplayName(), dateLong)); } else { - attributes.add(new BlackboardAttribute(attributeType, getName(), tokenString)); + attributes.add(new BlackboardAttribute(attributeType, getDisplayName(), tokenString)); } } } @@ -1074,7 +1079,7 @@ class ExtractRegistry extends Extract { * @return true if successful, false if parsing failed at some point */ private boolean parseSamPluginOutput(String regFilePath, AbstractFile regAbstractFile, long ingestJobId) { - + File regfile = new File(regFilePath); List newArtifacts = new ArrayList<>(); try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(regfile), StandardCharsets.UTF_8))) { @@ -1103,31 +1108,31 @@ class ExtractRegistry extends Extract { userInfoMap.put(sid, userInfo); addSIDToSAMList(sid); } - + // New OsAccount Code OsAccountManager accountMgr = tskCase.getOsAccountManager(); HostManager hostMrg = tskCase.getHostManager(); - Host host = hostMrg.getHostByDataSource((DataSource)dataSource); + Host host = hostMrg.getHostByDataSource((DataSource) dataSource); List existingAccounts = accountMgr.getOsAccounts(host); - for(OsAccount osAccount: existingAccounts) { + for (OsAccount osAccount : existingAccounts) { Optional optional = osAccount.getAddr(); - if(!optional.isPresent()) { + if (!optional.isPresent()) { continue; } - + String sid = optional.get(); - Map userInfo = userInfoMap.remove(sid); - if(userInfo != null) { - addAccountInstance(accountMgr, osAccount, (DataSource)dataSource); + Map userInfo = userInfoMap.remove(sid); + if (userInfo != null) { + addAccountInstance(accountMgr, osAccount, (DataSource) dataSource); updateOsAccount(osAccount, userInfo, groupMap.get(sid), regAbstractFile); } } - + //add remaining userinfos as 
accounts; for (Map userInfo : userInfoMap.values()) { OsAccount osAccount = accountMgr.newWindowsOsAccount(userInfo.get(SID_KEY), null, null, host, OsAccountRealm.RealmScope.LOCAL); - accountMgr.newOsAccountInstance(osAccount, (DataSource)dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); + accountMgr.newOsAccountInstance(osAccount, (DataSource) dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); updateOsAccount(osAccount, userInfo, groupMap.get(userInfo.get(SID_KEY)), regAbstractFile); } return true; @@ -1137,10 +1142,9 @@ class ExtractRegistry extends Extract { logger.log(Level.WARNING, "Error building the document parser: {0}", ex); //NON-NLS } catch (TskDataException | TskCoreException ex) { logger.log(Level.WARNING, "Error updating TSK_OS_ACCOUNT artifacts to include newly parsed data.", ex); //NON-NLS - } catch (OsAccountManager.NotUserSIDException ex) { + } catch (OsAccountManager.NotUserSIDException ex) { logger.log(Level.WARNING, "Error creating OS Account, input SID is not a user SID.", ex); //NON-NLS - } - finally { + } finally { if (!context.dataSourceIngestIsCancelled()) { postArtifacts(newArtifacts); } @@ -1276,13 +1280,13 @@ class ExtractRegistry extends Extract { } Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), fileName)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), userName)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, getName(), progRunDateTime)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, getName(), comment)); - + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME, getDisplayName(), userName)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME, getDisplayName(), progRunDateTime)); + attributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COMMENT, getDisplayName(), comment)); + try { - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_PROG_RUN, regFile, attributes); + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_PROG_RUN, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { @@ -1294,7 +1298,7 @@ class ExtractRegistry extends Extract { line = reader.readLine(); } if (!bbartifacts.isEmpty() && !context.dataSourceIngestIsCancelled()) { - postArtifacts(bbartifacts); + postArtifacts(bbartifacts); } } @@ -1345,11 +1349,11 @@ class ExtractRegistry extends Extract { } } Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), adobeUsedTime)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getDisplayName(), adobeUsedTime)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); if (bba != null) { bbartifacts.add(bba); fileName = fileName.replace("\0", ""); @@ -1358,7 +1362,7 @@ class ExtractRegistry extends Extract { bbartifacts.add(bba); } } - } catch(TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); @@ -1398,10 +1402,10 @@ class ExtractRegistry extends Extract { String 
tokens[] = line.split("> "); String fileName = tokens[1]; Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); if (bba != null) { bbartifacts.add(bba); bba = createAssociatedArtifact(fileName, bba); @@ -1413,7 +1417,7 @@ class ExtractRegistry extends Extract { } } } - } catch(TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); @@ -1421,7 +1425,7 @@ class ExtractRegistry extends Extract { line = line.trim(); } } - if (!bbartifacts.isEmpty()&& !context.dataSourceIngestIsCancelled()) { + if (!bbartifacts.isEmpty() && !context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } @@ -1455,10 +1459,10 @@ class ExtractRegistry extends Extract { if (tokens.length > 1) { String fileName = tokens[1]; Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, 
regFile, attributes); if (bba != null) { bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); @@ -1466,7 +1470,7 @@ class ExtractRegistry extends Extract { bbartifacts.add(bba); } } - } catch(TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } } @@ -1508,16 +1512,16 @@ class ExtractRegistry extends Extract { String tokens[] = line.split("> "); String fileName = tokens[1]; Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } - } catch(TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); @@ -1553,17 +1557,17 @@ class ExtractRegistry extends Extract { // String fileName = line; Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + 
attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } - - } catch(TskCoreException ex) { + + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); @@ -1605,17 +1609,17 @@ class ExtractRegistry extends Extract { String fileNameTokens[] = tokens[4].split(" - "); String fileName = fileNameTokens[1]; Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), docDate)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getDisplayName(), docDate)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } - } catch(TskCoreException ex) { + } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = reader.readLine(); @@ -1670,18 +1674,18 @@ class ExtractRegistry extends Extract { logger.log(Level.WARNING, 
String.format("Failed to parse date/time %s for TrustRecords artifact.", tokens[0]), ex); //NON-NLS } Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), fileName)); - attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getName(), usedTime)); - attributes.add(new BlackboardAttribute(TSK_COMMENT, getName(), comment)); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, regFile, attributes); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), fileName)); + attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, getDisplayName(), usedTime)); + attributes.add(new BlackboardAttribute(TSK_COMMENT, getDisplayName(), comment)); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, regFile, attributes); bbartifacts.add(bba); bba = createAssociatedArtifact(FilenameUtils.normalize(fileName, true), bba); if (bba != null) { bbartifacts.add(bba); } - } catch(TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", regFile.getId()), ex); } line = line.trim(); } @@ -1720,8 +1724,8 @@ class ExtractRegistry extends Extract { } /** - * Create a map of userids to usernames for all OS Accounts associated with the current - * host in OsAccountManager. + * Create a map of userids to usernames for all OS Accounts associated with + * the current host in OsAccountManager. 
* * @param dataSource * @@ -1732,55 +1736,53 @@ class ExtractRegistry extends Extract { private Map makeUserNameMap(Content dataSource) throws TskCoreException { Map map = new HashMap<>(); - for(OsAccount account: tskCase.getOsAccountManager().getOsAccounts(((DataSource)dataSource).getHost())) { + for (OsAccount account : tskCase.getOsAccountManager().getOsAccounts(((DataSource) dataSource).getHost())) { Optional userName = account.getLoginName(); String address = account.getAddr().orElse(""); - if(!address.isEmpty()) { + if (!address.isEmpty()) { map.put(address, userName.isPresent() ? userName.get() : ""); } } return map; } - + /** * Strip the machine sid off of the osAccountSID. The returned string will * include everything in the osAccountSID up to the last -. - * - * There must be at least three dashes in the SID for it to be useful. - * The sid is of a format S-R-X-Y1 where Y1 is the domain identifier which - * may contain multiple dashes. Everything after the final dash is the - * relative identifier. For example - * S-1-5-21-1004336348-1177238915-682003330-512 - * - * In this example the domain identifier is - * 21-1004336348-1177238915-682003330 - * The relative identifier is 512. - * + * + * There must be at least three dashes in the SID for it to be useful. The + * sid is of a format S-R-X-Y1 where Y1 is the domain identifier which may + * contain multiple dashes. Everything after the final dash is the relative + * identifier. For example S-1-5-21-1004336348-1177238915-682003330-512 + * + * In this example the domain identifier is + * 21-1004336348-1177238915-682003330 The relative identifier is 512. + * * In other words everything between the third and last dash is the domain * identifier. - * + * * @param osAccountSID The SID of the os account. 
- * + * * @return The Machine SID */ private String stripRelativeIdentifierFromSID(String osAccountSID) { - if(osAccountSID.split("-").length > 4) { + if (osAccountSID.split("-").length > 4) { int index = osAccountSID.lastIndexOf('-'); return index > 1 ? osAccountSID.substring(0, index) : ""; } return ""; } - + private final List machineSIDs = new ArrayList<>(); - + /** * Returns a mapping of user sids to user names. - * + * * @return SID to username map. Will be empty if none where found. */ private Map getUserNameMap() { - if(userNameMap == null) { + if (userNameMap == null) { // Get a mapping of user sids to user names and save globally so it can be used for other areas // of the registry, ie: BAM key try { @@ -1792,7 +1794,7 @@ class ExtractRegistry extends Extract { userNameMap = new HashMap<>(); } } - + return userNameMap; } @@ -1820,42 +1822,39 @@ class ExtractRegistry extends Extract { */ void createShellBagArtifacts(AbstractFile regFile, List shellbags) throws TskCoreException { List artifacts = new ArrayList<>(); - List dataArtifacts = new ArrayList<>(); try { for (ShellBag bag : shellbags) { Collection attributes = new ArrayList<>(); - attributes.add(new BlackboardAttribute(TSK_PATH, getName(), bag.getResource())); - attributes.add(new BlackboardAttribute(getKeyAttribute(), getName(), bag.getKey())); + attributes.add(new BlackboardAttribute(TSK_PATH, getDisplayName(), bag.getResource())); + attributes.add(new BlackboardAttribute(getKeyAttribute(), getDisplayName(), bag.getKey())); long time; time = bag.getLastWrite(); if (time != 0) { - attributes.add(new BlackboardAttribute(getLastWriteAttribute(), getName(), time)); + attributes.add(new BlackboardAttribute(getLastWriteAttribute(), getDisplayName(), time)); } time = bag.getModified(); if (time != 0) { - attributes.add(new BlackboardAttribute(TSK_DATETIME_MODIFIED, getName(), time)); + attributes.add(new BlackboardAttribute(TSK_DATETIME_MODIFIED, getDisplayName(), time)); } time = bag.getCreated(); if 
(time != 0) { - attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, getName(), time)); + attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, getDisplayName(), time)); } time = bag.getAccessed(); if (time != 0) { - attributes.add(new BlackboardAttribute(TSK_DATETIME_ACCESSED, getName(), time)); + attributes.add(new BlackboardAttribute(TSK_DATETIME_ACCESSED, getDisplayName(), time)); } - BlackboardArtifact artifact = createArtifactWithAttributes(getShellBagArtifact(), regFile, attributes); + BlackboardArtifact artifact = createArtifactWithAttributes(getShellBagArtifact(), regFile, attributes); artifacts.add(artifact); - dataArtifacts.add((DataArtifact)artifact); } } finally { - if(!context.dataSourceIngestIsCancelled()) { - postArtifacts(artifacts); - context.addDataArtifactsToJob(dataArtifacts); + if (!context.dataSourceIngestIsCancelled()) { + postArtifacts(artifacts); } } } @@ -1874,7 +1873,7 @@ class ExtractRegistry extends Extract { shellBagArtifactType = tskCase.getBlackboard().getOrAddArtifactType(SHELLBAG_ARTIFACT_NAME, Bundle.Shellbag_Artifact_Display_Name()); } catch (BlackboardException ex) { throw new TskCoreException(String.format("Failed to get shell bag artifact type", SHELLBAG_ARTIFACT_NAME), ex); - } + } } return shellBagArtifactType; @@ -1945,7 +1944,7 @@ class ExtractRegistry extends Extract { if (line.contains("Group Name")) { String value = line.replaceAll("Group Name\\s*?:", "").trim(); groupName = (value.replaceAll("\\[\\d*?\\]", "")).trim(); - int startIndex = value.indexOf(" [") + 1; + int startIndex = value.indexOf(" [") + 1; int endIndex = value.indexOf(']'); if (startIndex != -1 && endIndex != -1) { @@ -2007,9 +2006,8 @@ class ExtractRegistry extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = 
context; progressBar.progress(Bundle.Progress_Message_Analyze_Registry()); analyzeRegistryFiles(context.getJobId()); @@ -2024,7 +2022,7 @@ class ExtractRegistry extends Extract { public String autopsyPlugins = ""; public String fullPlugins = ""; } - + /** * Updates an existing or creates a new OsAccount with the given attributes. * @@ -2040,19 +2038,19 @@ class ExtractRegistry extends Extract { private void createOrUpdateOsAccount(AbstractFile file, String sid, String userName, String homeDir, String domainName, OsAccountRealm.RealmScope realmScope) throws TskCoreException, TskDataException, NotUserSIDException { OsAccountManager accountMgr = tskCase.getOsAccountManager(); HostManager hostMrg = tskCase.getHostManager(); - Host host = hostMrg.getHostByDataSource((DataSource)dataSource); + Host host = hostMrg.getHostByDataSource((DataSource) dataSource); Optional optional = accountMgr.getWindowsOsAccount(sid, null, null, host); OsAccount osAccount; if (!optional.isPresent()) { osAccount = accountMgr.newWindowsOsAccount(sid, userName != null && userName.isEmpty() ? null : userName, domainName, host, realmScope); - accountMgr.newOsAccountInstance(osAccount, (DataSource)dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); + accountMgr.newOsAccountInstance(osAccount, (DataSource) dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); } else { osAccount = optional.get(); - addAccountInstance(accountMgr, osAccount, (DataSource)dataSource); - if (userName != null && !userName.isEmpty()) { - OsAccountUpdateResult updateResult= accountMgr.updateCoreWindowsOsAccountAttributes(osAccount, null, userName, (domainName == null || domainName.isEmpty()) ? 
null : domainName, host); - osAccount = updateResult.getUpdatedAccount().orElse(osAccount); + addAccountInstance(accountMgr, osAccount, (DataSource) dataSource); + if (userName != null && !userName.isEmpty()) { + OsAccountUpdateResult updateResult = accountMgr.updateCoreWindowsOsAccountAttributes(osAccount, null, userName, (domainName == null || domainName.isEmpty()) ? null : domainName, host); + osAccount = updateResult.getUpdatedAccount().orElse(osAccount); } } @@ -2115,7 +2113,7 @@ class ExtractRegistry extends Extract { * @throws TskCoreException */ private void updateOsAccount(OsAccount osAccount, Map userInfo, List groupList, AbstractFile regFile) throws TskDataException, TskCoreException, NotUserSIDException { - Host host = ((DataSource)dataSource).getHost(); + Host host = ((DataSource) dataSource).getHost(); SimpleDateFormat regRipperTimeFormat = new SimpleDateFormat("EEE MMM dd HH:mm:ss yyyy 'Z'", US); regRipperTimeFormat.setTimeZone(getTimeZone("GMT")); @@ -2123,7 +2121,7 @@ class ExtractRegistry extends Extract { List attributes = new ArrayList<>(); Long creationTime = null; - + String value = userInfo.get(ACCOUNT_CREATED_KEY); if (value != null && !value.isEmpty() && !value.equals(NEVER_DATE)) { creationTime = parseRegRipTime(value); @@ -2138,9 +2136,9 @@ class ExtractRegistry extends Extract { osAccount, host, regFile)); } } - + String loginName = null; - value = userInfo.get(USERNAME_KEY); + value = userInfo.get(USERNAME_KEY); if (value != null && !value.isEmpty()) { loginName = value; } @@ -2157,7 +2155,7 @@ class ExtractRegistry extends Extract { // and "Default Guest Acct" value = userInfo.get(ACCOUNT_TYPE_KEY); if (value != null && !value.isEmpty() && value.toLowerCase().contains("admin")) { - attributes.add(createOsAccountAttribute(ATTRIBUTE_TYPE.TSK_IS_ADMIN, + attributes.add(createOsAccountAttribute(ATTRIBUTE_TYPE.TSK_IS_ADMIN, 1, osAccount, host, regFile)); } @@ -2241,22 +2239,21 @@ class ExtractRegistry extends Extract { // add the attributes 
to account. OsAccountManager accountMgr = tskCase.getOsAccountManager(); accountMgr.addExtendedOsAccountAttributes(osAccount, attributes); - + // update the loginname accountMgr.updateCoreWindowsOsAccountAttributes(osAccount, null, loginName, null, host); - + // update other standard attributes - fullname, creationdate accountMgr.updateStandardOsAccountAttributes(osAccount, fullName, null, null, creationTime); - - + } - + /** * Create comma separated list from the set values for the given keys. - * + * * @param keys List of map keys. * @param map Data map. - * + * * @return Comma separated String of values. */ private String getSettingsFromMap(String[] keys, Map map) { @@ -2320,38 +2317,39 @@ class ExtractRegistry extends Extract { private OsAccountAttribute createOsAccountAttribute(BlackboardAttribute.ATTRIBUTE_TYPE type, Integer value, OsAccount osAccount, Host host, AbstractFile file) { return osAccount.new OsAccountAttribute(new BlackboardAttribute.Type(type), value, osAccount, host, file); } - + /** - * Adds an account instance for the given data source if one does not already - * exist. - * + * Adds an account instance for the given data source if one does not + * already exist. + * * @param accountMgr * @param osAccount * @param dataSource - * @throws TskCoreException + * + * @throws TskCoreException */ private void addAccountInstance(OsAccountManager accountMgr, OsAccount osAccount, DataSource dataSource) throws TskCoreException { accountMgr.newOsAccountInstance(osAccount, dataSource, OsAccountInstance.OsAccountInstanceType.LAUNCHED); } - + /** * Add the domainId of the given account sid to the sam domain id list. 
- * + * * @param sid OS account sid */ private void addSIDToSAMList(String sid) { String relativeID = stripRelativeIdentifierFromSID(sid); - if(!relativeID.isEmpty() && !samDomainIDsList.contains(relativeID)) { + if (!relativeID.isEmpty() && !samDomainIDsList.contains(relativeID)) { samDomainIDsList.add(relativeID); } } - + /** - * Returns true if the domain id of the os account sid is in the list - * of domain ids seen when parsing the sam file. - * + * Returns true if the domain id of the os account sid is in the list of + * domain ids seen when parsing the sam file. + * * @param osAccountSID - * + * * @return If the domainID is in the same file list. */ private boolean isDomainIdInSAMList(String osAccountSID) { diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java index 429666f8d6..44cef5b3e5 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSafari.java @@ -64,6 +64,7 @@ import org.xml.sax.SAXException; final class ExtractSafari extends Extract { private final IngestServices services = IngestServices.getInstance(); + private final IngestJobContext context; // visit_time uses an epoch of Jan 1, 2001 thus the addition of 978307200 private static final String HISTORY_QUERY = "SELECT url, title, visit_time + 978307200 as time FROM 'history_items' JOIN history_visits ON history_item = history_items.id;"; //NON-NLS @@ -91,65 +92,64 @@ final class ExtractSafari extends Extract { private static final Logger LOG = Logger.getLogger(ExtractSafari.class.getName()); @Messages({ - "ExtractSafari_Module_Name=Safari", + "ExtractSafari_Module_Name=Safari Analyzer", "ExtractSafari_Error_Getting_History=An error occurred while processing Safari history files.", "ExtractSafari_Error_Parsing_Bookmark=An error occured while processing Safari Bookmark files", 
"ExtractSafari_Error_Parsing_Cookies=An error occured while processing Safari Cookies files", "Progress_Message_Safari_History=Safari History", "Progress_Message_Safari_Bookmarks=Safari Bookmarks", "Progress_Message_Safari_Cookies=Safari Cookies", - "Progress_Message_Safari_Downloads=Safari Downloads", - }) + "Progress_Message_Safari_Downloads=Safari Downloads",}) - @Override - protected String getName() { - return Bundle.ExtractSafari_Module_Name(); + ExtractSafari(IngestJobContext context) { + super(Bundle.ExtractSafari_Module_Name(), context); + this.context = context; } @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { setFoundData(false); - + progressBar.progress(Bundle.Progress_Message_Safari_Cookies()); try { - processHistoryDB(dataSource, context); + processHistoryDB(dataSource); } catch (IOException | TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Getting_History()); LOG.log(Level.SEVERE, "Exception thrown while processing history file.", ex); //NON-NLS } - + if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Safari_Bookmarks()); try { - processBookmarkPList(dataSource, context); + processBookmarkPList(dataSource); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | ParserConfigurationException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Bookmarks file.", ex); //NON-NLS } - + if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Safari_Downloads()); try { - processDownloadsPList(dataSource, context); + processDownloadsPList(dataSource); } catch (IOException | TskCoreException | SAXException | PropertyListFormatException | ParseException | 
ParserConfigurationException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Bookmark()); LOG.log(Level.SEVERE, "Exception thrown while parsing Safari Download.plist file.", ex); //NON-NLS } - + if (context.dataSourceIngestIsCancelled()) { return; } progressBar.progress(Bundle.Progress_Message_Safari_Cookies()); try { - processBinaryCookieFile(dataSource, context); + processBinaryCookieFile(dataSource); } catch (TskCoreException ex) { this.addErrorMessage(Bundle.ExtractSafari_Error_Parsing_Cookies()); LOG.log(Level.SEVERE, "Exception thrown while processing Safari cookies file.", ex); //NON-NLS @@ -163,7 +163,7 @@ final class ExtractSafari extends Extract { * @throws TskCoreException * @throws IOException */ - private void processHistoryDB(Content dataSource, IngestJobContext context) throws TskCoreException, IOException { + private void processHistoryDB(Content dataSource) throws TskCoreException, IOException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); List historyFiles = fileManager.findFiles(dataSource, HISTORY_FILE_NAME, SAFARI_FOLDER); @@ -179,22 +179,24 @@ final class ExtractSafari extends Extract { break; } - getHistory(context, historyFile); + getHistory(historyFile); } } /** * Finds all Bookmark.plist files and looks for bookmark entries. 
+ * * @param dataSource - * @param context - * @throws TskCoreException + * + * throws TskCoreException + * * @throws IOException * @throws SAXException * @throws PropertyListFormatException * @throws ParseException * @throws ParserConfigurationException */ - private void processBookmarkPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + private void processBookmarkPList(Content dataSource) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); List files = fileManager.findFiles(dataSource, BOOKMARK_FILE_NAME, SAFARI_FOLDER); @@ -210,23 +212,24 @@ final class ExtractSafari extends Extract { break; } - getBookmarks(context, file); + getBookmarks(file); } } - + /** * Process the safari download.plist file. - * + * * @param dataSource - * @param context - * @throws TskCoreException + * + * throws TskCoreException + * * @throws IOException * @throws SAXException * @throws PropertyListFormatException * @throws ParseException * @throws ParserConfigurationException */ - private void processDownloadsPList(Content dataSource, IngestJobContext context) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + private void processDownloadsPList(Content dataSource) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); List files = fileManager.findFiles(dataSource, DOWNLOAD_FILE_NAME, SAFARI_FOLDER); @@ -242,18 +245,20 @@ final class ExtractSafari extends Extract { break; } - getDownloads(dataSource, context, file); + getDownloads(dataSource, file); } } - + /** * Process the Safari Cookie 
file. + * * @param dataSource - * @param context - * @throws TskCoreException + * + * throws TskCoreException + * * @throws IOException */ - private void processBinaryCookieFile(Content dataSource, IngestJobContext context) throws TskCoreException { + private void processBinaryCookieFile(Content dataSource) throws TskCoreException { FileManager fileManager = getCurrentCase().getServices().getFileManager(); List files = fileManager.findFiles(dataSource, COOKIE_FILE_NAME, COOKIE_FOLDER); @@ -269,10 +274,10 @@ final class ExtractSafari extends Extract { break; } try { - getCookies(context, file); + getCookies(file); } catch (IOException ex) { - LOG.log(Level.WARNING, String.format("Failed to get cookies from file %s", Paths.get(file.getUniquePath(), file.getName()).toString()), ex); - } + LOG.log(Level.WARNING, String.format("Failed to get cookies from file %s", Paths.get(file.getUniquePath(), file.getName()).toString()), ex); + } } } @@ -281,26 +286,17 @@ final class ExtractSafari extends Extract { * BlackboardArtifacts for the history information in the file. 
* * @param historyFile AbstractFile version of the history file from the case + * * @throws TskCoreException * @throws IOException */ - private void getHistory(IngestJobContext context, AbstractFile historyFile) throws TskCoreException, IOException { + private void getHistory(AbstractFile historyFile) throws TskCoreException, IOException { if (historyFile.getSize() == 0) { return; } - - File tempHistoryFile = createTemporaryFile(context, historyFile, context.getJobId()); - + File tempHistoryFile = createTemporaryFile(historyFile); try { - ContentUtils.writeToFile(historyFile, tempHistoryFile, context::dataSourceIngestIsCancelled); - } catch (IOException ex) { - throw new IOException("Error writingToFile: " + historyFile, ex); //NON-NLS - } - - try { - if(!context.dataSourceIngestIsCancelled()) { - postArtifacts(getHistoryArtifacts(historyFile, tempHistoryFile.toPath(), context)); - } + postArtifacts(getHistoryArtifacts(historyFile, tempHistoryFile.toPath())); } finally { tempHistoryFile.delete(); } @@ -310,8 +306,8 @@ final class ExtractSafari extends Extract { * Creates a temporary bookmark file from the AbstractFile and creates * BlackboardArtifacts for the any bookmarks found. 
* - * @param context IngestJobContext object * @param file AbstractFile from case + * * @throws TskCoreException * @throws IOException * @throws SAXException @@ -319,29 +315,24 @@ final class ExtractSafari extends Extract { * @throws ParseException * @throws ParserConfigurationException */ - private void getBookmarks(IngestJobContext context, AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + private void getBookmarks(AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { if (file.getSize() == 0) { return; } - - File tempFile = createTemporaryFile(context, file, context.getJobId()); - + File tempFile = createTemporaryFile(file); try { - if(!context.dataSourceIngestIsCancelled()) { - postArtifacts(getBookmarkArtifacts(file, tempFile, context)); - } + postArtifacts(getBookmarkArtifacts(file, tempFile)); } finally { tempFile.delete(); } - } - + /** * Creates a temporary downloads file from the AbstractFile and creates * BlackboardArtifacts for the any downloads found. 
* - * @param context IngestJobContext object * @param file AbstractFile from case + * * @throws TskCoreException * @throws IOException * @throws SAXException @@ -349,35 +340,28 @@ final class ExtractSafari extends Extract { * @throws ParseException * @throws ParserConfigurationException */ - private void getDownloads(Content dataSource, IngestJobContext context, AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { + private void getDownloads(Content dataSource, AbstractFile file) throws TskCoreException, IOException, SAXException, PropertyListFormatException, ParseException, ParserConfigurationException { if (file.getSize() == 0) { return; } - - File tempFile = createTemporaryFile(context, file, context.getJobId()); - + File tempFile = createTemporaryFile(file); try { - if(!context.dataSourceIngestIsCancelled()) { - postArtifacts(getDownloadArtifacts(dataSource, file, tempFile)); - } + postArtifacts(getDownloadArtifacts(dataSource, file, tempFile)); } finally { - if (tempFile != null) { - tempFile.delete(); - } + tempFile.delete(); } - } - + /** - * Creates a temporary copy of the Cookie file and creates a list of cookie + * Creates a temporary copy of the Cookie file and creates a list of cookie * BlackboardArtifacts. 
- * - * @param context IngetstJobContext + * * @param file Original Cookie file from the case + * * @throws TskCoreException * @throws IOException */ - private void getCookies(IngestJobContext context, AbstractFile file) throws TskCoreException, IOException { + private void getCookies(AbstractFile file) throws TskCoreException, IOException { if (file.getSize() == 0) { return; } @@ -385,12 +369,12 @@ final class ExtractSafari extends Extract { File tempFile = null; try { - tempFile = createTemporaryFile(context, file, context.getJobId()); + tempFile = createTemporaryFile(file); - if(!context.dataSourceIngestIsCancelled()) { - postArtifacts(getCookieArtifacts(file, tempFile, context)); + if (!context.dataSourceIngestIsCancelled()) { + postArtifacts(getCookieArtifacts(file, tempFile)); } - + } finally { if (tempFile != null) { tempFile.delete(); @@ -402,14 +386,16 @@ final class ExtractSafari extends Extract { * Queries the history db for the history information creating a list of * BlackBoardArtifact for each row returned from the db. 
* - * @param origFile AbstractFile of the history file from the case + * @param origFile AbstractFile of the history file from the case * @param tempFilePath Path to temporary copy of the history db + * * @return Blackboard Artifacts for the history db or null if there are no - * history artifacts + * history artifacts + * * @throws TskCoreException */ - private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath, IngestJobContext context) throws TskCoreException { - List> historyList = this.dbConnect(tempFilePath.toString(), HISTORY_QUERY); + private Collection getHistoryArtifacts(AbstractFile origFile, Path tempFilePath) throws TskCoreException { + List> historyList = this.querySQLiteDb(tempFilePath.toString(), HISTORY_QUERY); if (historyList == null || historyList.isEmpty()) { return null; @@ -420,17 +406,17 @@ final class ExtractSafari extends Extract { if (context.dataSourceIngestIsCancelled()) { return bbartifacts; } - + String url = row.get(HEAD_URL).toString(); String title = row.get(HEAD_TITLE).toString(); Long time = (Double.valueOf(row.get(HEAD_TIME).toString())).longValue(); bbartifacts.add( createArtifactWithAttributes( - TSK_WEB_HISTORY, - origFile, - createHistoryAttribute(url, time, null, title, - this.getName(), NetworkUtils.extractDomain(url), null))); + BlackboardArtifact.Type.TSK_WEB_HISTORY, + origFile, + createHistoryAttributes(url, time, null, title, + this.getDisplayName(), NetworkUtils.extractDomain(url), null))); } return bbartifacts; @@ -441,7 +427,9 @@ final class ExtractSafari extends Extract { * * @param origFile The origFile Bookmark.plist file from the case * @param tempFile The temporary local version of Bookmark.plist + * * @return Collection of BlackboardArtifacts for the bookmarks in origFile + * * @throws IOException * @throws PropertyListFormatException * @throws ParseException @@ -449,13 +437,13 @@ final class ExtractSafari extends Extract { * @throws SAXException * @throws TskCoreException */ - private 
Collection getBookmarkArtifacts(AbstractFile origFile, File tempFile, IngestJobContext context) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { + private Collection getBookmarkArtifacts(AbstractFile origFile, File tempFile) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { Collection bbartifacts = new ArrayList<>(); try { NSDictionary root = (NSDictionary) PropertyListParser.parse(tempFile); - parseBookmarkDictionary(bbartifacts, origFile, root, context); + parseBookmarkDictionary(bbartifacts, origFile, root); } catch (PropertyListFormatException ex) { PropertyListFormatException plfe = new PropertyListFormatException(origFile.getName() + ": " + ex.getMessage()); plfe.setStackTrace(ex.getStackTrace()); @@ -476,46 +464,51 @@ final class ExtractSafari extends Extract { return bbartifacts; } - + /** - * Finds the download entries in the tempFile and creates a list of artifacts from them. - * + * Finds the download entries in the tempFile and creates a list of + * artifacts from them. 
+ * * @param origFile Download.plist file from case * @param tempFile Temporary copy of download.plist file + * * @return Collection of BlackboardArtifacts for the downloads in origFile + * * @throws IOException * @throws PropertyListFormatException * @throws ParseException * @throws ParserConfigurationException * @throws SAXException - * @throws TskCoreException + * @throws TskCoreException */ - private Collection getDownloadArtifacts(Content dataSource, AbstractFile origFile, File tempFile)throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { - Collection bbartifacts = null; + private Collection getDownloadArtifacts(Content dataSource, AbstractFile origFile, File tempFile) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException, TskCoreException { + Collection bbartifacts = null; try { - while(true){ - NSDictionary root = (NSDictionary)PropertyListParser.parse(tempFile); + while (true) { + NSDictionary root = (NSDictionary) PropertyListParser.parse(tempFile); - if(root == null) + if (root == null) { break; + } - NSArray nsArray = (NSArray)root.get(PLIST_KEY_DOWNLOAD_HISTORY); + NSArray nsArray = (NSArray) root.get(PLIST_KEY_DOWNLOAD_HISTORY); - if(nsArray == null) + if (nsArray == null) { break; - + } + NSObject[] objectArray = nsArray.getArray(); bbartifacts = new ArrayList<>(); - for(NSObject obj: objectArray){ - if(obj instanceof NSDictionary){ - bbartifacts.addAll(parseDownloadDictionary(dataSource, origFile, (NSDictionary)obj)); + for (NSObject obj : objectArray) { + if (obj instanceof NSDictionary) { + bbartifacts.addAll(parseDownloadDictionary(dataSource, origFile, (NSDictionary) obj)); } } break; } - + } catch (PropertyListFormatException ex) { PropertyListFormatException plfe = new PropertyListFormatException(origFile.getName() + ": " + ex.getMessage()); plfe.setStackTrace(ex.getStackTrace()); @@ -536,18 +529,20 @@ final class 
ExtractSafari extends Extract { return bbartifacts; } - + /** * Finds the cookies in the tempFile creating a list of BlackboardArtifacts * each representing one cookie. * * @param origFile Original Cookies.binarycookie file from case * @param tempFile Temporary copy of the cookies file + * * @return List of Blackboard Artifacts, one for each cookie + * * @throws TskCoreException * @throws IOException */ - private Collection getCookieArtifacts(AbstractFile origFile, File tempFile, IngestJobContext context) throws TskCoreException, IOException { + private Collection getCookieArtifacts(AbstractFile origFile, File tempFile) throws TskCoreException, IOException { Collection bbartifacts = null; BinaryCookieReader reader = BinaryCookieReader.initalizeReader(tempFile); @@ -559,20 +554,20 @@ final class ExtractSafari extends Extract { if (context.dataSourceIngestIsCancelled()) { return bbartifacts; } - + Cookie cookie = iter.next(); - + bbartifacts.add( createArtifactWithAttributes( - TSK_WEB_COOKIE, - origFile, + BlackboardArtifact.Type.TSK_WEB_COOKIE, + origFile, createCookieAttributes( - cookie.getURL(), - cookie.getCreationDate(), - null, - cookie.getExpirationDate(), - cookie.getName(), cookie.getValue(), - this.getName(), + cookie.getURL(), + cookie.getCreationDate(), + null, + cookie.getExpirationDate(), + cookie.getName(), cookie.getValue(), + this.getDisplayName(), NetworkUtils.extractDomain(cookie.getURL())))); } } @@ -585,11 +580,12 @@ final class ExtractSafari extends Extract { * an artifact with the bookmark information. 
* * @param bbartifacts BlackboardArtifact list to add new the artifacts to - * @param origFile The origFile Bookmark.plist file from the case - * @param root NSDictionary object to parse + * @param origFile The origFile Bookmark.plist file from the case + * @param root NSDictionary object to parse + * * @throws TskCoreException */ - private void parseBookmarkDictionary(Collection bbartifacts, AbstractFile origFile, NSDictionary root, IngestJobContext context) throws TskCoreException { + private void parseBookmarkDictionary(Collection bbartifacts, AbstractFile origFile, NSDictionary root) throws TskCoreException { if (context.dataSourceIngestIsCancelled()) { return; @@ -600,7 +596,7 @@ final class ExtractSafari extends Extract { if (children != null) { for (NSObject obj : children.getArray()) { - parseBookmarkDictionary(bbartifacts, origFile, (NSDictionary) obj, context); + parseBookmarkDictionary(bbartifacts, origFile, (NSDictionary) obj); } } } else if (root.containsKey(PLIST_KEY_URL)) { @@ -621,23 +617,25 @@ final class ExtractSafari extends Extract { } if (url != null || title != null) { - bbartifacts.add(createArtifactWithAttributes(TSK_WEB_BOOKMARK, origFile, - createBookmarkAttributes(url, - title, - null, - getName(), + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, origFile, + createBookmarkAttributes(url, + title, + null, + getDisplayName(), NetworkUtils.extractDomain(url)))); } } } - + /** * Parse the NSDictionary object that represents one download. * * @param origFile Download.plist file from the case - * @param entry One NSDictionary Object that represents one download - * instance + * @param entry One NSDictionary Object that represents one download + * instance + * * @return a Blackboard Artifact for the download. 
+ * * @throws TskCoreException */ private Collection parseDownloadDictionary(Content dataSource, AbstractFile origFile, NSDictionary entry) throws TskCoreException { @@ -646,7 +644,7 @@ final class ExtractSafari extends Extract { String path = null; Long time = null; Long pathID = null; - + NSString nsstring = (NSString) entry.get(PLIST_KEY_DOWNLOAD_URL); if (nsstring != null) { url = nsstring.toString(); @@ -663,16 +661,16 @@ final class ExtractSafari extends Extract { time = date.getDate().getTime(); } - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(TSK_WEB_DOWNLOAD, origFile, createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getName())); + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, origFile, createDownloadAttributes(path, pathID, url, time, NetworkUtils.extractDomain(url), getDisplayName())); bbartifacts.add(webDownloadArtifact); - + // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. 
- for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, + for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(path), FilenameUtils.getPath(path))) { bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); break; } - + return bbartifacts; } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java index 50c2c6afd4..8940b46ca9 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractSru.java @@ -59,14 +59,8 @@ final class ExtractSru extends Extract { private static final Logger logger = Logger.getLogger(ExtractSru.class.getName()); - private IngestJobContext context; - private static final String APPLICATION_USAGE_SOURCE_NAME = "System Resource Usage - Application Usage"; //NON-NLS private static final String NETWORK_USAGE_SOURCE_NAME = "System Resource Usage - Network Usage"; - -// private static final String ARTIFACT_ATTRIBUTE_NAME = "TSK_ARTIFACT_NAME"; //NON-NLS - private static final String MODULE_NAME = "extractSRU"; //NON-NLS - private static final String SRU_TOOL_FOLDER = "markmckinnon"; //NON-NLS private static final String SRU_TOOL_NAME_WINDOWS_32 = "Export_Srudb_32.exe"; //NON-NLS private static final String SRU_TOOL_NAME_WINDOWS_64 = "Export_Srudb_64.exe"; //NON-NLS @@ -76,12 +70,14 @@ final class ExtractSru extends Extract { private static final String SRU_ERROR_FILE_NAME = "Error.txt"; //NON-NLS private static final Map applicationFilesFound = new HashMap<>(); + private final IngestJobContext context; @Messages({ - "ExtractSru_module_name=System Resource Usage Extractor" + "ExtractSru_module_name=System Resource Usage Analyzer" }) - ExtractSru() { - 
super(Bundle.ExtractSru_module_name()); + ExtractSru(IngestJobContext context) { + super(Bundle.ExtractSru_module_name(), context); + this.context = context; } @Messages({ @@ -90,9 +86,7 @@ final class ExtractSru extends Extract { }) @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { - - this.context = context; + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { String modOutPath = Case.getCurrentCase().getModuleDirectory() + File.separator + "sru"; File dir = new File(modOutPath); @@ -351,23 +345,23 @@ final class ExtractSru extends Extract { Collection bbattributes = Arrays.asList( new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getDisplayName(), formattedApplicationName),//NON-NLS new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getDisplayName(), userName), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getDisplayName(), executionTime), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_SENT, getName(), bytesSent), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_SENT, getDisplayName(), bytesSent), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_RECEIVED, getName(), bytesRecvd), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_BYTES_RECEIVED, getDisplayName(), bytesRecvd), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), NETWORK_USAGE_SOURCE_NAME)); + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getDisplayName(), NETWORK_USAGE_SOURCE_NAME)); try { - BlackboardArtifact bbart = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, sruAbstractFile, bbattributes); + BlackboardArtifact bbart = 
createArtifactWithAttributes(BlackboardArtifact.Type.TSK_PROG_RUN, sruAbstractFile, bbattributes); bba.add(bbart); BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); if (associateBbArtifact != null) { @@ -382,7 +376,7 @@ final class ExtractSru extends Extract { logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bba); } } @@ -411,19 +405,19 @@ final class ExtractSru extends Extract { Collection bbattributes = Arrays.asList( new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getDisplayName(), formattedApplicationName),//NON-NLS new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_USER_NAME, getDisplayName(), userName), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getName(), + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, getDisplayName(), executionTime), new BlackboardAttribute( - BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getName(), APPLICATION_USAGE_SOURCE_NAME)); + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, getDisplayName(), APPLICATION_USAGE_SOURCE_NAME)); try { - BlackboardArtifact bbart = createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN, sruAbstractFile, bbattributes); + BlackboardArtifact bbart = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_PROG_RUN, sruAbstractFile, bbattributes); bba.add(bbart); BlackboardArtifact associateBbArtifact = createAssociatedArtifact(applicationName.toLowerCase(), bbart); if (associateBbArtifact != null) { @@ -438,10 +432,10 @@ final class ExtractSru extends Extract { logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS } - 
if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bba); } - + } /** diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java index 50cf7ce90b..7a39033ad3 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractWebAccountType.java @@ -43,9 +43,11 @@ import org.sleuthkit.datamodel.TskCoreException; class ExtractWebAccountType extends Extract { private static final Logger logger = Logger.getLogger(ExtractWebAccountType.class.getName()); + private final IngestJobContext context; - ExtractWebAccountType() { - super(NbBundle.getMessage(ExtractWebAccountType.class, "ExtractWebAccountType.moduleName.text")); + ExtractWebAccountType(IngestJobContext context) { + super(NbBundle.getMessage(ExtractWebAccountType.class, "ExtractWebAccountType.moduleName.text"), context); + this.context = context; } private static final List QUERY_ARTIFACTS = Arrays.asList( @@ -53,7 +55,7 @@ class ExtractWebAccountType extends Extract { new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT) ); - private void extractDomainRoles(Content dataSource, IngestJobContext context) { + private void extractDomainRoles(Content dataSource) { try { // Get web history blackboard artifacts Collection listArtifacts = currentCase.getSleuthkitCase().getBlackboard().getArtifacts( @@ -84,7 +86,7 @@ class ExtractWebAccountType extends Extract { /** * Extract and store any role found in the given artifact. * - * @param artifact The original artifact + * @param artifact The original artifact * @param roleProcessor Object to collect and process domain roles. 
* * @throws TskCoreException @@ -109,21 +111,21 @@ class ExtractWebAccountType extends Extract { roleFound = findPhpBbRole(url, domain, artifact, roleProcessor) || roleFound; roleFound = findJoomlaRole(url, domain, artifact, roleProcessor) || roleFound; roleFound = findWordPressRole(url, domain, artifact, roleProcessor) || roleFound; - + // if no other role for this url was found and it is a TSK_SERVICE_ACCOUNT, add a general user role. if (!roleFound && artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT.getTypeID()) { roleProcessor.addRole(domain, domain, Role.USER, url, artifact); } } - - + /** * Extract myBB role. * - * @param url The full URL. - * @param domain The domain. - * @param artifact The original artifact. + * @param url The full URL. + * @param domain The domain. + * @param artifact The original artifact. * @param roleProcessor Object to collect and process domain roles. + * * @return True if a myBB role is found. */ private boolean findMyBbRole(String url, String domain, BlackboardArtifact artifact, RoleProcessor roleProcessor) { @@ -146,10 +148,11 @@ class ExtractWebAccountType extends Extract { /** * Extract phpBB role. * - * @param url The full URL. - * @param domain The domain. - * @param artifact The original artifact. + * @param url The full URL. + * @param domain The domain. + * @param artifact The original artifact. * @param roleProcessor Object to collect and process domain roles. + * * @return True if a phpBB role is found. */ private boolean findPhpBbRole(String url, String domain, BlackboardArtifact artifact, RoleProcessor roleProcessor) { @@ -172,10 +175,11 @@ class ExtractWebAccountType extends Extract { /** * Extract Joomla role. * - * @param url The full URL. - * @param domain The domain. - * @param artifact The original artifact. + * @param url The full URL. + * @param domain The domain. + * @param artifact The original artifact. * @param roleProcessor Object to collect and process domain roles. 
+ * * @return True if a Joomla role is found. */ private boolean findJoomlaRole(String url, String domain, BlackboardArtifact artifact, RoleProcessor roleProcessor) { @@ -192,10 +196,11 @@ class ExtractWebAccountType extends Extract { /** * Extract WordPress role. * - * @param url The full URL. - * @param domain The domain. - * @param artifact The original artifact. + * @param url The full URL. + * @param domain The domain. + * @param artifact The original artifact. * @param roleProcessor Object to collect and process domain roles. + * * @return True if a WordPress role is found. */ private boolean findWordPressRole(String url, String domain, BlackboardArtifact artifact, RoleProcessor roleProcessor) { @@ -220,8 +225,8 @@ class ExtractWebAccountType extends Extract { } @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { - extractDomainRoles(dataSource, context); + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { + extractDomainRoles(dataSource); } /** @@ -245,10 +250,10 @@ class ExtractWebAccountType extends Extract { * domain/platform - The level of the role is higher than previously * seen for this domain/platform * - * @param domain The domain. + * @param domain The domain. * @param platform The probable platform for this role. - * @param role The role level. - * @param url The URL (stored for efficiency). + * @param role The role level. + * @param url The URL (stored for efficiency). * @param artifact The original blackboard artifact the URL came from. 
*/ void addRole(String domain, String platform, Role role, String url, BlackboardArtifact artifact) { @@ -298,9 +303,9 @@ class ExtractWebAccountType extends Extract { NbBundle.getMessage(this.getClass(), "ExtractWebAccountType.parentModuleName"), role.getUrl())); - artifactList.add(createArtifactWithAttributes(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE, file, bbattributes)); + artifactList.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_ACCOUNT_TYPE, file, bbattributes)); } - + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(artifactList); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java index 5cc6633205..0d866195a6 100755 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractZoneIdentifier.java @@ -54,15 +54,22 @@ final class ExtractZoneIdentifier extends Extract { private static final String ZONE_IDENTIFIER_FILE = "%:Zone.Identifier"; //NON-NLS private static final String ZONE_IDENTIFIER = ":Zone.Identifier"; //NON-NLS private Content dataSource; + private final IngestJobContext context; @Messages({ + "ExtractZone_displayName= Zone Identifier Analyzer", "ExtractZone_process_errMsg_find=A failure occured while searching for :Zone.Indentifier files.", "ExtractZone_process_errMsg=An error occured processing ':Zone.Indentifier' files.", "ExtractZone_progress_Msg=Extracting :Zone.Identifer files" }) + ExtractZoneIdentifier(IngestJobContext context) { + super(Bundle.ExtractZone_displayName(), context); + this.context = context; + } + @Override - void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; 
progressBar.progress(Bundle.ExtractZone_progress_Msg()); @@ -100,7 +107,7 @@ final class ExtractZoneIdentifier extends Extract { } try { - processZoneFile(context, zoneFile, associatedObjectArtifacts, downloadArtifacts, knownPathIDs); + processZoneFile(zoneFile, associatedObjectArtifacts, downloadArtifacts, knownPathIDs); } catch (TskCoreException ex) { addErrorMessage(Bundle.ExtractZone_process_errMsg()); String message = String.format("Failed to process zone identifier file %s", zoneFile.getName()); //NON-NLS @@ -117,14 +124,13 @@ final class ExtractZoneIdentifier extends Extract { /** * Process a single Zone Identifier file. * - * @param context IngestJobContext * @param zoneFile Zone Identifier file * @param associatedObjectArtifacts List for TSK_ASSOCIATED_OBJECT artifacts * @param downloadArtifacts List for TSK_WEB_DOWNLOAD artifacts * * @throws TskCoreException */ - private void processZoneFile(IngestJobContext context, + private void processZoneFile( AbstractFile zoneFile, Collection associatedObjectArtifacts, Collection downloadArtifacts, Set knownPathIDs) throws TskCoreException { @@ -254,7 +260,7 @@ final class ExtractZoneIdentifier extends Extract { RecentActivityExtracterModuleFactory.getModuleName(), zoneInfo.getZoneIdAsString())); } - return createArtifactWithAttributes(TSK_WEB_DOWNLOAD, zoneFile, bbattributes); + return createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, zoneFile, bbattributes); } /** @@ -323,7 +329,7 @@ final class ExtractZoneIdentifier extends Extract { properties.load(new ReadContentInputStream(zoneFile)); } catch (IllegalArgumentException ex) { String message = String.format("Unable to parse Zone Id for File %s", fileName); //NON-NLS - LOG.log(Level.WARNING, message); + LOG.log(Level.WARNING, message); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 87bbb0232d..c3ff5017fe 100644 --- 
a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -1,19 +1,19 @@ - /* +/* * * Autopsy Forensic Browser - * + * * Copyright 2012-2021 Basis Technology Corp. - * + * * Copyright 2012 42six Solutions. * Contact: aebadirad 42six com * Project Contact/Architect: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -56,7 +56,6 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -90,67 +89,68 @@ class Firefox extends Extract { private static final String BOOKMARK_QUERY = "SELECT fk, moz_bookmarks.title, url, (moz_bookmarks.dateAdded/1000000) AS dateAdded FROM moz_bookmarks INNER JOIN moz_places ON moz_bookmarks.fk=moz_places.id"; //NON-NLS private static final String DOWNLOAD_QUERY = "SELECT target, source,(startTime/1000000) AS startTime, maxBytes FROM moz_downloads"; //NON-NLS private static final String DOWNLOAD_QUERY_V24 = "SELECT url, content AS target, (lastModified/1000000) AS lastModified " - + " FROM moz_places, moz_annos, moz_anno_attributes " - + " WHERE moz_places.id = moz_annos.place_id" - + " AND moz_annos.anno_attribute_id = moz_anno_attributes.id" - + " AND 
moz_anno_attributes.name='downloads/destinationFileURI'"; //NON-NLS + + " FROM moz_places, moz_annos, moz_anno_attributes " + + " WHERE moz_places.id = moz_annos.place_id" + + " AND moz_annos.anno_attribute_id = moz_anno_attributes.id" + + " AND moz_anno_attributes.name='downloads/destinationFileURI'"; //NON-NLS private static final String FORMHISTORY_QUERY = "SELECT fieldname, value FROM moz_formhistory"; private static final String FORMHISTORY_QUERY_V64 = "SELECT fieldname, value, timesUsed, firstUsed, lastUsed FROM moz_formhistory"; private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; - Firefox() { - super(NbBundle.getMessage(Firefox.class, "Firefox.moduleName")); + Firefox(IngestJobContext context) { + super(NbBundle.getMessage(Firefox.class, "Firefox.moduleName"), context); + this.context = context; } @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = context; dataFound = false; long ingestJobId = context.getJobId(); - + progressBar.progress(Bundle.Progress_Message_Firefox_History()); this.getHistory(context.getJobId()); - + if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Firefox_Bookmarks()); this.getBookmark(ingestJobId); - + if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Firefox_Downloads()); this.getDownload(ingestJobId); - + if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Firefox_Cookies()); this.getCookie(ingestJobId); - + if (context.dataSourceIngestIsCancelled()) { return; } - + progressBar.progress(Bundle.Progress_Message_Firefox_FormHistory()); this.getFormsHistory(ingestJobId); - + if (context.dataSourceIngestIsCancelled()) { 
return; } - + progressBar.progress(Bundle.Progress_Message_Firefox_AutoFill()); this.getAutofillProfiles(ingestJobId); } /** * Get Firefox history. + * * @param ingestJobId The ingest job id. */ private void getHistory(long ingestJobId) { @@ -161,7 +161,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errFetchingFiles"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -175,11 +175,11 @@ class Firefox extends Extract { Collection bbartifacts = new ArrayList<>(); int j = 0; for (AbstractFile historyFile : historyFiles) { - + if (context.dataSourceIngestIsCancelled()) { return; } - + if (historyFile.getSize() == 0) { continue; } @@ -192,14 +192,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox web history artifacts file '%s' (id=%d).", fileName, historyFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox web history artifacts file '%s' (id=%d).", temps, fileName, historyFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } @@ -208,16 +208,16 @@ class Firefox extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, HISTORY_QUERY); - logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new 
Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, HISTORY_QUERY); + logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if (context.dataSourceIngestIsCancelled()) { return; } - + String url = result.get("url").toString(); - + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, RecentActivityExtracterModuleFactory.getModuleName(), @@ -238,12 +238,12 @@ class Firefox extends Extract { String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_HISTORY, historyFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_HISTORY artifact for file %d", historyFile.getId()), ex); } @@ -252,13 +252,14 @@ class Firefox extends Extract { dbFile.delete(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } /** * Queries for bookmark files and adds artifacts + * * @param ingestJobId The ingest job id. 
*/ private void getBookmark(long ingestJobId) { @@ -270,7 +271,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getBookmark.errMsg.errFetchFiles"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -294,14 +295,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox bookmark artifacts file '%s' (id=%d).", fileName, bookmarkFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox bookmark artifacts file '%s' (id=%d).", temps, fileName, bookmarkFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getBookmark.errMsg.errAnalyzeFile", - this.getName(), fileName)); + this.getDisplayName(), fileName)); continue; } File dbFile = new File(temps); @@ -309,14 +310,14 @@ class Firefox extends Extract { dbFile.delete(); break; } - List> tempList = this.dbConnect(temps, BOOKMARK_QUERY); - logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, BOOKMARK_QUERY); + logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if (context.dataSourceIngestIsCancelled()) { break; } - + String url = result.get("url").toString(); Collection bbattributes = new ArrayList<>(); @@ -337,11 +338,11 @@ class 
Firefox extends Extract { String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_BOOKMARK, bookmarkFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_BOOKMARK artifact for file %d", bookmarkFile.getId()), ex); } @@ -350,13 +351,14 @@ class Firefox extends Extract { dbFile.delete(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } /** * Queries for cookies file and adds artifacts + * * @param ingestJobId The ingest job id. 
*/ private void getCookie(long ingestJobId) { @@ -367,7 +369,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getCookie.errMsg.errFetchFile"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -383,7 +385,7 @@ class Firefox extends Extract { if (context.dataSourceIngestIsCancelled()) { return; } - + if (cookiesFile.getSize() == 0) { continue; } @@ -395,14 +397,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox cookie artifacts file '%s' (id=%d).", fileName, cookiesFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox cookie artifacts file '%s' (id=%d).", temps, fileName, cookiesFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getCookie.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getCookie.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } @@ -419,14 +421,14 @@ class Firefox extends Extract { query = COOKIE_QUERY_V3; } - List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, query); + logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if 
(context.dataSourceIngestIsCancelled()) { break; } - + String host = result.get("host").toString(); Collection bbattributes = new ArrayList<>(); @@ -448,18 +450,18 @@ class Firefox extends Extract { if (checkColumn == true) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - RecentActivityExtracterModuleFactory.getModuleName(), - (Long.valueOf(result.get("creationTime").toString())))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("creationTime").toString())))); //NON-NLS } String domain = extractDomain(host); if (domain != null && domain.isEmpty() == false) { domain = domain.replaceFirst("^\\.+(?!$)", ""); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); + RecentActivityExtracterModuleFactory.getModuleName(), domain)); } try { - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_COOKIE, cookiesFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_COOKIE, cookiesFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_COOKIE artifact for file %d", cookiesFile.getId()), ex); } @@ -475,6 +477,7 @@ class Firefox extends Extract { /** * Queries for downloads files and adds artifacts + * * @param ingestJobId The ingest job id. */ private void getDownload(long ingestJobId) { @@ -486,6 +489,7 @@ class Firefox extends Extract { * Finds downloads artifacts from Firefox data from versions before 24.0. * * Downloads were stored in a separate downloads database. + * * @param ingestJobId The ingest job id. 
*/ private void getDownloadPreVersion24(long ingestJobId) { @@ -497,7 +501,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getDlPre24.errMsg.errFetchFiles"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -522,14 +526,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox download artifacts file '%s' (id=%d).", fileName, downloadsFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox download artifacts file '%s' (id=%d).", temps, fileName, downloadsFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getDlPre24.errMsg.errAnalyzeFiles", - this.getName(), fileName)); + this.getDisplayName(), fileName)); continue; } File dbFile = new File(temps); @@ -538,14 +542,14 @@ class Firefox extends Extract { break; } - List> tempList = this.dbConnect(temps, DOWNLOAD_QUERY); - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + List> tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY); + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if (context.dataSourceIngestIsCancelled()) { break; } - + String source = result.get("source").toString(); Collection bbattributes = new ArrayList<>(); @@ -555,7 +559,7 @@ class Firefox 
extends Extract { source)); //NON-NLS //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", ((result.get("source").toString() != null) ? EscapeUtil.decodeURL(result.get("source").toString()) : ""))); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), + RecentActivityExtracterModuleFactory.getModuleName(), (Long.valueOf(result.get("startTime").toString())))); //NON-NLS String target = result.get("target").toString(); //NON-NLS @@ -588,11 +592,11 @@ class Firefox extends Extract { domain)); //NON-NLS } try { - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); bbartifacts.add(webDownloadArtifact); // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. 
- for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, + for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); break; @@ -601,18 +605,18 @@ class Firefox extends Extract { logger.log(Level.SEVERE, String.format("Error creating TSK_WEB_DOWNLOAD or TSK_ASSOCIATED_ARTIFACT artifact for file '%d'", downloadsFile.getId()), ex); //NON-NLS } - + } if (errors > 0) { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "Firefox.getDlPre24.errMsg.errParsingArtifacts", - this.getName(), errors)); + this.getDisplayName(), errors)); } j++; dbFile.delete(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } @@ -621,6 +625,7 @@ class Firefox extends Extract { * Gets download artifacts from Firefox data from version 24. * * Downloads are stored in the places database. + * * @param ingestJobId The ingest job id. 
*/ private void getDownloadVersion24(long ingestJobId) { @@ -631,7 +636,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getDlV24.errMsg.errFetchFiles"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -656,14 +661,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox download artifacts file '%s' (id=%d).", fileName, downloadsFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getHistory.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox download artifacts file '%s' (id=%d).", temps, fileName, downloadsFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getDlV24.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getDlV24.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } @@ -673,17 +678,17 @@ class Firefox extends Extract { break; } - List> tempList = this.dbConnect(temps, DOWNLOAD_QUERY_V24); + List> tempList = this.querySQLiteDb(temps, DOWNLOAD_QUERY_V24); - logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getName(), temps, tempList.size()}); //NON-NLS + logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), temps, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if (context.dataSourceIngestIsCancelled()) { break; } - + String url = result.get("url").toString(); - + Collection bbattributes = new 
ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL, @@ -721,14 +726,14 @@ class Firefox extends Extract { String domain = extractDomain(url); if (domain != null && domain.isEmpty() == false) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN, - RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), domain)); //NON-NLS } try { - BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); + BlackboardArtifact webDownloadArtifact = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, downloadsFile, bbattributes); bbartifacts.add(webDownloadArtifact); // find the downloaded file and create a TSK_ASSOCIATED_OBJECT for it, associating it with the TSK_WEB_DOWNLOAD artifact. - for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, + for (AbstractFile downloadedFile : currentCase.getSleuthkitCase().getFileManager().findFilesExactNameExactPath(dataSource, FilenameUtils.getName(downloadedFilePath), FilenameUtils.getPath(downloadedFilePath))) { bbartifacts.add(createAssociatedArtifact(downloadedFile, webDownloadArtifact)); break; @@ -736,42 +741,43 @@ class Firefox extends Extract { } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Error creating associated object artifact for file '%s'", downloadedFilePath), ex); //NON-NLS - } + } } if (errors > 0) { this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getDlV24.errMsg.errParsingArtifacts", - this.getName(), errors)); + this.getDisplayName(), errors)); } j++; dbFile.delete(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } - + /** - * Gets data from formshistory.sqlite database. - * Parses and creates artifacts. 
+ * Gets data from formshistory.sqlite database. Parses and creates + * artifacts. + * * @param ingestJobId The ingest job id. */ private void getFormsHistory(long ingestJobId) { FileManager fileManager = currentCase.getServices().getFileManager(); List formHistoryFiles; - + // Some fields are just noisy and can me excluded Set excludedFieldNames = new HashSet<>(Arrays.asList( - "it", // some kind of timestamp - "ts" // some kind of timestamp - )); - + "it", // some kind of timestamp + "ts" // some kind of timestamp + )); + try { formHistoryFiles = fileManager.findFiles(dataSource, "formhistory.sqlite", "Firefox"); //NON-NLS } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getFormsAutofill.errMsg.errFetchingFiles"); logger.log(Level.WARNING, msg); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -797,14 +803,14 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox web history artifacts file '%s' (id=%d).", fileName, formHistoryFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getFormsAutofill.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getFormsAutofill.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp sqlite db file '%s' for Firefox web history artifacts file '%s' (id=%d).", tempFilePath, fileName, formHistoryFile.getId()), ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getFormsAutofill.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getFormsAutofill.errMsg.errAnalyzeFile", this.getDisplayName(), fileName)); continue; } @@ -813,53 +819,53 @@ class Firefox extends Extract { dbFile.delete(); break; } - + // The table schema is a little 
different in newer version of Firefox boolean isFirefoxV64 = Util.checkColumn("timesUsed", "moz_formhistory", tempFilePath); String formHistoryQuery = (isFirefoxV64) ? FORMHISTORY_QUERY_V64 : FORMHISTORY_QUERY; - - List> tempList = this.dbConnect(tempFilePath, formHistoryQuery); - logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{getName(), tempFilePath, tempList.size()}); //NON-NLS + + List> tempList = this.querySQLiteDb(tempFilePath, formHistoryQuery); + logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{getDisplayName(), tempFilePath, tempList.size()}); //NON-NLS for (HashMap result : tempList) { - + if (context.dataSourceIngestIsCancelled()) { break; } - + Collection bbattributes = new ArrayList<>(); - + String fieldName = ((result.get("fieldname").toString() != null) ? result.get("fieldname").toString() : ""); // filter out unuseful values if (excludedFieldNames.contains(fieldName.toLowerCase())) { continue; } - + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, RecentActivityExtracterModuleFactory.getModuleName(), fieldName)); //NON-NLS - + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_VALUE, RecentActivityExtracterModuleFactory.getModuleName(), ((result.get("value").toString() != null) ? 
result.get("value").toString() : ""))); //NON-NLS - + // Newer versions of firefox have additional columns if (isFirefoxV64) { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, - RecentActivityExtracterModuleFactory.getModuleName(), - (Long.valueOf(result.get("firstUsed").toString()) / 1000000))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("firstUsed").toString()) / 1000000))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - RecentActivityExtracterModuleFactory.getModuleName(), - (Long.valueOf(result.get("lastUsed").toString()) / 1000000))); //NON-NLS + RecentActivityExtracterModuleFactory.getModuleName(), + (Long.valueOf(result.get("lastUsed").toString()) / 1000000))); //NON-NLS bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_COUNT, - RecentActivityExtracterModuleFactory.getModuleName(), - (Integer.valueOf(result.get("timesUsed").toString())))); //NON-NLS - + RecentActivityExtracterModuleFactory.getModuleName(), + (Integer.valueOf(result.get("timesUsed").toString())))); //NON-NLS + } try { // Add artifact - bbartifacts.add(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, formHistoryFile, bbattributes)); + bbartifacts.add(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_FORM_AUTOFILL, formHistoryFile, bbattributes)); } catch (TskCoreException ex) { logger.log(Level.SEVERE, String.format("Failed to create TSK_WEB_FORM_AUTOFILL artifact for file %d", formHistoryFile.getId()), ex); } @@ -868,15 +874,15 @@ class Firefox extends Extract { dbFile.delete(); } - if(!context.dataSourceIngestIsCancelled()) { + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } - - + /** - * Gets data from autofill-profiles.json file. - * Parses file and makes artifacts. + * Gets data from autofill-profiles.json file. Parses file and makes + * artifacts. + * * @param ingestJobId The ingest job id. 
*/ private void getAutofillProfiles(long ingestJobId) { @@ -887,7 +893,7 @@ class Firefox extends Extract { } catch (TskCoreException ex) { String msg = NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errGettingFiles"); logger.log(Level.SEVERE, msg, ex); - this.addErrorMessage(this.getName() + ": " + msg); + this.addErrorMessage(this.getDisplayName() + ": " + msg); return; } @@ -898,7 +904,6 @@ class Firefox extends Extract { dataFound = true; int j = 0; - while (j < autofillProfilesFiles.size()) { AbstractFile profileFile = autofillProfilesFiles.get(j++); if (profileFile.getSize() == 0) { @@ -911,17 +916,17 @@ class Firefox extends Extract { logger.log(Level.WARNING, String.format("Error reading Firefox Autofill profiles artifacts file '%s' (id=%d).", profileFile.getName(), profileFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzingFile", - this.getName(), profileFile.getName())); + this.getDisplayName(), profileFile.getName())); continue; } catch (IOException ex) { logger.log(Level.SEVERE, String.format("Error writing temp file '%s' for Firefox Autofill profiles file '%s' (id=%d).", temps, profileFile.getName(), profileFile.getId()), ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzingFile", - this.getName(), profileFile.getName())); + this.getDisplayName(), profileFile.getName())); continue; } - logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getName(), temps}); //NON-NLS + logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{getDisplayName(), temps}); //NON-NLS File dbFile = new File(temps); if (context.dataSourceIngestIsCancelled()) { dbFile.delete(); @@ -934,23 +939,23 @@ class Firefox extends Extract { } catch (FileNotFoundException ex) { logger.log(Level.SEVERE, "Error while trying to read the Autofill profiles json file for 
Firefox.", ex); //NON-NLS this.addErrorMessage( - NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzeFile", this.getName(), + NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzeFile", this.getDisplayName(), profileFile.getName())); continue; } final JsonParser parser = new JsonParser(); - + JsonObject jsonRootObject; JsonArray jAddressesArray; - + try { jsonRootObject = parser.parse(tempReader).getAsJsonObject(); jAddressesArray = jsonRootObject.getAsJsonArray("addresses"); //NON-NLS } catch (JsonIOException | JsonSyntaxException | IllegalStateException ex) { logger.log(Level.WARNING, "Error parsing Json for Firefox Autofill profiles.", ex); //NON-NLS this.addErrorMessage(NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzingFile3", - this.getName(), profileFile.getName())); + this.getDisplayName(), profileFile.getName())); continue; } @@ -966,142 +971,144 @@ class Firefox extends Extract { logger.log(Level.SEVERE, "No case open, bailing.", ex); //NON-NLS return; } - + for (JsonElement result : jAddressesArray) { JsonObject address = result.getAsJsonObject(); if (address == null) { continue; } - + JsonElement nameEl = address.get("name"); //NON-NLS String name = (nameEl != null) ? nameEl.getAsString() : ""; - + JsonElement emailEl = address.get("email"); //NON-NLS String email = (emailEl != null) ? emailEl.getAsString() : ""; - + JsonElement telEl = address.get("tel"); //NON-NLS String tel = (telEl != null) ? telEl.getAsString() : ""; JsonElement telCountryCodeEl = address.get("tel-country-code"); //NON-NLS String telCountryCode = (telCountryCodeEl != null) ? telCountryCodeEl.getAsString() : ""; JsonElement telNationalEl = address.get("tel-national"); //NON-NLS String telNational = (telNationalEl != null) ? 
telNationalEl.getAsString() : ""; - + String phoneNumber = makeTelNumber(tel, telCountryCode, telNational); JsonElement createdEl = address.get("timeCreated"); //NON-NLS - Long datetimeCreated = (createdEl != null) ? createdEl.getAsLong()/1000 : Long.valueOf(0); + Long datetimeCreated = (createdEl != null) ? createdEl.getAsLong() / 1000 : Long.valueOf(0); JsonElement lastusedEl = address.get("timeLastUsed"); //NON-NLS - Long datetimeLastUsed = (lastusedEl != null) ? lastusedEl.getAsLong()/1000 : Long.valueOf(0); + Long datetimeLastUsed = (lastusedEl != null) ? lastusedEl.getAsLong() / 1000 : Long.valueOf(0); JsonElement timesUsedEl = address.get("timesUsed"); //NON-NLS - Integer timesUsed = (timesUsedEl != null) ? timesUsedEl.getAsShort() : Integer.valueOf(0); - + Integer timesUsed = (timesUsedEl != null) ? timesUsedEl.getAsShort() : Integer.valueOf(0); + JsonElement addressLine1El = address.get("address-line1"); //NON-NLS String addressLine1 = (addressLine1El != null) ? addressLine1El.getAsString() : ""; JsonElement addressLine2El = address.get("address-line2"); //NON-NLS String addressLine2 = (addressLine2El != null) ? addressLine2El.getAsString() : ""; JsonElement addressLine3El = address.get("address-line3"); //NON-NLS String addressLine3 = (addressLine3El != null) ? addressLine3El.getAsString() : ""; - + JsonElement postalCodeEl = address.get("postal-code"); //NON-NLS String postalCode = (postalCodeEl != null) ? postalCodeEl.getAsString() : ""; JsonElement countryEl = address.get("country"); //NON-NLS String country = (countryEl != null) ? 
countryEl.getAsString() : ""; - - String mailingAddress = makeFullAddress(addressLine1, addressLine2, addressLine3, postalCode, country ); - + + String mailingAddress = makeFullAddress(addressLine1, addressLine2, addressLine3, postalCode, country); + try { - helper.addWebFormAddress(name, email, phoneNumber, + helper.addWebFormAddress(name, email, phoneNumber, mailingAddress, datetimeCreated, datetimeLastUsed, timesUsed); } catch (TskCoreException | Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Error while trying to insert Firefox Autofill profile artifact{0}", ex); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "Firefox.getAutofillProfiles.errMsg.errAnalyzingFile4", - this.getName(), profileFile.getName())); + this.getDisplayName(), profileFile.getName())); } } dbFile.delete(); } } - + /** * Extract the domain from the supplied URL. This method does additional * checks to detect invalid URLs. - * + * * @param url The URL from which to extract the domain. - * + * * @return The domain. */ private String extractDomain(String url) { if (url == null || url.isEmpty()) { return url; } - + if (url.toLowerCase().startsWith(PLACE_URL_PREFIX)) { /* * Ignore URLs that begin with the matched text. */ return null; } - + return NetworkUtils.extractDomain(url); } - - + /** - * Returns a phone number based on input number or components of phone number. - * - * @param tel full number, if available + * Returns a phone number based on input number or components of phone + * number. 
+ * + * @param tel full number, if available * @param telCountryCode country code - * @param telNational full national number - * - * @return phone number, or an empty string if no number can be deciphered from input + * @param telNational full national number + * + * @return phone number, or an empty string if no number can be deciphered + * from input */ private String makeTelNumber(String tel, String telCountryCode, String telNational) { - + if (tel != null && !tel.isEmpty()) { return tel; } - - if ((telCountryCode != null && !telCountryCode.isEmpty()) && - (telNational != null && !telNational.isEmpty())) { + + if ((telCountryCode != null && !telCountryCode.isEmpty()) + && (telNational != null && !telNational.isEmpty())) { return telCountryCode + telNational; } - + return ""; } - - /** + + /** * Returns a full postal address from multiple address fields. - * + * * @parm addressLine1 * @parm addressLine2 * @parm addressLine3 * @parm postalCode * @parm country - * + * * @return full address */ - private String makeFullAddress(String addressLine1, String addressLine2, String addressLine3, String postalCode, String country ) { + private String makeFullAddress(String addressLine1, String addressLine2, String addressLine3, String postalCode, String country) { String fullAddress = ""; - fullAddress = appendAddressField(fullAddress, addressLine1 ); - fullAddress = appendAddressField(fullAddress, addressLine2 ); - fullAddress = appendAddressField(fullAddress, addressLine3 ); - fullAddress = appendAddressField(fullAddress, postalCode ); - fullAddress = appendAddressField(fullAddress, country ); + fullAddress = appendAddressField(fullAddress, addressLine1); + fullAddress = appendAddressField(fullAddress, addressLine2); + fullAddress = appendAddressField(fullAddress, addressLine3); + fullAddress = appendAddressField(fullAddress, postalCode); + fullAddress = appendAddressField(fullAddress, country); return fullAddress; } - + /** - * Appends the given address field to given 
address, if not empty. - * Adds delimiter in between if needed. - * + * Appends the given address field to given address, if not empty. Adds + * delimiter in between if needed. + * * @param address * @param addressfield + * * @return updated address */ private String appendAddressField(String address, String addressfield) { - + String updatedAddress = address; if (addressfield != null && !addressfield.isEmpty()) { if (!updatedAddress.isEmpty()) { @@ -1109,8 +1116,8 @@ class Firefox extends Extract { } updatedAddress += addressfield; } - + return updatedAddress; } - -} \ No newline at end of file + +} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index bf64688408..dd528087dd 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -2,7 +2,7 @@ * * Autopsy Forensic Browser * - * Copyright 2012-2019 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. * * Copyright 2012 42six Solutions. 
* Contact: aebadirad 42six com @@ -32,7 +32,6 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.coreutils.TimeStampUtils; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -41,7 +40,6 @@ import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; import org.sleuthkit.datamodel.Content; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.autopsy.ingest.IngestJobContext; -import org.sleuthkit.datamodel.DataSource; import org.sleuthkit.datamodel.SleuthkitCase; /** @@ -55,9 +53,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { private final List browserExtractors = new ArrayList<>(); private final IngestServices services = IngestServices.getInstance(); private IngestJobContext context; - private final StringBuilder subCompleted = new StringBuilder(); protected SleuthkitCase tskCase; - private RAOsAccountCache accountCache = new RAOsAccountCache(); RAImageIngestModule() { } @@ -68,25 +64,25 @@ public final class RAImageIngestModule implements DataSourceIngestModule { tskCase = Case.getCurrentCase().getSleuthkitCase(); - Extract iexplore = new ExtractIE(); - Extract edge = new ExtractEdge(); - Extract registry = new ExtractRegistry(); - Extract recentDocuments = new RecentDocumentsByLnk(); - Extract chrome = new Chromium(); - Extract firefox = new Firefox(); - Extract SEUQA = new SearchEngineURLQueryAnalyzer(); - Extract osExtract = new ExtractOs(); - Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer(); - Extract safari = new ExtractSafari(); - Extract zoneInfo = new ExtractZoneIdentifier(); - Extract recycleBin = new ExtractRecycleBin(); - Extract sru = new ExtractSru(); - Extract prefetch = new 
ExtractPrefetch(); - Extract webAccountType = new ExtractWebAccountType(); - Extract messageDomainType = new DomainCategoryRunner(); - Extract jumpList = new ExtractJumpLists(); + Extract iexplore = new ExtractIE(context); + Extract edge = new ExtractEdge(context); + Extract registry = new ExtractRegistry(context); + Extract recentDocuments = new RecentDocumentsByLnk(context); + Extract chrome = new Chromium(context); + Extract firefox = new Firefox(context); + Extract SEUQA = new SearchEngineURLQueryAnalyzer(context); + Extract osExtract = new ExtractOs(context); + Extract dataSourceAnalyzer = new DataSourceUsageAnalyzer(context); + Extract safari = new ExtractSafari(context); + Extract zoneInfo = new ExtractZoneIdentifier(context); + Extract recycleBin = new ExtractRecycleBin(context); + Extract sru = new ExtractSru(context); + Extract prefetch = new ExtractPrefetch(context); + Extract webAccountType = new ExtractWebAccountType(context); + Extract messageDomainType = new DomainCategoryRunner(context); + Extract jumpList = new ExtractJumpLists(context); - extractors.add(recycleBin); + extractors.add(recycleBin); extractors.add(jumpList); extractors.add(recentDocuments); extractors.add(registry); // needs to run before the DataSourceUsageAnalyzer @@ -111,7 +107,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { browserExtractors.add(safari); for (Extract extractor : extractors) { - extractor.init(); + extractor.startUp(); } } @@ -129,23 +125,17 @@ public final class RAImageIngestModule implements DataSourceIngestModule { for (int i = 0; i < extractors.size(); i++) { Extract extracter = extractors.get(i); if (context.dataSourceIngestIsCancelled()) { - logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS + logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getDisplayName()); //NON-NLS break; } - progressBar.progress(extracter.getName(), i); 
+ progressBar.progress(extracter.getDisplayName(), i); try { - extracter.process(dataSource, context, progressBar, accountCache); - if (extracter instanceof ExtractRegistry) { - accountCache.initialize(tskCase, ((DataSource) dataSource).getHost()); - } + extracter.process(dataSource, progressBar); } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex); //NON-NLS - subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed", - extracter.getName())); - errors.add( - NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName())); + logger.log(Level.SEVERE, "Exception occurred in " + extracter.getDisplayName(), ex); //NON-NLS + errors.add(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName())); } progressBar.progress(i + 1); errors.addAll(extracter.getErrorMessages()); @@ -185,7 +175,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule { historyMsg.append( NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.title", dataSource.getName())); for (Extract module : browserExtractors) { - historyMsg.append("
  • ").append(module.getName()); //NON-NLS + historyMsg.append("
  • ").append(module.getDisplayName()); //NON-NLS historyMsg.append(": ").append((module.foundData()) ? NbBundle .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.found") : NbBundle .getMessage(this.getClass(), "RAImageIngestModule.process.histMsg.notFnd")); @@ -199,33 +189,31 @@ public final class RAImageIngestModule implements DataSourceIngestModule { historyMsg.toString()); services.postMessage(inboxMsg); - if (context.dataSourceIngestIsCancelled()) { - return ProcessResult.OK; - } + return ProcessResult.OK; + } + @Override + public void shutDown() { for (int i = 0; i < extractors.size(); i++) { Extract extracter = extractors.get(i); try { - extracter.complete(); + extracter.shutDown(); } catch (Exception ex) { - logger.log(Level.SEVERE, "Exception occurred when completing " + extracter.getName(), ex); //NON-NLS - subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.complete.errMsg.failed", - extracter.getName())); + logger.log(Level.SEVERE, "Exception occurred when completing " + extracter.getDisplayName(), ex); //NON-NLS } } - - return ProcessResult.OK; } /** * Makes a path of the format - * [basePath]/[RECENT_ACTIVITY_FOLDER]/[module]_[ingest job id] if it does not - * already exist and returns the created folder. + * [basePath]/[RECENT_ACTIVITY_FOLDER]/[module]_[ingest job id] if it does + * not already exist and returns the created folder. * - * @param basePath The base path (a case-related folder like temp or - * output). - * @param module The module name to include in the folder name. + * @param basePath The base path (a case-related folder like temp or + * output). + * @param module The module name to include in the folder name. * @param ingestJobId The id of the ingest job. + * * @return The path to the folder. 
*/ private static String getAndMakeRAPath(String basePath, String module, long ingestJobId) { @@ -243,8 +231,8 @@ public final class RAImageIngestModule implements DataSourceIngestModule { * create the dir if it doesn't exist. * * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp - * folder to prevent name collisions + * @param mod Module name that will be used for a sub folder in the temp + * folder to prevent name collisions * * @return Path to directory */ @@ -257,8 +245,8 @@ public final class RAImageIngestModule implements DataSourceIngestModule { * create the dir if it doesn't exist. * * @param a_case Case that directory is for - * @param mod Module name that will be used for a sub folder in the temp - * folder to prevent name collisions + * @param mod Module name that will be used for a sub folder in the temp + * folder to prevent name collisions * * @return Path to directory */ diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java deleted file mode 100755 index c1206c82f6..0000000000 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAOsAccountCache.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Autopsy Forensic Browser - * - * Copyright 2021 Basis Technology Corp. - * Contact: carrier sleuthkit org - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.sleuthkit.autopsy.recentactivity; - -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.datamodel.AbstractFile; -import org.sleuthkit.datamodel.BlackboardAttribute; -import org.sleuthkit.datamodel.Host; -import org.sleuthkit.datamodel.OsAccount; -import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute; -import org.sleuthkit.datamodel.SleuthkitCase; -import org.sleuthkit.datamodel.TskCoreException; - -/** - * Cache of OsAccounts for a given host to be used by the various Recent - * Activity Extractors. - * - */ -final class RAOsAccountCache { - - private final Map accountCache = new HashMap<>(); - - /** - * initialize the account map for the given host. This should be done after - * the ExtractRegistry is run. - * - * @param tskCase - * @param host - * - * @throws TskCoreException - */ - void initialize(SleuthkitCase tskCase, Host host) throws TskCoreException { - buildAccountMap(tskCase, host); - } - - /** - * Returns the appropriate OsAccount for the given file. - * - * If the file is not associated with an OsAccount, try to find one based on - * the location of the file. - * - * If the file is associated with the system account of S-1-5-32-544 use the - * file path to determine which user account to associate the file with. - * - * - * @param file The file to match with appropriate OsAccount. - * - * @return Optional OsAccount, may not be present if one is not found. 
- * - * @throws TskCoreException - */ - Optional getOsAccount(AbstractFile file) throws TskCoreException { - Optional optional = file.getOsAccountObjectId(); - - if (!optional.isPresent()) { - return getAccountForPath(file.getParentPath()); - } - - OsAccount osAccount = Case.getCurrentCase().getSleuthkitCase().getOsAccountManager().getOsAccountByObjectId(optional.get()); - if (osAccount.getName().equals("S-1-5-32-544")) { - return getAccountForPath(file.getParentPath()); - } - - return Optional.ofNullable(osAccount); - } - - /** - * Return a user account if the given path's parent directory is a user - * account home directory. - * - * @param path Path to search. - * - * @return An Optional OsAccount if one was found. - */ - private Optional getAccountForPath(String path) { - Path filePath = Paths.get(path.toLowerCase()); - // Check if the path might be a user path. - if (filePath.startsWith(Paths.get("/users")) || filePath.startsWith("/document and settings")) { - for (String key : accountCache.keySet()) { - if (filePath.startsWith(Paths.get(key))) { - return Optional.of(accountCache.get(key)); - } - } - } - return Optional.empty(); - } - - /** - * Build a map of user home directories to OsAccounts for the given host. 
- * - * @throws TskCoreException - */ - private void buildAccountMap(SleuthkitCase tskCase, Host host) throws TskCoreException { - BlackboardAttribute.Type homeDir = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HOME_DIR); - List accounts = tskCase.getOsAccountManager().getOsAccounts(host); - - for (OsAccount account : accounts) { - List attributeList = account.getExtendedOsAccountAttributes(); - - for (OsAccountAttribute attribute : attributeList) { - if (attribute.getHostId().isPresent() - && attribute.getHostId().get().equals(host.getHostId()) - && attribute.getAttributeType().equals(homeDir)) { - accountCache.put(attribute.getValueString(), account); - } - } - } - } -} diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index e8e7240fee..98351a09d4 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -1,19 +1,19 @@ - /* +/* * * Autopsy Forensic Browser - * + * * Copyright 2012-2021 Basis Technology Corp. - * + * * Copyright 2012 42six Solutions. * Contact: aebadirad 42six com * Project Contact/Architect: carrier sleuthkit org - * + * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,7 +38,6 @@ import org.sleuthkit.autopsy.coreutils.JLnkParserException; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.BlackboardArtifact; -import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; @@ -55,11 +54,16 @@ class RecentDocumentsByLnk extends Extract { private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName()); private Content dataSource; - private IngestJobContext context; - + private final IngestJobContext context; + @Messages({ "Progress_Message_Extract_Resent_Docs=Recent Documents", + "RecentDocumentsByLnk_displayName=Recent Documents by Link Analyzer" }) + RecentDocumentsByLnk(IngestJobContext context) { + super(Bundle.RecentDocumentsByLnk_displayName(), context); + this.context = context; + } /** * Find the documents that Windows stores about recent documents and make @@ -78,10 +82,10 @@ class RecentDocumentsByLnk extends Extract { logger.log(Level.WARNING, "Error searching for .lnk files."); //NON-NLS this.addErrorMessage( NbBundle.getMessage(this.getClass(), "RecentDocumentsByLnk.getRecDoc.errMsg.errGetLnkFiles", - this.getName())); + this.getDisplayName())); return; } - + if (recentFiles.isEmpty()) { logger.log(Level.INFO, "Didn't find any recent files."); //NON-NLS return; @@ -116,47 +120,48 @@ class RecentDocumentsByLnk extends Extract { String path = lnk.getBestPath(); if (recentFileMap.get(path + File.separator + recentFile.getName()) == null) { recentFileMap.put(path + File.separator + recentFile.getName(), recentFile.getName()); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, - NbBundle.getMessage(this.getClass(), - "RecentDocumentsByLnk.parentModuleName.noSpace"), - path)); - bbattributes.add(new 
BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID, - NbBundle.getMessage(this.getClass(), - "RecentDocumentsByLnk.parentModuleName.noSpace"), - Util.findID(dataSource, path))); - bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, - NbBundle.getMessage(this.getClass(), - "RecentDocumentsByLnk.parentModuleName.noSpace"), - recentFile.getCrtime())); - try{ - BlackboardArtifact bba = createArtifactWithAttributes(ARTIFACT_TYPE.TSK_RECENT_OBJECT, recentFile, bbattributes); - if(bba != null) { - bbartifacts.add(bba); - bba = createAssociatedArtifact(path, bba); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH, + NbBundle.getMessage(this.getClass(), + "RecentDocumentsByLnk.parentModuleName.noSpace"), + path)); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID, + NbBundle.getMessage(this.getClass(), + "RecentDocumentsByLnk.parentModuleName.noSpace"), + Util.findID(dataSource, path))); + bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, + NbBundle.getMessage(this.getClass(), + "RecentDocumentsByLnk.parentModuleName.noSpace"), + recentFile.getCrtime())); + try { + BlackboardArtifact bba = createArtifactWithAttributes(BlackboardArtifact.Type.TSK_RECENT_OBJECT, recentFile, bbattributes); if (bba != null) { bbartifacts.add(bba); + bba = createAssociatedArtifact(path, bba); + if (bba != null) { + bbartifacts.add(bba); + } } + } catch (TskCoreException ex) { + logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", recentFile.getId()), ex); } - } catch(TskCoreException ex) { - logger.log(Level.SEVERE, String.format("Failed to create TSK_RECENT_OBJECT artifact for file %d", recentFile.getId()), ex); } } - } - + if (!context.dataSourceIngestIsCancelled()) { postArtifacts(bbartifacts); } } /** - * Create associated artifacts using file name and path and the artifact it associates with - * + * Create associated artifacts using file name and path and the 
artifact it + * associates with + * + * @param filePathName file and path of object being associated with - * - * @param bba blackboard artifact to associate with - * - * @returnv BlackboardArtifact or a null value - */ + * + * @param bba blackboard artifact to associate with + * + * @return BlackboardArtifact or a null value + */ private BlackboardArtifact createAssociatedArtifact(String filePathName, BlackboardArtifact bba) { String normalizePathName = FilenameUtils.normalize(filePathName, true); String fileName = FilenameUtils.getName(normalizePathName); @@ -172,16 +177,15 @@ class RecentDocumentsByLnk extends Extract { } catch (TskCoreException ex) { logger.log(Level.WARNING, String.format("Error finding actual file %s. file may not exist", filePathName), ex); //NON-NLS } - + return null; } - + @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = context; dataFound = false; - + progressBar.progress(Bundle.Progress_Message_Extract_Resent_Docs()); this.getRecentDocuments(); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index b57717e648..4f901b19e1 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2012-2014 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp.
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -58,9 +58,7 @@ import org.xml.sax.SAXException; * search engines by querying the blackboard for web history and bookmark * artifacts, and extracting search text from them. * - * * To add search engines, edit SEUQAMappings.xml under RecentActivity - * */ @NbBundle.Messages({ "cannotBuildXmlParser=Unable to build XML parser: ", @@ -77,10 +75,11 @@ class SearchEngineURLQueryAnalyzer extends Extract { private static SearchEngineURLQueryAnalyzer.SearchEngine[] engines; private Content dataSource; - private IngestJobContext context; + private final IngestJobContext context; - SearchEngineURLQueryAnalyzer() { - super(NbBundle.getMessage(ExtractIE.class, "SearchEngineURLQueryAnalyzer.moduleName.text")); + SearchEngineURLQueryAnalyzer(IngestJobContext context) { + super(NbBundle.getMessage(ExtractIE.class, "SearchEngineURLQueryAnalyzer.moduleName.text"), context); + this.context = context; } /** @@ -134,7 +133,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { String getDomainSubstring() { return domainSubstring; } - + Pattern getDomainRegexPattern() { return domainRegexPattern; } @@ -213,7 +212,8 @@ class SearchEngineURLQueryAnalyzer extends Extract { * * @param domain domain as part of the URL * - * @return supported search engine(s) the domain belongs to (list may be empty) + * @return supported search engine(s) the domain belongs to (list may be + * empty) * */ private static Collection getSearchEngineFromUrl(String domain) { @@ -332,23 +332,23 @@ class SearchEngineURLQueryAnalyzer extends Extract { if (urlAttr == null) { continue; } - + final String urlString = urlAttr.getValueString(); Collection possibleSearchEngines = getSearchEngineFromUrl(urlString); for (SearchEngineURLQueryAnalyzer.SearchEngine se : possibleSearchEngines) { String query = extractSearchEngineQuery(se, urlString); // If we have a non-empty query string, add it to the list - if ( 
!query.equals("")) { + if (!query.equals("")) { searchQueries.add(query); se.increment(); } } - + // If we didn't extract any search queries, go on to the next artifact if (searchQueries.isEmpty()) { continue; } - + // Extract the rest of the fields needed for the web search artifact BlackboardAttribute browserAttr = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME)); if (browserAttr != null) { @@ -364,7 +364,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { } // Make an artifact for each distinct query - for (String query : searchQueries) { + for (String query : searchQueries) { // If date doesn't exist, change to 0 (instead of 1969) if (last_accessed == -1) { last_accessed = 0; @@ -382,7 +382,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, NbBundle.getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.parentModuleName"), last_accessed)); - postArtifact(createArtifactWithAttributes(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, file, bbattributes)); + postArtifact(createArtifactWithAttributes(BlackboardArtifact.Type.TSK_WEB_SEARCH_QUERY, file, bbattributes)); ++totalQueries; } } @@ -408,17 +408,16 @@ class SearchEngineURLQueryAnalyzer extends Extract { } @Override - public void process(Content dataSource, IngestJobContext context, DataSourceIngestModuleProgress progressBar) { + public void process(Content dataSource, DataSourceIngestModuleProgress progressBar) { this.dataSource = dataSource; - this.context = context; - + progressBar.progress(Bundle.Progress_Message_Find_Search_Query()); this.findSearchQueries(); logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals()); //NON-NLS } @Override - void configExtractor() throws IngestModuleException { + void startUp() throws IngestModuleException { try { PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE, true); } catch (IOException e) 
{ @@ -429,8 +428,4 @@ class SearchEngineURLQueryAnalyzer extends Extract { loadConfigFile(); } - @Override - public void complete() { - logger.info("Search Engine URL Query Analyzer has completed."); //NON-NLS - } } diff --git a/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py b/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py index 27f9be6161..a0ad098623 100644 --- a/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py +++ b/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb.py @@ -172,8 +172,7 @@ class ContactsDbIngestModule(DataSourceIngestModule): )) try: - # index the artifact for keyword search - blackboard.postArtifact(art, ContactsDbIngestModuleFactory.moduleName) + blackboard.postArtifact(art, ContactsDbIngestModuleFactory.moduleName, context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()) diff --git a/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb_v2.py b/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb_v2.py index f58a156170..da4c21f560 100644 --- a/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb_v2.py +++ b/pythonExamples/Aug2015DataSourceTutorial/FindContactsDb_v2.py @@ -146,7 +146,7 @@ class ContactsDbIngestModule(DataSourceIngestModule): # Create an instance of the helper class # TODO - Replace with your parser name and Account.Type helper = CommunicationArtifactsHelper(current_case.getSleuthkitCase(), - ContactsDbIngestModuleFactory.moduleName, app_database.getDBFile(), Account.Type.DEVICE) + ContactsDbIngestModuleFactory.moduleName, app_database.getDBFile(), Account.Type.DEVICE, context.getJobId()) # Iterate through each row and create artifacts while result_set.next(): diff --git a/pythonExamples/July2015FileTutorial_BigRound/FindBigRoundFiles.py b/pythonExamples/July2015FileTutorial_BigRound/FindBigRoundFiles.py index 5bf710e9d5..8dc5a59763 100644 --- 
a/pythonExamples/July2015FileTutorial_BigRound/FindBigRoundFiles.py +++ b/pythonExamples/July2015FileTutorial_BigRound/FindBigRoundFiles.py @@ -92,11 +92,15 @@ class FindBigRoundFilesIngestModule(FileIngestModule): def log(self, level, msg): self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg) + def __init__(self): + self.context = None + # Where any setup and configuration is done # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext. # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html # TODO: Add any setup code that you need here. def startUp(self, context): + self.context = context self.filesFound = 0 # Throw an IngestModule.IngestModuleException exception if there was a problem setting up @@ -130,8 +134,7 @@ class FindBigRoundFilesIngestModule(FileIngestModule): "Big and Round Files"))).getAnalysisResult() try: - # post the artifact for listeners of artifact events - blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName) + blackboard.postArtifact(art, FindBigRoundFilesIngestModuleFactory.moduleName, context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()) diff --git a/pythonExamples/Registry_Example.py b/pythonExamples/Registry_Example.py index f99ead8086..94041bf9e7 100644 --- a/pythonExamples/Registry_Example.py +++ b/pythonExamples/Registry_Example.py @@ -207,9 +207,8 @@ class RegistryExampleIngestModule(DataSourceIngestModule): BlackboardAttribute(attributeIdRunKeyValue, moduleName, registryKey[3]) )) - # index the artifact for keyword search try: - blackboard.postArtifact(art, moduleName) + blackboard.postArtifact(art, moduleName, context.getJobId()) except Blackboard.BlackboardException as ex: self.log(Level.SEVERE, "Unable to index blackboard artifact " + str(art.getArtifactTypeName()), ex) diff --git 
a/pythonExamples/dataSourceIngestModule.py b/pythonExamples/dataSourceIngestModule.py index ecb4f01477..502d535b21 100644 --- a/pythonExamples/dataSourceIngestModule.py +++ b/pythonExamples/dataSourceIngestModule.py @@ -146,8 +146,7 @@ class SampleJythonDataSourceIngestModule(DataSourceIngestModule): None, "Test file", None, attrs).getAnalysisResult() try: - # post the artifact for listeners of artifact events. - blackboard.postArtifact(art, SampleJythonDataSourceIngestModuleFactory.moduleName) + blackboard.postArtifact(art, SampleJythonDataSourceIngestModuleFactory.moduleName, context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()) diff --git a/pythonExamples/fileIngestModule.py b/pythonExamples/fileIngestModule.py index f72f1c4792..50b2478697 100644 --- a/pythonExamples/fileIngestModule.py +++ b/pythonExamples/fileIngestModule.py @@ -94,11 +94,15 @@ class SampleJythonFileIngestModule(FileIngestModule): def log(self, level, msg): self._logger.logp(level, self.__class__.__name__, inspect.stack()[1][3], msg) + def __init__(self): + self.context = None + # Where any setup and configuration is done # 'context' is an instance of org.sleuthkit.autopsy.ingest.IngestJobContext. # See: http://sleuthkit.org/autopsy/docs/api-docs/latest/classorg_1_1sleuthkit_1_1autopsy_1_1ingest_1_1_ingest_job_context.html # TODO: Add any setup code that you need here. 
def startUp(self, context): + self.context = context self.filesFound = 0 # Throw an IngestModule.IngestModuleException exception if there was a problem setting up @@ -134,8 +138,7 @@ class SampleJythonFileIngestModule(FileIngestModule): None, "Text Files", None, attrs).getAnalysisResult() try: - # post the artifact for listeners of artifact events - blackboard.postArtifact(art, SampleJythonFileIngestModuleFactory.moduleName) + blackboard.postArtifact(art, SampleJythonFileIngestModuleFactory.moduleName, context.getJobId()) except Blackboard.BlackboardException as e: self.log(Level.SEVERE, "Error indexing artifact " + art.getDisplayName()) diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index ddeb88e7da..516b786c7d 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -1,7 +1,7 @@ /* * Autopsy Forensic Browser * - * Copyright 2011-2020 Basis Technology Corp. + * Copyright 2012-2021 Basis Technology Corp. 
* Contact: carrier sleuthkit org * * Licensed under the Apache License, Version 2.0 (the "License"); @@ -255,7 +255,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule { try { // index the artifact for keyword search - blackboard.postArtifact(artifact, EmailParserModuleFactory.getModuleName()); + blackboard.postArtifact(artifact, EmailParserModuleFactory.getModuleName(), context.getJobId()); } catch (Blackboard.BlackboardException ex) { MessageNotifyUtil.Notify.error(Bundle.ThunderbirdMboxFileIngestModule_processPst_indexError_message(), artifact.getDisplayName()); logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS @@ -786,7 +786,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule { try { // index the artifact for keyword search - blackboard.postArtifact(bbart, EmailParserModuleFactory.getModuleName()); + blackboard.postArtifact(bbart, EmailParserModuleFactory.getModuleName(), context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bbart.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error(Bundle.ThunderbirdMboxFileIngestModule_addArtifact_indexError_message(), bbart.getDisplayName()); diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/VcardParser.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/VcardParser.java index a0c62cec5a..f83025cbb8 100755 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/VcardParser.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/VcardParser.java @@ -243,7 +243,7 @@ final class VcardParser { // Index the artifact for keyword search. 
try { - blackboard.postArtifact(artifact, EmailParserModuleFactory.getModuleName()); + blackboard.postArtifact(artifact, EmailParserModuleFactory.getModuleName(), context.getJobId()); } catch (Blackboard.BlackboardException ex) { logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS MessageNotifyUtil.Notify.error(Bundle.VcardParser_addContactArtifact_indexError(), artifact.getDisplayName()); @@ -423,7 +423,7 @@ final class VcardParser { } try { - BlackboardAttribute.Type attributeType = tskCase.getAttributeType(attributeTypeName); + BlackboardAttribute.Type attributeType = tskCase.getBlackboard().getAttributeType(attributeTypeName); if (attributeType == null) { try{ // Add this attribute type to the case database. @@ -479,7 +479,7 @@ final class VcardParser { attributeTypeName = "TSK_EMAIL"; } try { - BlackboardAttribute.Type attributeType = tskCase.getAttributeType(attributeTypeName); + BlackboardAttribute.Type attributeType = tskCase.getBlackboard().getAttributeType(attributeTypeName); if (attributeType == null) { // Add this attribute type to the case database. attributeType = tskCase.getBlackboard().getOrAddAttributeType(attributeTypeName,