diff --git a/Core/build.xml b/Core/build.xml
index 3e2c4a0b58..28e64b83e5 100644
--- a/Core/build.xml
+++ b/Core/build.xml
@@ -39,7 +39,11 @@
-
+
+
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
index c9640f1a13..640f4f8825 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
@@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.casemodule;
import com.google.common.annotations.Beta;
+import com.google.common.eventbus.Subscribe;
import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData;
import java.awt.Frame;
import java.awt.event.ActionEvent;
@@ -68,7 +69,6 @@ import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.actions.OpenOutputFolderAction;
import org.sleuthkit.autopsy.appservices.AutopsyService;
import org.sleuthkit.autopsy.appservices.AutopsyService.CaseContext;
-import static org.sleuthkit.autopsy.casemodule.Bundle.*;
import org.sleuthkit.autopsy.casemodule.CaseMetadata.CaseMetadataException;
import org.sleuthkit.autopsy.casemodule.datasourcesummary.DataSourceSummaryAction;
import org.sleuthkit.autopsy.casemodule.events.AddingDataSourceEvent;
@@ -108,12 +108,17 @@ import org.sleuthkit.autopsy.events.AutopsyEventException;
import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
import org.sleuthkit.autopsy.progress.LoggingProgressIndicator;
import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator;
import org.sleuthkit.autopsy.progress.ProgressIndicator;
import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
+import org.sleuthkit.autopsy.timeline.events.TimelineEventAddedEvent;
+import org.sleuthkit.datamodel.Blackboard;
+import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.CaseDbConnectionInfo;
import org.sleuthkit.datamodel.Content;
@@ -121,6 +126,7 @@ import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
@@ -155,6 +161,7 @@ public class Case {
private CollaborationMonitor collaborationMonitor;
private Services caseServices;
private boolean hasDataSources;
+ private final TSKCaseRepublisher tskEventForwarder = new TSKCaseRepublisher();
/*
* Get a reference to the main window of the desktop application to use to
@@ -388,13 +395,44 @@ public class Case {
*/
TAG_DEFINITION_CHANGED,
/**
- * An item in the central repository has had its comment modified. The
- * old value is null, the new value is string for current comment.
+ * A timeline event, such as a MAC time or web activity event, was added
+ * to the current case. The old value is null and the new value is the
+ * TimelineEvent that was added.
+ */
+ TIMELINE_EVENT_ADDED,
+ /* An item in the central repository has had its comment
+ * modified. The old value is null and the new value is the string for
+ * the current comment.
*/
CR_COMMENT_CHANGED;
};
+ private final class TSKCaseRepublisher {
+
+ @Subscribe
+ public void rebroadcastTimelineEventCreated(TimelineManager.TimelineEventAddedEvent event) {
+ eventPublisher.publish(new TimelineEventAddedEvent(event));
+ }
+
+ @SuppressWarnings("deprecation")
+ @Subscribe
+ public void rebroadcastArtifactsPosted(Blackboard.ArtifactsPostedEvent event) {
+ for (BlackboardArtifact.Type artifactType : event.getArtifactTypes()) {
+ /*
+ * fireModuleDataEvent is deprecated so module writers don't use
+ * it (they should use Blackboard.postArtifact(s) instead), but
+ * we still need a way to rebroadcast the ArtifactsPostedEvent
+ * as a ModuleDataEvent.
+ */
+ IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
+ event.getModuleName(),
+ artifactType,
+ event.getArtifacts(artifactType)));
+ }
+ }
+ }
+
/**
* Adds a subscriber to all case events. To subscribe to only specific
* events, use one of the overloads of addEventSubscriber.
@@ -499,8 +537,8 @@ public class Case {
*/
public static boolean isValidName(String caseName) {
return !(caseName.contains("\\") || caseName.contains("/") || caseName.contains(":")
- || caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
- || caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
+ || caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
+ || caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
}
/**
@@ -2128,7 +2166,7 @@ public class Case {
} else if (UserPreferences.getIsMultiUserModeEnabled()) {
caseDb = SleuthkitCase.openCase(databaseName, UserPreferences.getDatabaseConnectionInfo(), metadata.getCaseDirectory());
} else {
- throw new CaseActionException(Case_open_exception_multiUserCaseNotEnabled());
+ throw new CaseActionException(Bundle.Case_open_exception_multiUserCaseNotEnabled());
}
} catch (TskUnsupportedSchemaVersionException ex) {
throw new CaseActionException(Bundle.Case_exceptionMessage_unsupportedSchemaVersionMessage(ex.getLocalizedMessage()), ex);
@@ -2150,6 +2188,8 @@ public class Case {
private void openCaseLevelServices(ProgressIndicator progressIndicator) {
progressIndicator.progress(Bundle.Case_progressMessage_openingCaseLevelServices());
this.caseServices = new Services(caseDb);
+
+ caseDb.registerForEvents(tskEventForwarder);
}
/**
@@ -2415,6 +2455,7 @@ public class Case {
*/
if (null != caseDb) {
progressIndicator.progress(Bundle.Case_progressMessage_closingCaseDatabase());
+ caseDb.unregisterForEvents(tskEventForwarder);
caseDb.close();
}
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java
index 6e954ce725..b845b97036 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Blackboard.java
@@ -19,54 +19,38 @@
package org.sleuthkit.autopsy.casemodule.services;
import java.io.Closeable;
-import java.io.IOException;
-import org.openide.util.Lookup;
-import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
+import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskDataException;
/**
* A representation of the blackboard, a place where artifacts and their
* attributes are posted.
*
- * NOTE: This API of this class is under development.
+ * @deprecated Use org.sleuthkit.datamodel.Blackboard instead.
*/
+@Deprecated
public final class Blackboard implements Closeable {
-
- private SleuthkitCase caseDb;
-
+
/**
* Constructs a representation of the blackboard, a place where artifacts
* and their attributes are posted.
- *
- * @param casedb The case database.
*/
- Blackboard(SleuthkitCase casedb) {
- this.caseDb = casedb;
+ Blackboard() {
}
/**
- * Indexes the text associated with the an artifact.
+ * Indexes the text associated with an artifact.
*
* @param artifact The artifact to be indexed.
*
* @throws BlackboardException If there is a problem indexing the artifact.
*/
public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
- if (null == caseDb) {
- throw new BlackboardException("Blackboard has been closed");
- }
- KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
- if (null == searchService) {
- throw new BlackboardException("Keyword search service not found");
- }
- try {
- searchService.index(artifact);
- } catch (TskCoreException ex) {
- throw new BlackboardException("Error indexing artifact", ex);
+ try {
+ Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
+ } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
+ throw new BlackboardException(ex.getMessage(), ex);
}
}
@@ -83,19 +67,10 @@ public final class Blackboard implements Closeable {
* artifact type.
*/
public synchronized BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
- if (null == caseDb) {
- throw new BlackboardException("Blackboard has been closed");
- }
try {
- return caseDb.addBlackboardArtifactType(typeName, displayName);
- } catch (TskDataException typeExistsEx) {
- try {
- return caseDb.getArtifactType(typeName);
- } catch (TskCoreException ex) {
- throw new BlackboardException("Failed to get or add artifact type", ex);
- }
- } catch (TskCoreException ex) {
- throw new BlackboardException("Failed to get or add artifact type", ex);
+ return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddArtifactType(typeName, displayName);
+ } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
+ throw new BlackboardException(ex.getMessage(), ex);
}
}
@@ -113,30 +88,20 @@ public final class Blackboard implements Closeable {
* attribute type.
*/
public synchronized BlackboardAttribute.Type getOrAddAttributeType(String typeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws BlackboardException {
- if (null == caseDb) {
- throw new BlackboardException("Blackboard has been closed");
- }
try {
- return caseDb.addArtifactAttributeType(typeName, valueType, displayName);
- } catch (TskDataException typeExistsEx) {
- try {
- return caseDb.getAttributeType(typeName);
- } catch (TskCoreException ex) {
- throw new BlackboardException("Failed to get or add attribute type", ex);
- }
- } catch (TskCoreException ex) {
- throw new BlackboardException("Failed to get or add attribute type", ex);
+ return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddAttributeType(typeName, valueType, displayName);
+ } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
+ throw new BlackboardException(ex.getMessage(), ex);
}
}
/**
* Closes the blackboard.
*
- * @throws IOException If there is a problem closing the blackboard.
*/
@Override
- public synchronized void close() throws IOException {
- caseDb = null;
+ public synchronized void close() {
+
}
/**
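
The wrapper now just delegates to the case database's blackboard, so callers should move to the new API directly. A hedged sketch of the replacement call path for module code; the class and method names here (BlackboardMigrationExample, postInsteadOfIndex, moduleName) are hypothetical, not part of this patch:

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;

    final class BlackboardMigrationExample {

        // Old (deprecated): Case.getCurrentCaseThrows().getServices().getBlackboard().indexArtifact(artifact);
        static void postInsteadOfIndex(BlackboardArtifact artifact, String moduleName)
                throws NoCurrentCaseException, Blackboard.BlackboardException {
            Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
            // postArtifact() indexes the artifact for keyword search and publishes an
            // ArtifactsPostedEvent, which Case now rebroadcasts as a ModuleDataEvent.
            blackboard.postArtifact(artifact, moduleName);
        }
    }
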
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
index 1e8d8fcb2e..ff3b32a495 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/services/Services.java
@@ -26,6 +26,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.openide.util.Lookup;
+import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.datamodel.SleuthkitCase;
@@ -39,7 +40,6 @@ public class Services implements Closeable {
private final FileManager fileManager;
private final TagsManager tagsManager;
private final KeywordSearchService keywordSearchService;
- private final Blackboard blackboard;
/**
* Constructs a collection of case-level services (e.g., file manager, tags
@@ -59,9 +59,6 @@ public class Services implements Closeable {
//null safe so that the functional tests run with no issues.
keywordSearchService = Lookup.getDefault().lookup(KeywordSearchService.class);
services.add(keywordSearchService);
-
- blackboard = new Blackboard(caseDb);
- services.add(blackboard);
}
/**
@@ -95,9 +92,21 @@ public class Services implements Closeable {
* Gets the blackboard service for the current case.
*
* @return The blackboard service for the current case.
+ *
+ * @deprecated Use org.sleuthkit.autopsy.casemodule.services.Services.getCaseBlackboard instead.
*/
+ @Deprecated
public Blackboard getBlackboard() {
- return blackboard;
+ return new Blackboard();
+ }
+
+ /**
+ * Gets the TSK Blackboard for the current case.
+ *
+ * @return The org.sleuthkit.datamodel.Blackboard for the current case.
+ */
+ public org.sleuthkit.datamodel.Blackboard getCaseBlackboard() {
+ return Case.getCurrentCase().getSleuthkitCase().getBlackboard();
}
/**
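
For callers that already hold the case services, the new accessor resolves to the same org.sleuthkit.datamodel.Blackboard instance as going through the case database. A one-line sketch of the intended usage (the wrapper class and method name are illustrative only):

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.datamodel.Blackboard;

    final class CaseBlackboardExample {

        // Equivalent to Case.getCurrentCase().getSleuthkitCase().getBlackboard().
        static Blackboard currentCaseBlackboard() {
            return Case.getCurrentCase().getServices().getCaseBlackboard();
        }
    }
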
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
index 58bf031359..31f93018b6 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
@@ -23,6 +23,7 @@ import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import static java.lang.Boolean.FALSE;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
@@ -34,35 +35,40 @@ import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.ThreadUtils;
+import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
-import org.sleuthkit.autopsy.coreutils.ThreadUtils;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
/**
* Listen for ingest events and update entries in the Central Repository
* database accordingly
*/
+@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Correlation Engine"})
public class IngestEventsListener {
private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
+ private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
private static int correlationModuleInstanceCount;
@@ -171,8 +177,7 @@ public class IngestEventsListener {
}
/**
- * Configure the listener to flag devices previously seen in other cases or
- * not.
+ * Configure the listener to flag previously seen devices or not.
*
* @param value True to flag seen devices; otherwise false.
*/
@@ -189,86 +194,68 @@ public class IngestEventsListener {
createCrProperties = value;
}
+ /**
+ * Make an Interesting Item artifact to flag an artifact whose correlation
+ * value was previously marked notable in other cases.
+ *
+ * @param originalArtifact Original artifact that we want to flag
+ * @param caseDisplayNames List of case names in which the artifact was previously seen
+ */
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
- "IngestEventsListener.prevCaseComment.text=Previous Case: ",
- "IngestEventsListener.ingestmodule.name=Correlation Engine"})
- static private void postCorrelatedBadArtifactToBlackboard(BlackboardArtifact bbArtifact, List caseDisplayNames) {
+ "IngestEventsListener.prevCaseComment.text=Previous Case: "})
+ static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames) {
- try {
- String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
-
- Collection attributes = new ArrayList<>();
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
- Bundle.IngestEventsListener_prevTaggedSet_text()));
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
- Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
-
- SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
- AbstractFile abstractFile = tskCase.getAbstractFileById(bbArtifact.getObjectID());
- org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
- // Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
- BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
- tifArtifact.addAttributes(attributes);
-
- try {
- // index the artifact for keyword search
- Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
- blackboard.indexArtifact(tifArtifact);
- } catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
- LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
- }
-
- // fire event to notify UI of this new artifact
- IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
- }
- } catch (TskCoreException ex) {
- LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
- } catch (IllegalStateException ex) {
- LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
- }
+ Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
+ TSK_SET_NAME, MODULE_NAME,
+ Bundle.IngestEventsListener_prevTaggedSet_text()),
+ new BlackboardAttribute(
+ TSK_COMMENT, MODULE_NAME,
+ Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
+ new BlackboardAttribute(
+ TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
+ originalArtifact.getArtifactID()));
+ makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
}
/**
- * Create an Interesting Aritfact hit for a device which was previously seen
+ * Create an Interesting Artifact hit for a device which was previously seen
* in the central repository.
*
- * @param bbArtifact the artifact to create the interesting item for
+ * @param originalArtifact the artifact to create the interesting item for
*/
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
"# {0} - typeName",
"# {1} - count",
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
- static private void postCorrelatedPreviousArtifactToBlackboard(BlackboardArtifact bbArtifact) {
+ static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact) {
+ Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
+ TSK_SET_NAME, MODULE_NAME,
+ Bundle.IngestEventsListener_prevExists_text()),
+ new BlackboardAttribute(
+ TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
+ originalArtifact.getArtifactID()));
+ makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
+ }
+ /**
+ * Make an Interesting Item artifact to flag the passed-in artifact.
+ *
+ * @param originalArtifact Artifact in the current case that we want to flag
+ * @param attributesForNewArtifact Attributes to assign to the new Interesting Item artifact
+ */
+ private static void makeAndPostInterestingArtifact(BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact) {
try {
- String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
-
- Collection attributes = new ArrayList<>();
- BlackboardAttribute att = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
- Bundle.IngestEventsListener_prevExists_text());
- attributes.add(att);
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
-
- SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
- AbstractFile abstractFile = bbArtifact.getSleuthkitCase().getAbstractFileById(bbArtifact.getObjectID());
- org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
+ SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
+ AbstractFile abstractFile = tskCase.getAbstractFileById(originalArtifact.getObjectID());
+ Blackboard blackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
- BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
- tifArtifact.addAttributes(attributes);
+ if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_ARTIFACT_HIT, attributesForNewArtifact)) {
+ BlackboardArtifact newInterestingArtifact = abstractFile.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
+ newInterestingArtifact.addAttributes(attributesForNewArtifact);
try {
// index the artifact for keyword search
- Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
- blackboard.indexArtifact(tifArtifact);
- } catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
- LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
+ blackboard.postArtifact(newInterestingArtifact, MODULE_NAME);
+ } catch (Blackboard.BlackboardException ex) {
+ LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newInterestingArtifact.getArtifactID(), ex); //NON-NLS
}
-
- // fire event to notify UI of this new artifact
- IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
@@ -283,7 +270,7 @@ public class IngestEventsListener {
public void propertyChange(PropertyChangeEvent evt) {
//if ingest is running we want there to check if there is a Correlation Engine module running
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
- //in these cases we still want to create correlation attributes for those artifacts when appropriate
+ //in these cases we still want to create correlation attributes for those artifacts when appropriate
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
EamDb dbManager;
try {
@@ -319,7 +306,7 @@ public class IngestEventsListener {
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
return;
}
-
+
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
case DATA_SOURCE_ANALYSIS_COMPLETED: {
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
@@ -333,10 +320,10 @@ public class IngestEventsListener {
}
private final class AnalysisCompleteTask implements Runnable {
-
+
private final EamDb dbManager;
private final PropertyChangeEvent event;
-
+
private AnalysisCompleteTask(EamDb db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
@@ -362,15 +349,15 @@ public class IngestEventsListener {
long dataSourceObjectId = -1;
try {
dataSource = ((DataSourceAnalysisCompletedEvent) event).getDataSource();
-
+
/*
- * We only care about Images for the purpose of updating hash
- * values.
+ * We only care about Images for the purpose of
+ * updating hash values.
*/
if (!(dataSource instanceof Image)) {
return;
}
-
+
dataSourceName = dataSource.getName();
dataSourceObjectId = dataSource.getId();
@@ -398,7 +385,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
correlationDataSource.setMd5(imageMd5Hash);
}
-
+
String imageSha1Hash = image.getSha1();
if (imageSha1Hash == null) {
imageSha1Hash = "";
@@ -407,7 +394,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
correlationDataSource.setSha1(imageSha1Hash);
}
-
+
String imageSha256Hash = image.getSha256();
if (imageSha256Hash == null) {
imageSha256Hash = "";
@@ -441,8 +428,8 @@ public class IngestEventsListener {
private final boolean createCorrelationAttributes;
private DataAddedTask(EamDb db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes) {
- dbManager = db;
- event = evt;
+ this.dbManager = db;
+ this.event = evt;
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
this.createCorrelationAttributes = createCorrelationAttributes;
@@ -476,7 +463,7 @@ public class IngestEventsListener {
try {
caseDisplayNames = dbManager.getListCasesHavingArtifactInstancesKnownBad(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
if (!caseDisplayNames.isEmpty()) {
- postCorrelatedBadArtifactToBlackboard(bbArtifact,
+ makeAndPostPreviousNotableArtifact(bbArtifact,
caseDisplayNames);
}
} catch (CorrelationAttributeNormalizationException ex) {
@@ -484,7 +471,7 @@ public class IngestEventsListener {
}
}
if (flagPreviousItemsEnabled
- && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
+ && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
@@ -494,7 +481,7 @@ public class IngestEventsListener {
List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
for (CorrelationAttributeInstance instance : previousOccurences) {
if (!instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) {
- postCorrelatedPreviousArtifactToBlackboard(bbArtifact);
+ makeAndPostPreviousSeenArtifact(bbArtifact);
break;
}
}
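
The two flagging paths now share makeAndPostInterestingArtifact, which checks for an existing artifact before creating and posting a new one. A condensed sketch of that create-if-absent-then-post pattern against the TSK blackboard API; the class, flagFile, moduleName, and setName names are placeholders rather than names from this patch:

    import java.util.Arrays;
    import java.util.Collection;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;
    import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
    import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;

    final class InterestingItemExample {

        static void flagFile(AbstractFile file, String moduleName, String setName)
                throws TskCoreException, Blackboard.BlackboardException {
            Collection<BlackboardAttribute> attributes = Arrays.asList(
                    new BlackboardAttribute(TSK_SET_NAME, moduleName, setName));
            Blackboard blackboard = file.getSleuthkitCase().getBlackboard();
            // Only create the interesting-item artifact once for this file/attribute set.
            if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
                BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
                artifact.addAttributes(attributes);
                // Replaces the old indexArtifact() + fireModuleDataEvent() pair.
                blackboard.postArtifact(artifact, moduleName);
            }
        }
    }
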
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java
index f244421880..792a3f2ebb 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/CentralRepoIngestModule.java
@@ -19,41 +19,44 @@
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle.Messages;
-import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
+import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.autopsy.core.RuntimeProperties;
+import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
+import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.HashUtility;
+import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
-import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
-import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
-import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
-import org.sleuthkit.datamodel.SleuthkitCase;
/**
* Ingest module for inserting entries into the Central Repository database on
@@ -63,6 +66,8 @@ import org.sleuthkit.datamodel.SleuthkitCase;
"CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
final class CentralRepoIngestModule implements FileIngestModule {
+ private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
+
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = true;
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
@@ -74,10 +79,10 @@ final class CentralRepoIngestModule implements FileIngestModule {
private long jobId;
private CorrelationCase eamCase;
private CorrelationDataSource eamDataSource;
- private Blackboard blackboard;
private CorrelationAttributeInstance.Type filesType;
private final boolean flagTaggedNotableItems;
private final boolean flagPreviouslySeenDevices;
+ private Blackboard blackboard;
private final boolean createCorrelationProperties;
/**
@@ -104,7 +109,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
return ProcessResult.ERROR;
@@ -158,7 +163,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
}
- // insert this file into the central repository
+ // insert this file into the central repository
if (createCorrelationProperties) {
try {
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
@@ -271,7 +276,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
// Don't allow sqlite central repo databases to be used for multi user cases
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
- && (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
+ && (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
}
@@ -308,7 +313,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
// if we are the first thread / module for this job, then make sure the case
// and image exist in the DB before we associate artifacts with it.
if (refCounter.incrementAndGet(jobId)
- == 1) {
+ == 1) {
// ensure we have this data source in the EAM DB
try {
if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
@@ -330,41 +335,32 @@ final class CentralRepoIngestModule implements FileIngestModule {
*/
private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames) {
+ Collection<BlackboardAttribute> attributes = Arrays.asList(
+ new BlackboardAttribute(
+ TSK_SET_NAME, MODULE_NAME,
+ Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
+ new BlackboardAttribute(
+ TSK_COMMENT, MODULE_NAME,
+ Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))));
try {
- String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
- Collection attributes = new ArrayList<>();
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
- Bundle.CentralRepoIngestModule_prevTaggedSet_text()));
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
- Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
-
- SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
- org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
- BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
tifArtifact.addAttributes(attributes);
-
try {
// index the artifact for keyword search
- blackboard.indexArtifact(tifArtifact);
+ blackboard.postArtifact(tifArtifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
-
// send inbox message
sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash());
-
- // fire event to notify UI of this new artifact
- services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
- } catch (NoCurrentCaseException ex) {
- logger.log(Level.SEVERE, "Exception while getting open case.", ex); // NON-NLS
}
}
diff --git a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
index 0f07f9a084..2ad2305d38 100755
--- a/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/communications/Bundle.properties-MERGED
@@ -57,7 +57,7 @@ CVTTopComponent.browseVisualizeTabPane.AccessibleContext.accessibleName=Visualiz
CVTTopComponent.vizPanel.TabConstraints.tabTitle_1=Visualize
VisualizationPanel.fitGraphButton.text=
VisualizationPanel.jTextArea1.text=Right-click an account in the Browse Accounts table, and select 'Visualize' to begin.
-VisualizationPanel.fitZoomButton.toolTipText=Fit Visualization
+VisualizationPanel.fitZoomButton.toolTipText=Fit visualization to available space.
VisualizationPanel.fitZoomButton.text=
# {0} - layout name
VisualizationPanel.layoutFail.text={0} layout failed. Try a different layout.
@@ -67,11 +67,11 @@ VisualizationPanel.lockAction.pluralText=Lock Selected Accounts
VisualizationPanel.lockAction.singularText=Lock Selected Account
VisualizationPanel.unlockAction.pluralText=Unlock Selected Accounts
VisualizationPanel.unlockAction.singularText=Unlock Selected Account
-VisualizationPanel.zoomActualButton.toolTipText=Reset Zoom
+VisualizationPanel.zoomActualButton.toolTipText=Reset visualization default zoom state.
VisualizationPanel.zoomActualButton.text=
-VisualizationPanel.zoomInButton.toolTipText=Zoom In
+VisualizationPanel.zoomInButton.toolTipText=Zoom visualization in.
VisualizationPanel.zoomInButton.text=
-VisualizationPanel.zoomOutButton.toolTipText=Zoom Out
+VisualizationPanel.zoomOutButton.toolTipText=Zoom visualization out.
VisualizationPanel.zoomOutButton.text=
VisualizationPanel.fastOrganicLayoutButton.text=
VisualizationPanel.backButton.text_1=
@@ -81,17 +81,17 @@ VisualizationPanel.hierarchyLayoutButton.text=Hierarchical
VisualizationPanel.clearVizButton.text_1=
VisualizationPanel.snapshotButton.text_1=Snapshot Report
VisualizationPanel.clearVizButton.actionCommand=
-VisualizationPanel.backButton.toolTipText=Click to Go Back
-VisualizationPanel.forwardButton.toolTipText=Click to Go Forward
-VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to Redraw Chart
-VisualizationPanel.clearVizButton.toolTipText=Click to Clear Chart
+VisualizationPanel.backButton.toolTipText=Click to go back to previous state.
+VisualizationPanel.forwardButton.toolTipText=Click to move state forward.
+VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to redraw visualization.
+VisualizationPanel.clearVizButton.toolTipText=Click to clear visualization.
FiltersPanel.limitHeaderLabel.text=Communications Limit:
FiltersPanel.mostRecentLabel.text=Most Recent:
FiltersPanel.limitErrorMsgLabel.text=Invalid integer value.
VisualizationPanel.forwardButton.text=
VisualizationPanel.zoomPercentLabel.text=100%
VisualizationPanel.zoomLabel.text=Zoom:
-VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot Report
+VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot report.
VisualizationPanel_action_dialogs_title=Communications
VisualizationPanel_action_name_text=Snapshot Report
VisualizationPanel_module_name=Communications
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
index 0a76b2a7fa..9a211c57a0 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/ExecUtil.java
@@ -196,6 +196,53 @@ public final class ExecUtil {
}
return process.exitValue();
}
+
+ /**
+ * Wait for the given process to finish, using the given ProcessTerminator.
+ *
+ * @param command The command that was used to start the process. Used
+ * only for logging purposes.
+ * @param process The process to wait for.
+ * @param terminator The ProcessTerminator used to determine if the process
+ * should be killed.
+ *
+ * @return The exit value of the process.
+ *
+ * @throws SecurityException If a security manager exists and vetoes any
+ * aspect of running the process.
+ * @throws IOException If an I/O error occurs.
+ */
+ public static int waitForTermination(String command, Process process, ProcessTerminator terminator) throws SecurityException, IOException {
+ return ExecUtil.waitForTermination(command, process, ExecUtil.DEFAULT_TIMEOUT, ExecUtil.DEFAULT_TIMEOUT_UNITS, terminator);
+ }
+
+ private static int waitForTermination(String command, Process process, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException {
+ try {
+ do {
+ process.waitFor(timeOut, units);
+ if (process.isAlive() && terminator.shouldTerminateProcess()) {
+ killProcess(process);
+ try {
+ process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
+ } catch (InterruptedException exx) {
+ Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
+ }
+ }
+ } while (process.isAlive());
+ } catch (InterruptedException ex) {
+ if (process.isAlive()) {
+ killProcess(process);
+ }
+ try {
+ process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
+ } catch (InterruptedException exx) {
+ Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
+ }
+ Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, "Thread interrupted while running {0}", command); // NON-NLS
+ Thread.currentThread().interrupt();
+ }
+ return process.exitValue();
+ }
/**
* Kills a process and its children
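
The new waitForTermination overloads cover the case where a module launches the external process itself but still wants ExecUtil's polling and terminator handling. A minimal sketch, assuming a data source ingest module; the class name, tool name, and arguments are placeholders:

    import java.io.IOException;
    import org.sleuthkit.autopsy.coreutils.ExecUtil;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;

    final class WaitForTerminationExample {

        static int runAndWait(IngestJobContext context) throws IOException {
            ProcessBuilder builder = new ProcessBuilder("some_tool", "--arg"); // placeholder command line
            Process process = builder.start();
            // Poll the process and kill it if the ingest job is cancelled before it exits.
            return ExecUtil.waitForTermination("some_tool", process,
                    context::dataSourceIngestIsCancelled);
        }
    }
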
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteDBConnect.java b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteDBConnect.java
index ada674433a..d6c9097cd3 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteDBConnect.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/SQLiteDBConnect.java
@@ -1,19 +1,19 @@
- /*
+/*
*
* Autopsy Forensic Browser
- *
- * Copyright 2012 Basis Technology Corp.
- *
+ *
+ * Copyright 2012-2018 Basis Technology Corp.
+ *
* Copyright 2012 42six Solutions.
* Contact: aebadirad 42six com
* Project Contact/Architect: carrier sleuthkit org
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -27,12 +27,12 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
-import org.sleuthkit.autopsy.coreutils.Logger;
+import java.util.logging.Level;
/**
- * Database connection class & utilities *
+ * Database connection class & utilities.
*/
-public class SQLiteDBConnect {
+public class SQLiteDBConnect implements AutoCloseable {
public String sDriver = "";
public String sUrl = null;
@@ -52,7 +52,7 @@ public class SQLiteDBConnect {
* quick and dirty constructor to test the database passing the
* DriverManager name and the fully loaded url to handle
*/
- /*
+ /*
* NB this will typically be available if you make this class concrete and
* not abstract
*/
@@ -104,9 +104,13 @@ public class SQLiteDBConnect {
statement.executeUpdate(instruction);
}
-// processes an array of instructions e.g. a set of SQL command strings passed from a file
-//NB you should ensure you either handle empty lines in files by either removing them or parsing them out
-// since they will generate spurious SQLExceptions when they are encountered during the iteration....
+ /**
+ * Processes an array of instructions, e.g., a set of SQL command strings
+ * passed from a file.
+ *
+ * NB: ensure you handle empty lines in files by removing them or parsing
+ * them out, since they will generate spurious SQLExceptions when they are
+ * encountered during the iteration.
+ */
public void executeStmt(String[] instructionSet) throws SQLException {
for (int i = 0; i < instructionSet.length; i++) {
executeStmt(instructionSet[i]);
@@ -120,7 +124,14 @@ public class SQLiteDBConnect {
public void closeConnection() {
try {
conn.close();
- } catch (Exception ignore) {
+ } catch (SQLException ex) {
+ logger.log(Level.WARNING, "Unable to close connection to SQLite DB at " + sUrl, ex);
}
+ //Implementing AutoCloseable.close() allows this class to be used in try-with-resources.
+ }
+
+ @Override
+ public void close() {
+ closeConnection();
}
}
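
Because SQLiteDBConnect now implements AutoCloseable, callers can manage the connection with try-with-resources. A small sketch, assuming the class's existing constructor and executeQry method; the wrapper class, database path, and query are placeholders:

    import java.sql.ResultSet;
    import java.sql.SQLException;
    import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;

    final class SQLiteDBConnectExample {

        static void dumpNames(String dbPath) {
            // The connection is closed automatically when the try block exits.
            try (SQLiteDBConnect db = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + dbPath)) {
                ResultSet rs = db.executeQry("SELECT name FROM example_table"); // hypothetical table
                while (rs.next()) {
                    System.out.println(rs.getString("name"));
                }
            } catch (SQLException ex) {
                // Log or rethrow as appropriate.
            }
        }
    }
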
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
index 57cdd1bf48..7f0a4d2419 100755
--- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
@@ -160,6 +160,7 @@ KeywordHits.createSheet.numChildren.name=Number of Children
KeywordHits.kwHits.text=Keyword Hits
KeywordHits.simpleLiteralSearch.text=Single Literal Keyword Search
KeywordHits.singleRegexSearch.text=Single Regular Expression Search
+LayoutFileNode.getActions.viewFileInDir.text=View File in Directory
OpenIDE-Module-Name=DataModel
AbstractContentChildren.CreateTSKNodeVisitor.exception.noNodeMsg=No Node defined for the given SleuthkitItem
AbstractContentChildren.createAutopsyNodeVisitor.exception.noNodeMsg=No Node defined for the given DisplayableItem
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNode.java
index c723d99b55..1673fd577f 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNode.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNode.java
@@ -51,7 +51,7 @@ public abstract class DisplayableItemNode extends AbstractNode {
*
* @throws TskCoreException
*/
- static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
+ protected static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
BlackboardAttribute pathIDAttribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
if (pathIDAttribute != null) {
long contentID = pathIDAttribute.getValueLong();
diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java
index 9a1bd96cc7..152df6f55f 100644
--- a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java
@@ -34,13 +34,10 @@ import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
-import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
-import org.sleuthkit.datamodel.FsContent;
-import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java
index 188d76acde..b3042fb4a5 100644
--- a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java
@@ -1,16 +1,16 @@
/*
* Sample module in the public domain. Feel free to use this as a template
* for your modules.
- *
+ *
* Contact: Brian Carrier [carrier sleuthkit [dot] org]
*
* This is free and unencumbered software released into the public domain.
- *
+ *
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
- *
+ *
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
@@ -18,34 +18,31 @@
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
- *
+ *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
+ * OTHER DEALINGS IN THE SOFTWARE.
*/
package org.sleuthkit.autopsy.examples;
import java.util.HashMap;
import java.util.logging.Level;
-import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
-import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
+import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskData;
/**
@@ -56,7 +53,7 @@ import org.sleuthkit.datamodel.TskData;
class SampleFileIngestModule implements FileIngestModule {
private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
- private static BlackboardAttribute.ATTRIBUTE_TYPE attrType = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
+ private static final BlackboardAttribute.ATTRIBUTE_TYPE ATTR_TYPE = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
private final boolean skipKnownFiles;
private IngestJobContext context = null;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
@@ -76,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule {
// Skip anything other than actual file system files.
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
- || (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
- || (file.isFile() == false)) {
+ || (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
+ || (file.isFile() == false)) {
return IngestModule.ProcessResult.OK;
}
@@ -101,7 +98,7 @@ class SampleFileIngestModule implements FileIngestModule {
// Make an attribute using the ID for the attribute attrType that
// was previously created.
- BlackboardAttribute attr = new BlackboardAttribute(attrType, SampleIngestModuleFactory.getModuleName(), count);
+ BlackboardAttribute attr = new BlackboardAttribute(ATTR_TYPE, SampleIngestModuleFactory.getModuleName(), count);
// Add the attribute to the general info artifact for the file. In a
// real module, you would likely have more complex data types
@@ -113,13 +110,15 @@ class SampleFileIngestModule implements FileIngestModule {
// management of shared data.
addToBlackboardPostCount(context.getJobId(), 1L);
- // Fire an event to notify any listeners for blackboard postings.
- ModuleDataEvent event = new ModuleDataEvent(SampleIngestModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_GEN_INFO);
- IngestServices.getInstance().fireModuleDataEvent(event);
+ /*
+ * Post the artifact, which will index it for keyword search and fire
+ * an event to notify the UI of the new artifact.
+ */
+ file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
return IngestModule.ProcessResult.OK;
- } catch (TskCoreException ex) {
+ } catch (TskCoreException | Blackboard.BlackboardException ex) {
IngestServices ingestServices = IngestServices.getInstance();
Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName());
logger.log(Level.SEVERE, "Error processing file (id = " + file.getId() + ")", ex);
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobSettings.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobSettings.java
index 8216d66528..80251599fd 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobSettings.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobSettings.java
@@ -329,14 +329,38 @@ public final class IngestJobSettings {
for (IngestModuleFactory moduleFactory : moduleFactories) {
loadedModuleNames.add(moduleFactory.getModuleDisplayName());
}
+
+ /**
+ * Hard coding Plaso to be disabled by default. loadedModuleNames is
+ * passed below as the default list of enabled modules so briefly remove
+ * Plaso from loaded modules to get the list of enabled and disabled
+ * modules names. Then put Plaso back into loadedModulesNames to let the
+ * rest of the code continue as before.
+ */
+ final String plasoModuleName = "Plaso";
+ boolean plasoLoaded = loadedModuleNames.contains(plasoModuleName);
+ if (plasoLoaded) {
+ loadedModuleNames.remove(plasoModuleName);
+ }
/**
* Get the enabled/disabled ingest modules settings for this context. By
- * default, all loaded modules are enabled.
+ * default, all loaded modules except Plaso are enabled.
*/
HashSet<String> enabledModuleNames = getModulesNames(executionContext, IngestJobSettings.ENABLED_MODULES_PROPERTY, makeCsvList(loadedModuleNames));
- HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, ""); //NON-NLS
+ HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, plasoModuleName); //NON-NLS
+ // If plaso was loaded, but appears in neither the enabled nor the
+ // disabled list, add it to the disabled list.
+ if (!enabledModuleNames.contains(plasoModuleName) && !disabledModuleNames.contains(plasoModuleName)) {
+ disabledModuleNames.add(plasoModuleName);
+ }
+
+ //Put plaso back into loadedModuleNames
+ if (plasoLoaded) {
+ loadedModuleNames.add(plasoModuleName);
+ }
+
/**
* Check for missing modules and create warnings if any are found.
*/
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
index 8108bba1b2..3117ef2f98 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestServices.java
@@ -104,9 +104,13 @@ public final class IngestServices {
*
* @param moduleDataEvent A module data event, i.e., an event that
* encapsulates artifact data.
+ *
+ * @deprecated Use org.sleuthkit.datamodel.Blackboard.postArtifact instead.
*/
+ @Deprecated
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
IngestManager.getInstance().fireIngestModuleDataEvent(moduleDataEvent);
+
}
/**
@@ -171,7 +175,6 @@ public final class IngestServices {
*
* @param moduleName A unique identifier for the module.
* @param settings A mapping of setting names to setting values.
- *
*/
public void setConfigSettings(String moduleName, Map settings) {
ModuleSettings.setConfigSettings(moduleName, settings);
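
The deprecation above is the core of this patch: a single org.sleuthkit.datamodel.Blackboard.postArtifact call now both indexes an artifact for keyword search and publishes the event that fireModuleDataEvent used to fire. A hedged sketch of the migration for a file ingest module (the helper name and the artifact type are illustrative, not part of the patch):

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.TskCoreException;

    class PostArtifactMigrationSketch {

        /*
         * Old pattern (now deprecated):
         *
         *     blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
         *     blackboard.indexArtifact(artifact);                            // keyword search index
         *     services.fireModuleDataEvent(new ModuleDataEvent(moduleName,
         *             BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT)); // UI event
         */

        /** New pattern: postArtifact indexes the artifact and notifies listeners in one call. */
        static void postHit(AbstractFile file, String moduleName)
                throws NoCurrentCaseException, TskCoreException, Blackboard.BlackboardException {
            BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
            Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
            blackboard.postArtifact(artifact, moduleName);
        }
    }
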
diff --git a/Core/src/org/sleuthkit/autopsy/ingest/ModuleDataEvent.java b/Core/src/org/sleuthkit/autopsy/ingest/ModuleDataEvent.java
index d78169e251..c463fd69ec 100644
--- a/Core/src/org/sleuthkit/autopsy/ingest/ModuleDataEvent.java
+++ b/Core/src/org/sleuthkit/autopsy/ingest/ModuleDataEvent.java
@@ -53,7 +53,7 @@ public class ModuleDataEvent extends ChangeEvent {
private Collection artifacts;
/**
- * @param moduleName Module name
+ * @param moduleName Module name
* @param artifactType Type of artifact that was posted to blackboard
*/
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType) {
@@ -63,9 +63,9 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
- * @param moduleName Module Name
+ * @param moduleName Module Name
* @param blackboardArtifactType Type of the blackboard artifact posted to
- * the blackboard
+ * the blackboard
*/
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType) {
super(blackboardArtifactType);
@@ -74,10 +74,10 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
- * @param moduleName Module name
+ * @param moduleName Module name
* @param blackboardArtifactType Type of artifact posted to the blackboard
- * @param artifacts List of specific artifact ID values that were added to
- * blackboard
+ * @param artifacts List of specific artifact ID values that
+ * were added to blackboard
*/
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType, Collection artifacts) {
this(moduleName, blackboardArtifactType);
@@ -85,10 +85,10 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
- * @param moduleName Module name
+ * @param moduleName Module name
* @param artifactType Type of artifact that was posted to blackboard
- * @param artifacts List of specific artifact values that were added to
- * blackboard
+ * @param artifacts List of specific artifact values that were added to
+ * blackboard
*/
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType, Collection artifacts) {
this(moduleName, artifactType);
diff --git a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java
index 1a9e947b76..d033a36a0f 100644
--- a/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java
+++ b/Core/src/org/sleuthkit/autopsy/keywordsearchservice/KeywordSearchService.java
@@ -48,13 +48,20 @@ public interface KeywordSearchService extends Closeable {
* all of its attributes.
*
* @param artifact The artifact to index.
+ *
+ * @deprecated Call org.sleuthkit.datamodel.Blackboard.postArtifact
+ * instead.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
+ @Deprecated
public void indexArtifact(BlackboardArtifact artifact) throws TskCoreException;
/**
- * Add the given Content object to the text index.
+ * Add the given Content object to the text index. This method should only
+ * be used in atypical cases, such as indexing a report. Artifacts are indexed
+ * when org.sleuthkit.datamodel.Blackboard.postArtifact
+ * is called and files are indexed during ingest.
*
* @param content The content to index.
*
diff --git a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java
index 34a7ec2702..250aab712d 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/dataSourceIntegrity/DataSourceIntegrityIngestModule.java
@@ -25,6 +25,7 @@ import java.util.List;
import java.util.logging.Level;
import javax.xml.bind.DatatypeConverter;
import java.util.Arrays;
+import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
@@ -38,6 +39,7 @@ import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.TskCoreException;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskDataException;
@@ -297,11 +299,12 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId());
verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment));
- IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(DataSourceIntegrityModuleFactory.getModuleName(),
- BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED));
+ Case.getCurrentCase().getServices().getCaseBlackboard().postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
- }
+ } catch (Blackboard.BlackboardException ex) {
+ Exceptions.printStackTrace(ex);
+ }
}
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
index d8d0f882da..0ccd35cef9 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
@@ -34,21 +34,20 @@ import java.util.logging.Level;
import net.sf.sevenzipjbinding.ArchiveFormat;
import static net.sf.sevenzipjbinding.ArchiveFormat.RAR;
import net.sf.sevenzipjbinding.ExtractAskMode;
-import net.sf.sevenzipjbinding.ISequentialOutStream;
-import net.sf.sevenzipjbinding.ISevenZipInArchive;
-import net.sf.sevenzipjbinding.SevenZip;
-import net.sf.sevenzipjbinding.SevenZipException;
-import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
import net.sf.sevenzipjbinding.ExtractOperationResult;
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
import net.sf.sevenzipjbinding.ICryptoGetTextPassword;
+import net.sf.sevenzipjbinding.ISequentialOutStream;
+import net.sf.sevenzipjbinding.ISevenZipInArchive;
import net.sf.sevenzipjbinding.PropID;
+import net.sf.sevenzipjbinding.SevenZip;
+import net.sf.sevenzipjbinding.SevenZipException;
+import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
@@ -58,36 +57,43 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.EncodedFileOutputStream;
import org.sleuthkit.datamodel.ReadContentInputStream;
-import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
class SevenZipExtractor {
private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName());
- private IngestServices services = IngestServices.getInstance();
- private final IngestJobContext context;
- private final FileTypeDetector fileTypeDetector;
+ private static final String MODULE_NAME = EmbeddedFileExtractorModuleFactory.getModuleName();
+
//encryption type strings
private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");
+
//zip bomb detection
private static final int MAX_DEPTH = 4;
private static final int MAX_COMPRESSION_RATIO = 600;
private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L;
private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB
+ private IngestServices services = IngestServices.getInstance();
+ private final IngestJobContext context;
+ private final FileTypeDetector fileTypeDetector;
+
private String moduleDirRelative;
private String moduleDirAbsolute;
@@ -244,44 +250,43 @@ class SevenZipExtractor {
*/
private void flagRootArchiveAsZipBomb(Archive rootArchive, AbstractFile archiveFile, String details, String escapedFilePath) {
rootArchive.flagAsZipBomb();
- logger.log(Level.INFO, details); //NON-NLS
- String msg = NbBundle.getMessage(SevenZipExtractor.class,
- "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);
+ logger.log(Level.INFO, details);
try {
- Collection attributes = new ArrayList<>();
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
- "Possible Zip Bomb"));
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
- EmbeddedFileExtractorModuleFactory.getModuleName(),
- Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())));
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
- EmbeddedFileExtractorModuleFactory.getModuleName(),
- details));
+ Collection attributes = Arrays.asList(
+ new BlackboardAttribute(
+ TSK_SET_NAME, MODULE_NAME,
+ "Possible Zip Bomb"),
+ new BlackboardAttribute(
+ TSK_DESCRIPTION, MODULE_NAME,
+ Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())),
+ new BlackboardAttribute(
+ TSK_COMMENT, MODULE_NAME,
+ details));
- SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
- org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
- // Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(archiveFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
- BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
-
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(artifact);
+ /*
+ * Post the artifact, which will index it for
+ * keyword search and fire an event to notify the
+ * UI of the new artifact.
+ */
+ blackboard.postArtifact(artifact, MODULE_NAME);
+
+ String msg = NbBundle.getMessage(SevenZipExtractor.class,
+ "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
+
+ services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
+
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
}
-
- services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
-
- services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for Zip Bomb Detection for file: " + escapedFilePath, ex); //NON-NLS
- } catch (NoCurrentCaseException ex) {
- logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
}
}
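
The zip-bomb hunk above also shows the duplicate-suppression idiom used throughout this patch: build the attribute list first, ask Blackboard.artifactExists whether an equivalent artifact is already on the blackboard, and only then create and post a new one. A condensed sketch, with a placeholder attribute value:

    import java.util.Arrays;
    import java.util.Collection;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;
    import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
    import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;

    class DedupThenPostSketch {

        static void flagOnce(Blackboard blackboard, AbstractFile file, String moduleName)
                throws TskCoreException, Blackboard.BlackboardException {
            Collection<BlackboardAttribute> attributes = Arrays.asList(
                    new BlackboardAttribute(TSK_SET_NAME, moduleName, "Possible Zip Bomb"));

            // Skip creation if an identical artifact already exists on this file.
            if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
                BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
                artifact.addAttributes(attributes);
                // One call: index for keyword search and fire the UI event.
                blackboard.postArtifact(artifact, moduleName);
            }
        }
    }
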
@@ -467,13 +472,11 @@ class SevenZipExtractor {
}
/**
- * Unpack the file to local folder and return a list of derived files
+ * Unpack the file to a local folder.
*
* @param archiveFile file to unpack
* @param depthMap - a concurrent hashmap which keeps track of the depth
* of all nested archives, key of objectID
- *
- * @return true if unpacking is complete
*/
void unpack(AbstractFile archiveFile, ConcurrentHashMap depthMap) {
unpack(archiveFile, depthMap, null);
@@ -510,7 +513,7 @@ class SevenZipExtractor {
//recursion depth check for zip bomb
Archive parentAr;
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
unpackSuccessful = false;
@@ -626,7 +629,7 @@ class SevenZipExtractor {
escapedArchiveFilePath, archiveItemPath);
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
- services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
+ services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, archiveItemPath}); //NON-NLS
logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS
unpackSuccessful = false;
@@ -654,7 +657,7 @@ class SevenZipExtractor {
localFile.createNewFile();
} catch (IOException e) {
logger.log(Level.SEVERE, "Error creating extracted file: "//NON-NLS
- + localFile.getAbsolutePath(), e);
+ + localFile.getAbsolutePath(), e);
}
}
} catch (SecurityException e) {
@@ -689,7 +692,7 @@ class SevenZipExtractor {
//inArchiveItemIndex. False indicates non-test mode
inArchive.extract(extractionIndices, false, archiveCallBack);
- unpackSuccessful = unpackSuccessful & archiveCallBack.wasSuccessful();
+ unpackSuccessful &= archiveCallBack.wasSuccessful();
archiveDetailsMap = null;
@@ -730,7 +733,7 @@ class SevenZipExtractor {
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
escapedArchiveFilePath, ex.getMessage());
- services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
+ services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
}
} finally {
if (inArchive != null) {
@@ -760,18 +763,21 @@ class SevenZipExtractor {
String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
try {
BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
- artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType));
+ artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType));
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(artifact);
+ /*
+ * Post the artifact, which will index it for
+ * keyword search and fire an event to notify the
+ * UI of the new artifact.
+ */
+ blackboard.postArtifact(artifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
- logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
+ logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
}
- services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS
}
@@ -780,8 +786,8 @@ class SevenZipExtractor {
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
- currentArchiveName, EmbeddedFileExtractorModuleFactory.getModuleName());
- services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
+ currentArchiveName, MODULE_NAME);
+ services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
}
// adding unpacked extracted derived files to the job after closing relevant resources.
@@ -871,7 +877,7 @@ class SevenZipExtractor {
private final String localAbsPath;
private final String localRelPath;
- public InArchiveItemDetails(
+ InArchiveItemDetails(
SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode,
String localAbsPath, String localRelPath) {
this.unpackedNode = unpackedNode;
@@ -916,10 +922,10 @@ class SevenZipExtractor {
private boolean unpackSuccessful = true;
- public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
- AbstractFile archiveFile, ProgressHandle progressHandle,
- Map archiveDetailsMap,
- String password, long freeDiskSpace) {
+ StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
+ AbstractFile archiveFile, ProgressHandle progressHandle,
+ Map archiveDetailsMap,
+ String password, long freeDiskSpace) {
this.inArchive = inArchive;
this.progressHandle = progressHandle;
@@ -944,7 +950,7 @@ class SevenZipExtractor {
*/
@Override
public ISequentialOutStream getStream(int inArchiveItemIndex,
- ExtractAskMode mode) throws SevenZipException {
+ ExtractAskMode mode) throws SevenZipException {
this.inArchiveItemIndex = inArchiveItemIndex;
@@ -970,7 +976,7 @@ class SevenZipExtractor {
}
} catch (IOException ex) {
logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
- + "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
+ + "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
return null;
}
@@ -1002,7 +1008,7 @@ class SevenZipExtractor {
: accessTime.getTime() / 1000;
progressHandle.progress(archiveFile.getName() + ": "
- + (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
+ + (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
inArchiveItemIndex);
}
@@ -1017,6 +1023,7 @@ class SevenZipExtractor {
*/
@Override
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
+
final SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode
= archiveDetailsMap.get(inArchiveItemIndex).getUnpackedNode();
final String localRelPath = archiveDetailsMap.get(
@@ -1218,7 +1225,7 @@ class SevenZipExtractor {
if (existingFile == null) {
df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(),
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
- node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(),
+ node.isIsFile(), node.getParent().getFile(), "", MODULE_NAME,
"", "", TskData.EncodingType.XOR1);
statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS));
} else {
@@ -1232,7 +1239,7 @@ class SevenZipExtractor {
String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType();
df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(),
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
- node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(),
+ node.isIsFile(), mimeType, "", MODULE_NAME,
"", "", TskData.EncodingType.XOR1);
} else {
//ALREADY CURRENT - SKIP
@@ -1327,8 +1334,8 @@ class SevenZipExtractor {
}
void addDerivedInfo(long size,
- boolean isFile,
- long ctime, long crtime, long atime, long mtime, String relLocalPath) {
+ boolean isFile,
+ long ctime, long crtime, long atime, long mtime, String relLocalPath) {
this.size = size;
this.isFile = isFile;
this.ctime = ctime;
diff --git a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java
index 01b97b6254..8b249b16c1 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionDataSourceIngestModule.java
@@ -19,26 +19,24 @@
package org.sleuthkit.autopsy.modules.encryptiondetection;
import java.io.IOException;
-import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
-import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
-import org.sleuthkit.autopsy.ingest.IngestModule;
-import org.sleuthkit.datamodel.Content;
-import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
+import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
@@ -57,8 +55,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/**
* Create an EncryptionDetectionDataSourceIngestModule object that will
* detect volumes that are encrypted and create blackboard artifacts as
- * appropriate. The supplied EncryptionDetectionIngestJobSettings object is
- * used to configure the module.
+ * appropriate.
+ *
+ * @param settings The settings used to configure the module.
*/
EncryptionDetectionDataSourceIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy();
@@ -67,7 +66,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
@Override
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
validateSettings();
- blackboard = Case.getCurrentCase().getServices().getBlackboard();
+ blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
this.context = context;
}
@@ -144,9 +143,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/**
* Create a blackboard artifact.
*
- * @param volume The volume to be processed.
+ * @param volume The volume to be processed.
* @param artifactType The type of artifact to create.
- * @param comment A comment to be attached to the artifact.
+ * @param comment A comment to be attached to the artifact.
*
* @return 'OK' if the volume was processed successfully, or 'ERROR' if
* there was a problem.
@@ -163,18 +162,14 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
try {
/*
- * Index the artifact for keyword search.
+ * Post the artifact, which will index it for keyword search
+ * and fire an event to notify the UI of the new artifact.
*/
- blackboard.indexArtifact(artifact);
+ blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
- /*
- * Send an event to update the view with the new result.
- */
- services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
-
/*
* Make an ingest inbox message.
*/
diff --git a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java
index e884baa55c..9afafb7831 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/encryptiondetection/EncryptionDetectionFileIngestModule.java
@@ -25,13 +25,11 @@ import com.healthmarketscience.jackcess.InvalidCredentialsException;
import com.healthmarketscience.jackcess.impl.CodecProvider;
import com.healthmarketscience.jackcess.impl.UnsupportedCodecException;
import com.healthmarketscience.jackcess.util.MemFileChannel;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.logging.Level;
-import org.sleuthkit.datamodel.ReadContentInputStream;
import java.io.BufferedInputStream;
+import java.io.IOException;
import java.io.InputStream;
import java.nio.BufferUnderflowException;
+import java.util.logging.Level;
import org.apache.tika.exception.EncryptedDocumentException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
@@ -41,18 +39,18 @@ import org.apache.tika.sax.BodyContentHandler;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -93,9 +91,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
/**
* Create a EncryptionDetectionFileIngestModule object that will detect
* files that are either encrypted or password protected and create
- * blackboard artifacts as appropriate. The supplied
- * EncryptionDetectionIngestJobSettings object is used to configure the
- * module.
+ * blackboard artifacts as appropriate.
+ *
+ * @param settings The settings used to configure the module.
*/
EncryptionDetectionFileIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy();
@@ -108,8 +106,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
try {
validateSettings();
- this.context = context;
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ this.context = context;
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+
fileTypeDetector = new FileTypeDetector();
} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
throw new IngestModule.IngestModuleException("Failed to create file type detector", ex);
@@ -131,12 +130,12 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* verify the file hasn't been deleted.
*/
if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
- && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
- && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
- && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
- && (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
- && !file.getKnown().equals(TskData.FileKnown.KNOWN)
- && !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
+ && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
+ && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
+ && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
+ && (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
+ && !file.getKnown().equals(TskData.FileKnown.KNOWN)
+ && !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
/*
* Is the file in FILE_IGNORE_LIST?
*/
@@ -206,18 +205,14 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
try {
/*
- * Index the artifact for keyword search.
+ * Post the artifact, which will index it for keyword search
+ * and fire an event to notify the UI of the new artifact.
*/
- blackboard.indexArtifact(artifact);
+ blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
- /*
- * Send an event to update the view with the new result.
- */
- services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
-
/*
* Make an ingest inbox message.
*/
diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/exif/Bundle.properties-MERGED
index 9905159d99..5971ab0d82 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/exif/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/modules/exif/Bundle.properties-MERGED
@@ -1,5 +1,5 @@
CannotRunFileTypeDetection=Cannot run file type detection.
-ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search.
+ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s).
OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=\
Exif metadata ingest module. \n\n\
diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
index c8ded79a50..885468f483 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java
@@ -28,39 +28,38 @@ import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
-import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
-import java.util.List;
import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF;
import org.sleuthkit.datamodel.BlackboardAttribute;
-import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MAKE;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MODEL;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
-import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@@ -70,20 +69,16 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
* files. Ingests an image file and, if available, adds it's date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact.
*/
-@NbBundle.Messages({
- "CannotRunFileTypeDetection=Cannot run file type detection."
-})
+@NbBundle.Messages({"CannotRunFileTypeDetection=Cannot run file type detection."})
public final class ExifParserFileIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
- private final IngestServices services = IngestServices.getInstance();
- private final AtomicInteger filesProcessed = new AtomicInteger(0);
+ private static final String MODULE_NAME = ExifParserModuleFactory.getModuleName();
private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private FileTypeDetector fileTypeDetector;
private final HashSet supportedMimeTypes = new HashSet<>();
private TimeZone timeZone = null;
- private Case currentCase;
private Blackboard blackboard;
ExifParserFileIngestModule() {
@@ -103,18 +98,18 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
}
+ @Messages({"ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s)."})
@Override
public ProcessResult process(AbstractFile content) {
try {
- currentCase = Case.getCurrentCaseThrows();
- blackboard = currentCase.getServices().getBlackboard();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
//skip unalloc
if ((content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
- || (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
+ || (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
return ProcessResult.OK;
}
@@ -135,14 +130,9 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
return processFile(content);
}
- @Messages({"ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search."})
- ProcessResult processFile(AbstractFile file) {
- InputStream in = null;
- BufferedInputStream bin = null;
+ private ProcessResult processFile(AbstractFile file) {
- try {
- in = new ReadContentInputStream(file);
- bin = new BufferedInputStream(in);
+ try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file));) {
Collection attributes = new ArrayList<>();
Metadata metadata = ImageMetadataReader.readMetadata(bin);
@@ -165,7 +155,7 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
if (date != null) {
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
+ attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, MODULE_NAME, date.getTime() / 1000));
}
}
@@ -174,15 +164,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (gpsDir != null) {
GeoLocation loc = gpsDir.getGeoLocation();
if (loc != null) {
- double latitude = loc.getLatitude();
- double longitude = loc.getLongitude();
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, ExifParserModuleFactory.getModuleName(), latitude));
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, ExifParserModuleFactory.getModuleName(), longitude));
+ attributes.add(new BlackboardAttribute(TSK_GEO_LATITUDE, MODULE_NAME, loc.getLatitude()));
+ attributes.add(new BlackboardAttribute(TSK_GEO_LONGITUDE, MODULE_NAME, loc.getLongitude()));
}
Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
if (altitude != null) {
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
+ attributes.add(new BlackboardAttribute(TSK_GEO_ALTITUDE, MODULE_NAME, altitude.doubleValue()));
}
}
@@ -191,36 +179,30 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
if (StringUtils.isNotBlank(model)) {
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
+ attributes.add(new BlackboardAttribute(TSK_DEVICE_MODEL, MODULE_NAME, model));
}
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
if (StringUtils.isNotBlank(make)) {
- attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
+ attributes.add(new BlackboardAttribute(TSK_DEVICE_MAKE, MODULE_NAME, make));
}
}
// Add the attributes, if there are any, to a new artifact
if (!attributes.isEmpty()) {
- SleuthkitCase tskCase = currentCase.getSleuthkitCase();
- org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF, attributes)) {
- BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
+ if (!blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) {
+ BlackboardArtifact bba = file.newArtifact(TSK_METADATA_EXIF);
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
- blackboard.indexArtifact(bba);
+ blackboard.postArtifact(bba, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.ExifParserFileIngestModule_indexError_message(), bba.getDisplayName());
}
-
- services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(),
- BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF,
- Collections.singletonList(bba)));
}
}
@@ -237,24 +219,12 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
} catch (IOException ex) {
logger.log(Level.WARNING, String.format("IOException when parsing image file '%s/%s' (id=%d).", file.getParentPath(), file.getName(), file.getId()), ex); //NON-NLS
return ProcessResult.ERROR;
- } finally {
- try {
- if (in != null) {
- in.close();
- }
- if (bin != null) {
- bin.close();
- }
- } catch (IOException ex) {
- logger.log(Level.WARNING, "Failed to close InputStream.", ex); //NON-NLS
- return ProcessResult.ERROR;
- }
}
}
/**
- * Checks if should try to attempt to extract exif. Currently checks if JPEG
- * image (by signature)
+ * Checks whether we should attempt to extract EXIF data. Currently checks
+ * if the file is a JPEG, TIFF, or X-WAV (by signature).
*
* @param f file to be checked
*
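
The ExifParserFileIngestModule hunks above also drop the manual stream bookkeeping (and its finally block) in favor of try-with-resources. A small sketch of the same cleanup pattern, assuming any Content-backed stream (the helper name is hypothetical):

    import java.io.BufferedInputStream;
    import java.io.IOException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.ReadContentInputStream;

    class StreamCleanupSketch {

        static long countBytes(AbstractFile file) throws IOException {
            // Both streams are closed automatically, even if reading throws.
            try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file))) {
                long count = 0;
                while (bin.read() != -1) {
                    count++;
                }
                return count;
            }
        }
    }
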
diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java
index 6f2281c45d..0fac779ca4 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java
@@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.modules.fileextmismatch;
-import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
import java.util.logging.Level;
@@ -26,7 +25,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
@@ -34,10 +32,10 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleSettings.CHECK_TYPE;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.TskData;
@@ -110,7 +108,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
@Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."})
public ProcessResult process(AbstractFile abstractFile) {
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
@@ -121,15 +119,15 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
- || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
- || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
- || (abstractFile.isFile() == false)) {
+ || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
+ || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
+ || (abstractFile.isFile() == false)) {
return ProcessResult.OK;
}
// deleted files often have content that was not theirs and therefor causes mismatch
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC))
- || (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
+ || (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
return ProcessResult.OK;
}
@@ -145,14 +143,17 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED);
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(bart);
+ /*
+ * Post the artifact, which will index it for
+ * keyword search and fire an event to notify the
+ * UI of the new artifact.
+ */
+ blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());
}
- services.fireModuleDataEvent(new ModuleDataEvent(FileExtMismatchDetectorModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart)));
}
return ProcessResult.OK;
} catch (TskException ex) {
diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
index f79c10a20c..1af1825c6d 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java
@@ -18,40 +18,42 @@
*/
package org.sleuthkit.autopsy.modules.filetypeid;
-import java.util.ArrayList;
-import java.util.Collection;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFileTypesException;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Detects the type of a file based on signature (magic) values. Posts results
* to the blackboard.
*/
-@NbBundle.Messages({
- "CannotRunFileTypeDetection=Unable to run file type detection."
-})
+@NbBundle.Messages({"CannotRunFileTypeDetection=Unable to run file type detection."})
public class FileTypeIdIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
- private long jobId;
private static final HashMap totalsForIngestJobs = new HashMap<>();
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
+
+ private long jobId;
private FileTypeDetector fileTypeDetector;
/**
@@ -146,26 +148,34 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* @param fileType The file type rule for categorizing the hit.
*/
private void createInterestingFileHit(AbstractFile file, FileType fileType) {
+
+ List attributes = Arrays.asList(
+ new BlackboardAttribute(
+ TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(),
+ fileType.getInterestingFilesSetName()),
+ new BlackboardAttribute(
+ TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(),
+ fileType.getMimeType()));
try {
- Collection attributes = new ArrayList<>();
- attributes.add(new BlackboardAttribute(
- BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
- attributes.add(new BlackboardAttribute(
- BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));
-
Case currentCase = Case.getCurrentCaseThrows();
- org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+
+ Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
- BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
-
try {
- currentCase.getServices().getBlackboard().indexArtifact(artifact);
+ /*
+ * Post the artifact, which will index it for
+ * keyword search and fire an event to notify the
+ * UI of the new artifact.
+ */
+ tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
}
}
+
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
@@ -227,5 +237,4 @@ public class FileTypeIdIngestModule implements FileIngestModule {
long matchTime = 0;
long numFiles = 0;
}
-
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
index 23749b8cba..a562ab2a22 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
@@ -30,7 +29,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
@@ -39,9 +37,9 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
@@ -102,7 +100,7 @@ public class HashDbIngestModule implements FileIngestModule {
* object is used to configure the module.
*
* @param settings The module settings.
- *
+ *
* @throws NoCurrentCaseException If there is no open case.
*/
HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
@@ -170,7 +168,7 @@ public class HashDbIngestModule implements FileIngestModule {
@Override
public ProcessResult process(AbstractFile file) {
try {
- blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
@@ -178,7 +176,7 @@ public class HashDbIngestModule implements FileIngestModule {
// Skip unallocated space files.
if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
- || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
+ || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
return ProcessResult.OK;
}
@@ -356,8 +354,11 @@ public class HashDbIngestModule implements FileIngestModule {
badFile.addAttributes(attributes);
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(badFile);
+ /*
+ * Post the artifact, which will index it for keyword search
+ * and fire an event to notify the UI of the new artifact.
+ */
+ blackboard.postArtifact(badFile, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@@ -400,7 +401,6 @@ public class HashDbIngestModule implements FileIngestModule {
abstractFile.getName() + md5Hash,
badFile));
}
- services.fireModuleDataEvent(new ModuleDataEvent(moduleName, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
} catch (TskException ex) {
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
}
@@ -414,7 +414,7 @@ public class HashDbIngestModule implements FileIngestModule {
* @param knownHashSets The list of hash sets for "known" files.
*/
private static synchronized void postSummary(long jobId,
- List knownBadHashSets, List knownHashSets) {
+ List knownBadHashSets, List knownHashSets) {
IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
totalsForIngestJobs.remove(jobId);
diff --git a/Core/src/org/sleuthkit/autopsy/modules/iOS/CallLogAnalyzer.java b/Core/src/org/sleuthkit/autopsy/modules/iOS/CallLogAnalyzer.java
index 0c36938eb6..908673f466 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/iOS/CallLogAnalyzer.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/iOS/CallLogAnalyzer.java
@@ -30,12 +30,12 @@ import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
@@ -58,7 +58,7 @@ final class CallLogAnalyzer {
/**
* Find call logs given an ingest job context and index the results.
- *
+ *
* @param context The ingest job context.
*/
public void findCallLogs(IngestJobContext context) {
@@ -69,7 +69,7 @@ final class CallLogAnalyzer {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
- blackboard = openCase.getServices().getBlackboard();
+ blackboard = openCase.getSleuthkitCase().getBlackboard();
        List<AbstractFile> absFiles;
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
@@ -98,7 +98,7 @@ final class CallLogAnalyzer {
/**
* Index results for call logs found in the database.
- *
+ *
* @param DatabasePath The path to the database.
* @param fileId The ID of the file associated with artifacts.
*/
@@ -162,8 +162,12 @@ final class CallLogAnalyzer {
bba.addAttributes(attributes);
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(bba);
+ /*
+ * post the artifact which will index the artifact for
+ * keyword search, and fire an event to notify UI of
+ * this new artifact
+ */
+ blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@@ -184,7 +188,5 @@ final class CallLogAnalyzer {
} catch (Exception e) {
logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e); //NON-NLS
}
-
}
-
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/iOS/ContactAnalyzer.java b/Core/src/org/sleuthkit/autopsy/modules/iOS/ContactAnalyzer.java
index ce3f971bee..649faf220f 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/iOS/ContactAnalyzer.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/iOS/ContactAnalyzer.java
@@ -35,12 +35,12 @@ import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
@@ -75,7 +75,7 @@ final class ContactAnalyzer {
return;
}
- blackboard = openCase.getServices().getBlackboard();
+ blackboard = openCase.getSleuthkitCase().getBlackboard();
        List<AbstractFile> absFiles;
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
@@ -183,7 +183,7 @@ final class ContactAnalyzer {
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
- blackboard.indexArtifact(bba);
+ blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
diff --git a/Core/src/org/sleuthkit/autopsy/modules/iOS/TextMessageAnalyzer.java b/Core/src/org/sleuthkit/autopsy/modules/iOS/TextMessageAnalyzer.java
index c6288f0933..c541bf608a 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/iOS/TextMessageAnalyzer.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/iOS/TextMessageAnalyzer.java
@@ -31,12 +31,12 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
@@ -73,7 +73,7 @@ class TextMessageAnalyzer {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
- blackboard = openCase.getServices().getBlackboard();
+ blackboard = openCase.getSleuthkitCase().getBlackboard();
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
absFiles = skCase.findAllFilesWhere("name ='mmssms.db'"); //NON-NLS //get exact file name
@@ -168,8 +168,11 @@ class TextMessageAnalyzer {
bba.addAttributes(attributes);
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(bba);
+ /*
+ * post the artifact which will index the artifact for
+ * keyword search, and fire an event to notify UI of
+ * this new artifact
+                             */
+                            blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@@ -191,7 +194,5 @@ class TextMessageAnalyzer {
} catch (Exception e) {
logger.log(Level.SEVERE, "Error parsing text messages to Blackboard", e); //NON-NLS
}
-
}
-
}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/iOS/iOSModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/iOS/iOSModuleFactory.java
index 8cfe0cf9ac..942dca5379 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/iOS/iOSModuleFactory.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/iOS/iOSModuleFactory.java
@@ -18,10 +18,8 @@
*/
package org.sleuthkit.autopsy.modules.iOS;
-import org.openide.util.lookup.ServiceProvider;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Version;
-import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
index 0e0160b9b9..c3c9d73d85 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesIdentifierIngestModule.java
@@ -19,8 +19,8 @@
package org.sleuthkit.autopsy.modules.interestingitems;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
-import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -29,7 +29,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
@@ -37,10 +36,13 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@@ -48,15 +50,15 @@ import org.sleuthkit.datamodel.TskData;
* A file ingest module that generates interesting files set hit artifacts for
* files that match interesting files set definitions.
*/
-@NbBundle.Messages({
- "FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."
-})
+@NbBundle.Messages({"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."})
final class FilesIdentifierIngestModule implements FileIngestModule {
private static final Object sharedResourcesLock = new Object();
private static final Logger logger = Logger.getLogger(FilesIdentifierIngestModule.class.getName());
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
    private static final Map<Long, List<FilesSet>> interestingFileSetsByJob = new ConcurrentHashMap<>();
+ private static final String MODULE_NAME = InterestingItemsIngestModuleFactory.getModuleName();
+
private final FilesIdentifierIngestJobSettings settings;
private final IngestServices services = IngestServices.getInstance();
private IngestJobContext context;
@@ -72,9 +74,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
this.settings = settings;
}
- /**
- * @inheritDoc
- */
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
@@ -100,21 +99,16 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
}
}
- /**
- * @inheritDoc
- */
@Override
@Messages({"FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search."})
public ProcessResult process(AbstractFile file) {
- Case currentCase;
try {
- currentCase = Case.getCurrentCaseThrows();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
- blackboard = currentCase.getServices().getBlackboard();
-
+
// Skip slack space files.
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)) {
return ProcessResult.OK;
@@ -126,48 +120,46 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
String ruleSatisfied = filesSet.fileIsMemberOf(file);
if (ruleSatisfied != null) {
try {
- // Post an interesting files set hit artifact to the
- // blackboard.
- String moduleName = InterestingItemsIngestModuleFactory.getModuleName();
-
-                            Collection<BlackboardAttribute> attributes = new ArrayList<>();
- // Add a set name attribute to the artifact. This adds a
- // fair amount of redundant data to the attributes table
- // (i.e., rows that differ only in artifact id), but doing
- // otherwise would requires reworking the interesting files
- // set hit artifact.
- BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, filesSet.getName());
- attributes.add(setNameAttribute);
+                            Collection<BlackboardAttribute> attributes = Arrays.asList(
+ /*
+ * Add a set name attribute to the artifact. This
+ * adds a fair amount of redundant data to the
+ * attributes table (i.e., rows that differ only in
+                                     * artifact id), but doing otherwise would require
+ * reworking the interesting files set hit artifact. */
+ new BlackboardAttribute(
+ TSK_SET_NAME, MODULE_NAME,
+ filesSet.getName()),
+ /*
+ * Add a category attribute to the artifact to
+ * record the interesting files set membership rule
+ * that was satisfied. */
+ new BlackboardAttribute(
+ TSK_CATEGORY, MODULE_NAME,
+ ruleSatisfied)
+ );
- // Add a category attribute to the artifact to record the
- // interesting files set membership rule that was satisfied.
- BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, moduleName, ruleSatisfied);
- attributes.add(ruleNameAttribute);
-
- org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
- BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
-
try {
- // index the artifact for keyword search
- blackboard.indexArtifact(artifact);
+
+                            // Post the artifact to the blackboard.
+ blackboard.postArtifact(artifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
}
- services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
-
// make an ingest inbox message
StringBuilder detailsSb = new StringBuilder();
-                            detailsSb.append("File: " + file.getParentPath() + file.getName() + "<br/>\n");
- detailsSb.append("Rule Set: " + filesSet.getName());
+                            detailsSb.append("File: ").append(file.getParentPath()).append(file.getName()).append("<br/>\n");
+ detailsSb.append("Rule Set: ").append(filesSet.getName());
services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
- "Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
+ "Interesting File Match: " + filesSet.getName() + "(" + file.getName() + ")",
detailsSb.toString(),
file.getName(),
artifact));
@@ -180,9 +172,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
return ProcessResult.OK;
}
- /**
- * @inheritDoc
- */
@Override
public void shutDown() {
if (context != null) {
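
Condensed, the de-duplication flow that FilesIdentifierIngestModule (and StixArtifactData further below) now follows looks roughly like this, with file, filesSet, and ruleSatisfied standing in for the module's locals and the static imports shown in the hunks above:

    Collection<BlackboardAttribute> attributes = Arrays.asList(
            new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, filesSet.getName()),
            new BlackboardAttribute(TSK_CATEGORY, MODULE_NAME, ruleSatisfied));

    // Only create a hit if an identical artifact does not already exist.
    if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
        BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
        artifact.addAttributes(attributes);
        blackboard.postArtifact(artifact, MODULE_NAME); // index + notify in one call
    }
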
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties
new file mode 100755
index 0000000000..2d29a29b49
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties
@@ -0,0 +1,5 @@
+PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
+PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
+PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All modules except chrome_cache* and the below are enabled. Enabling these will cause Plaso to run slower.
+PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
+PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties-MERGED
new file mode 100755
index 0000000000..c823f391fc
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties-MERGED
@@ -0,0 +1,29 @@
+# {0} - file that events are from
+PlasoIngestModule.artifact.progress=Adding events to case: {0}
+PlasoIngestModule.bad.imageFile=Cannot find image file name and path
+PlasoIngestModule.completed=Plaso Processing Completed
+PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation
+PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image.
+PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.
+PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.
+PlasoIngestModule.error.running.psort=Error running Psort, see log file.
+PlasoIngestModule.event.datetime=Event Date Time
+PlasoIngestModule.event.description=Event Description
+PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.
+PlasoIngestModule.executable.not.found=Plaso Executable Not Found.
+PlasoIngestModule.has.run=Plaso Plugin has been run.
+PlasoIngestModule.info.empty.database=Plaso database was empty.
+PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled
+PlasoIngestModule.psort.cancelled=psort run was canceled
+PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete.
+PlasoIngestModule.requires.windows=Plaso module requires windows.
+PlasoIngestModule.running.psort=Running Psort
+PlasoIngestModule.starting.log2timeline=Starting Log2timeline
+PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings
+PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source.
+PlasoModuleFactory_moduleName=Plaso
+PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
+PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
+PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All modules except chrome_cache* and the below are enabled. Enabling these will cause Plaso to run slower.
+PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
+PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java
new file mode 100755
index 0000000000..873f8adc15
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java
@@ -0,0 +1,478 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018-2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.plaso;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import static java.util.Objects.nonNull;
+import java.util.logging.Level;
+import java.util.stream.Collectors;
+import org.openide.modules.InstalledFileLocator;
+import org.openide.util.Cancellable;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.services.FileManager;
+import org.sleuthkit.autopsy.coreutils.ExecUtil;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
+import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TimelineEventType;
+
+/**
+ * Data source ingest module that runs Plaso against the image.
+ */
+public class PlasoIngestModule implements DataSourceIngestModule {
+
+ private static final Logger logger = Logger.getLogger(PlasoIngestModule.class.getName());
+ private static final String MODULE_NAME = PlasoModuleFactory.getModuleName();
+
+ private static final String PLASO = "plaso"; //NON-NLS
+ private static final String PLASO64 = "plaso-20180818-amd64";//NON-NLS
+ private static final String PLASO32 = "plaso-20180818-win32";//NON-NLS
+ private static final String LOG2TIMELINE_EXECUTABLE = "Log2timeline.exe";//NON-NLS
+ private static final String PSORT_EXECUTABLE = "psort.exe";//NON-NLS
+ private static final String COOKIE = "cookie";//NON-NLS
+ private static final int LOG2TIMELINE_WORKERS = 2;
+
+ private File log2TimeLineExecutable;
+ private File psortExecutable;
+
+ private final PlasoModuleSettings settings;
+ private IngestJobContext context;
+ private Case currentCase;
+ private FileManager fileManager;
+
+ private Image image;
+ private AbstractFile previousFile = null; // cache used when looking up files in Autopsy DB
+
+ PlasoIngestModule(PlasoModuleSettings settings) {
+ this.settings = settings;
+ }
+
+ @NbBundle.Messages({
+ "PlasoIngestModule.executable.not.found=Plaso Executable Not Found.",
+ "PlasoIngestModule.requires.windows=Plaso module requires windows.",
+ "PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image."})
+ @Override
+ public void startUp(IngestJobContext context) throws IngestModuleException {
+ this.context = context;
+
+ if (false == PlatformUtil.isWindowsOS()) {
+ throw new IngestModuleException(Bundle.PlasoIngestModule_requires_windows());
+ }
+
+ try {
+ log2TimeLineExecutable = locateExecutable(LOG2TIMELINE_EXECUTABLE);
+ psortExecutable = locateExecutable(PSORT_EXECUTABLE);
+ } catch (FileNotFoundException exception) {
+ logger.log(Level.WARNING, "Plaso executable not found.", exception); //NON-NLS
+ throw new IngestModuleException(Bundle.PlasoIngestModule_executable_not_found(), exception);
+ }
+
+ Content dataSource = context.getDataSource();
+ if (!(dataSource instanceof Image)) {
+ throw new IngestModuleException(Bundle.PlasoIngestModule_dataSource_not_an_image());
+ }
+ image = (Image) dataSource;
+ }
+
+ @NbBundle.Messages({
+ "PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.",
+ "PlasoIngestModule.error.running.psort=Error running Psort, see log file.",
+ "PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.",
+ "PlasoIngestModule.starting.log2timeline=Starting Log2timeline",
+ "PlasoIngestModule.running.psort=Running Psort",
+ "PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled",
+ "PlasoIngestModule.psort.cancelled=psort run was canceled",
+ "PlasoIngestModule.bad.imageFile=Cannot find image file name and path",
+ "PlasoIngestModule.completed=Plaso Processing Completed",
+ "PlasoIngestModule.has.run=Plaso Plugin has been run.",
+ "PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete."})
+ @Override
+ public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
+ assert dataSource.equals(image);
+
+ statusHelper.switchToDeterminate(100);
+ currentCase = Case.getCurrentCase();
+ fileManager = currentCase.getServices().getFileManager();
+
+ String currentTime = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss z", Locale.US).format(System.currentTimeMillis());//NON-NLS
+ Path moduleOutputPath = Paths.get(currentCase.getModuleDirectory(), PLASO, currentTime);
+ try {
+ Files.createDirectories(moduleOutputPath);
+ } catch (IOException ex) {
+ logger.log(Level.SEVERE, "Error creating Plaso module output directory.", ex); //NON-NLS
+ return ProcessResult.ERROR;
+ }
+
+ // Run log2timeline
+ logger.log(Level.INFO, "Starting Plaso Run.");//NON-NLS
+ statusHelper.progress(Bundle.PlasoIngestModule_starting_log2timeline(), 0);
+ ProcessBuilder log2TimeLineCommand = buildLog2TimeLineCommand(moduleOutputPath, image);
+ try {
+ Process log2TimeLineProcess = log2TimeLineCommand.start();
+ try (BufferedReader log2TimeLineOutpout = new BufferedReader(new InputStreamReader(log2TimeLineProcess.getInputStream()))) {
+ L2TStatusProcessor statusReader = new L2TStatusProcessor(log2TimeLineOutpout, statusHelper, moduleOutputPath);
+ new Thread(statusReader, "log2timeline status reader").start(); //NON-NLS
+ ExecUtil.waitForTermination(LOG2TIMELINE_EXECUTABLE, log2TimeLineProcess, new DataSourceIngestModuleProcessTerminator(context));
+ statusReader.cancel();
+ }
+
+ if (context.dataSourceIngestIsCancelled()) {
+ logger.log(Level.INFO, "Log2timeline run was canceled"); //NON-NLS
+ return ProcessResult.OK;
+ }
+ if (Files.notExists(moduleOutputPath.resolve(PLASO))) {
+ logger.log(Level.WARNING, "Error running log2timeline: there was no storage file."); //NON-NLS
+ return ProcessResult.ERROR;
+ }
+
+ // sort the output
+ statusHelper.progress(Bundle.PlasoIngestModule_running_psort(), 33);
+ ProcessBuilder psortCommand = buildPsortCommand(moduleOutputPath);
+ int result = ExecUtil.execute(psortCommand, new DataSourceIngestModuleProcessTerminator(context));
+ if (result != 0) {
+ logger.log(Level.SEVERE, String.format("Error running Psort, error code returned %d", result)); //NON-NLS
+ MessageNotifyUtil.Notify.error(MODULE_NAME, Bundle.PlasoIngestModule_psort_fail());
+ return ProcessResult.ERROR;
+ }
+
+ if (context.dataSourceIngestIsCancelled()) {
+ logger.log(Level.INFO, "psort run was canceled"); //NON-NLS
+ return ProcessResult.OK;
+ }
+ Path plasoFile = moduleOutputPath.resolve("plasodb.db3"); //NON-NLS
+ if (Files.notExists(plasoFile)) {
+ logger.log(Level.SEVERE, "Error running Psort: there was no sqlite db file."); //NON-NLS
+ return ProcessResult.ERROR;
+ }
+
+ // parse the output and make artifacts
+ createPlasoArtifacts(plasoFile.toString(), statusHelper);
+
+ } catch (IOException ex) {
+ logger.log(Level.SEVERE, "Error running Plaso.", ex);//NON-NLS
+ return ProcessResult.ERROR;
+ }
+
+ IngestMessage message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
+ Bundle.PlasoIngestModule_has_run(),
+ Bundle.PlasoIngestModule_completed());
+ IngestServices.getInstance().postMessage(message);
+ return ProcessResult.OK;
+ }
+
+ private ProcessBuilder buildLog2TimeLineCommand(Path moduleOutputPath, Image image) {
+ //make a csv list of disabled parsers.
+ String parsersString = settings.getParsers().entrySet().stream()
+ .filter(entry -> entry.getValue() == false)
+ .map(entry -> "!" + entry.getKey()) // '!' prepended to parsername disables it. //NON-NLS
+ .collect(Collectors.joining(","));//NON-NLS
+
+ ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
+ "\"" + log2TimeLineExecutable + "\"", //NON-NLS
+ "--vss-stores", "all", //NON-NLS
+ "-z", image.getTimeZone(), //NON-NLS
+ "--partitions", "all", //NON-NLS
+ "--hasher_file_size_limit", "1", //NON-NLS
+ "--hashers", "none", //NON-NLS
+ "--parsers", "\"" + parsersString + "\"",//NON-NLS
+ "--no_dependencies_check", //NON-NLS
+ "--workers", String.valueOf(LOG2TIMELINE_WORKERS),//NON-NLS
+ moduleOutputPath.resolve(PLASO).toString(),
+ image.getPaths()[0]
+ );
+ processBuilder.redirectError(moduleOutputPath.resolve("log2timeline_err.txt").toFile()); //NON-NLS
+ return processBuilder;
+ }
+
+ static private ProcessBuilder buildProcessWithRunAsInvoker(String... commandLine) {
+ ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
+ /* Add an environment variable to force log2timeline/psort to run with
+ * the same permissions Autopsy uses. */
+ processBuilder.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS
+ return processBuilder;
+ }
+
+ private ProcessBuilder buildPsortCommand(Path moduleOutputPath) {
+ ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
+ "\"" + psortExecutable + "\"", //NON-NLS
+ "-o", "4n6time_sqlite", //NON-NLS
+ "-w", moduleOutputPath.resolve("plasodb.db3").toString(), //NON-NLS
+ moduleOutputPath.resolve(PLASO).toString()
+ );
+
+ processBuilder.redirectOutput(moduleOutputPath.resolve("psort_output.txt").toFile()); //NON-NLS
+ processBuilder.redirectError(moduleOutputPath.resolve("psort_err.txt").toFile()); //NON-NLS
+ return processBuilder;
+ }
+
+ private static File locateExecutable(String executableName) throws FileNotFoundException {
+ String architectureFolder = PlatformUtil.is64BitOS() ? PLASO64 : PLASO32;
+ String executableToFindName = Paths.get(PLASO, architectureFolder, executableName).toString();
+
+ File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PlasoIngestModule.class.getPackage().getName(), false);
+ if (null == exeFile || exeFile.canExecute() == false) {
+ throw new FileNotFoundException(executableName + " executable not found.");
+ }
+ return exeFile;
+ }
+
+ @NbBundle.Messages({
+ "PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.",
+ "PlasoIngestModule.event.datetime=Event Date Time",
+ "PlasoIngestModule.event.description=Event Description",
+ "PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation ",
+ "# {0} - file that events are from",
+ "PlasoIngestModule.artifact.progress=Adding events to case: {0}",
+ "PlasoIngestModule.info.empty.database=Plaso database was empty.",
+ })
+ private void createPlasoArtifacts(String plasoDb, DataSourceIngestModuleProgress statusHelper) {
+ Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
+
+ String sqlStatement = "SELECT substr(filename,1) AS filename, "
+ + " strftime('%s', datetime) AS epoch_date, "
+ + " description, "
+ + " source, "
+ + " type, "
+ + " sourcetype "
+ + " FROM log2timeline "
+ + " WHERE source NOT IN ('FILE', "
+ + " 'WEBHIST') " // bad dates and duplicates with what we have.
+ + " AND sourcetype NOT IN ('UNKNOWN', "
+ + " 'PE Import Time');"; // lots of bad dates //NON-NLS
+
+ try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + plasoDb); //NON-NLS
+ ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) {
+
+ boolean dbHasData = false;
+
+ while (resultSet.next()) {
+ dbHasData = true;
+
+ if (context.dataSourceIngestIsCancelled()) {
+ logger.log(Level.INFO, "Cancelled Plaso Artifact Creation."); //NON-NLS
+ return;
+ }
+
+ String currentFileName = resultSet.getString("filename"); //NON-NLS
+ statusHelper.progress(Bundle.PlasoIngestModule_artifact_progress(currentFileName), 66);
+ Content resolvedFile = getAbstractFile(currentFileName);
+ if (resolvedFile == null) {
+ logger.log(Level.INFO, "File {0} from Plaso output not found in case. Associating it with the data source instead.", currentFileName);//NON-NLS
+ resolvedFile = image;
+ }
+
+ String description = resultSet.getString("description");
+ TimelineEventType eventType = findEventSubtype(currentFileName, resultSet);
+
+ // If the description is empty use the event type display name
+ // as the description.
+ if ( description == null || description.isEmpty() ) {
+ if (eventType != TimelineEventType.OTHER) {
+ description = eventType.getDisplayName();
+ } else {
+ continue;
+ }
+ }
+
+                Collection<BlackboardAttribute> bbattributes = Arrays.asList(
+ new BlackboardAttribute(
+ TSK_DATETIME, MODULE_NAME,
+ resultSet.getLong("epoch_date")), //NON-NLS
+ new BlackboardAttribute(
+ TSK_DESCRIPTION, MODULE_NAME,
+ description),//NON-NLS
+ new BlackboardAttribute(
+ TSK_TL_EVENT_TYPE, MODULE_NAME,
+ eventType.getTypeID()));
+
+ try {
+ BlackboardArtifact bbart = resolvedFile.newArtifact(TSK_TL_EVENT);
+ bbart.addAttributes(bbattributes);
+ try {
+ /* Post the artifact which will index the artifact for
+ * keyword search, and fire an event to notify UI of
+ * this new artifact */
+ blackboard.postArtifact(bbart, MODULE_NAME);
+ } catch (BlackboardException ex) {
+ logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
+ }
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS
+ }
+ }
+
+            // Check if there is data in the db
+ if( !dbHasData ) {
+ logger.log(Level.INFO, String.format("PlasoDB was empty: %s", plasoDb));
+ MessageNotifyUtil.Notify.info(MODULE_NAME, Bundle.PlasoIngestModule_info_empty_database());
+ }
+ } catch (SQLException ex) {
+ logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS
+ }
+ }
+
+ private AbstractFile getAbstractFile(String file) {
+
+ Path path = Paths.get(file);
+ String fileName = path.getFileName().toString();
+ String filePath = path.getParent().toString().replaceAll("\\\\", "/");//NON-NLS
+ if (filePath.endsWith("/") == false) {//NON-NLS
+ filePath += "/";//NON-NLS
+ }
+
+ // check the cached file
+ //TODO: would we reduce 'cache misses' if we retrieved the events sorted by file? Is that overhead worth it?
+ if (previousFile != null
+ && previousFile.getName().equalsIgnoreCase(fileName)
+ && previousFile.getParentPath().equalsIgnoreCase(filePath)) {
+ return previousFile;
+
+ }
+ try {
+            List<AbstractFile> abstractFiles = fileManager.findFiles(fileName, filePath);
+ if (abstractFiles.size() == 1) {// TODO: why do we bother with this check. also we don't cache the file...
+ return abstractFiles.get(0);
+ }
+ for (AbstractFile resolvedFile : abstractFiles) {
+ // double check its an exact match
+ if (filePath.equalsIgnoreCase(resolvedFile.getParentPath())) {
+ // cache it for next time
+ previousFile = resolvedFile;
+ return resolvedFile;
+ }
+ }
+ } catch (TskCoreException ex) {
+ logger.log(Level.SEVERE, "Exception finding file.", ex);
+ }
+ return null;
+ }
+
+ /**
+     * Determine the TimelineEventType of the event from the plaso information.
+ *
+ * @param fileName The name of the file this event is from.
+     * @param row      The row returned from the log2timeline table of the plaso
+ * output.
+ *
+     * @return The TimelineEventType of the given event.
+ *
+ * @throws SQLException
+ */
+ private TimelineEventType findEventSubtype(String fileName, ResultSet row) throws SQLException {
+ switch (row.getString("source")) {
+ case "WEBHIST": //These shouldn't actually be present, but keeping the logic just in case...
+ if (fileName.toLowerCase().contains(COOKIE)
+ || row.getString("type").toLowerCase().contains(COOKIE)) {//NON-NLS
+
+ return TimelineEventType.WEB_COOKIE;
+ } else {
+ return TimelineEventType.WEB_HISTORY;
+ }
+ case "EVT":
+ case "LOG":
+ return TimelineEventType.LOG_ENTRY;
+ case "REG":
+ switch (row.getString("sourcetype").toLowerCase()) {//NON-NLS
+ case "unknown : usb entries":
+ case "unknown : usbstor entries":
+ return TimelineEventType.DEVICES_ATTACHED;
+ default:
+ return TimelineEventType.REGISTRY;
+ }
+ default:
+ return TimelineEventType.OTHER;
+ }
+ }
+
+ /**
+     * Runs in a thread and reads the output of log2timeline. It redirects the
+     * output both to a log file and to the status message of the Plaso ingest
+ * module progress bar.
+ */
+ private static class L2TStatusProcessor implements Runnable, Cancellable {
+
+ private final BufferedReader log2TimeLineOutpout;
+ private final DataSourceIngestModuleProgress statusHelper;
+ volatile private boolean cancelled = false;
+ private final Path outputPath;
+
+ private L2TStatusProcessor(BufferedReader log2TimeLineOutpout, DataSourceIngestModuleProgress statusHelper, Path outputPath) throws IOException {
+ this.log2TimeLineOutpout = log2TimeLineOutpout;
+ this.statusHelper = statusHelper;
+ this.outputPath = outputPath;
+ }
+
+ @Override
+ public void run() {
+ try (BufferedWriter writer = Files.newBufferedWriter(outputPath.resolve("log2timeline_output.txt"));) {//NON-NLS
+ String line = log2TimeLineOutpout.readLine();
+ while (cancelled == false && nonNull(line)) {
+ statusHelper.progress(line);
+ writer.write(line);
+ writer.newLine();
+ line = log2TimeLineOutpout.readLine();
+ }
+ writer.flush();
+ } catch (IOException ex) {
+ logger.log(Level.WARNING, "Error reading log2timeline output stream.", ex);//NON-NLS
+ }
+ }
+
+ @Override
+ public boolean cancel() {
+ cancelled = true;
+ return true;
+ }
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleFactory.java b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleFactory.java
new file mode 100755
index 0000000000..4e082a0e6e
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleFactory.java
@@ -0,0 +1,112 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2018-2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.plaso;
+
+import org.openide.util.NbBundle;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
+
+/**
+ * A factory that creates data source ingest modules that run Plaso against an
+ * image and saves the storage file to module output.
+ */
+@ServiceProvider(service = IngestModuleFactory.class)
+@NbBundle.Messages({"PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings"})
+public class PlasoModuleFactory implements IngestModuleFactory {
+
+ @NbBundle.Messages({"PlasoModuleFactory_moduleName=Plaso"})
+ static String getModuleName() {
+ return Bundle.PlasoModuleFactory_moduleName();
+ }
+
+ @Override
+ public String getModuleDisplayName() {
+ return getModuleName();
+ }
+
+ @NbBundle.Messages({"PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source."})
+ @Override
+ public String getModuleDescription() {
+ return Bundle.PlasoModuleFactory_moduleDesc();
+ }
+
+ @Override
+ public String getModuleVersionNumber() {
+ return Version.getVersion();
+ }
+
+ @Override
+ public boolean isDataSourceIngestModuleFactory() {
+ return true;
+ }
+
+ @Override
+ public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings settings) {
+ assert settings instanceof PlasoModuleSettings;
+ if (settings instanceof PlasoModuleSettings) {
+ return new PlasoIngestModule((PlasoModuleSettings) settings);
+ }
+ throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
+ }
+
+ @Override
+ public boolean hasGlobalSettingsPanel() {
+ return false;
+ }
+
+ @Override
+ public IngestModuleGlobalSettingsPanel getGlobalSettingsPanel() {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
+ return new PlasoModuleSettings();
+ }
+
+ @Override
+ public boolean hasIngestJobSettingsPanel() {
+ return true;
+ }
+
+ @Override
+ public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
+ assert settings instanceof PlasoModuleSettings;
+ if (settings instanceof PlasoModuleSettings) {
+ return new PlasoModuleSettingsPanel((PlasoModuleSettings) settings);
+ }
+ throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
+ }
+
+ @Override
+ public boolean isFileIngestModuleFactory() {
+ return false;
+ }
+
+ @Override
+ public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) {
+ throw new UnsupportedOperationException();
+ }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettings.java b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettings.java
new file mode 100755
index 0000000000..2730f4365f
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettings.java
@@ -0,0 +1,92 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.plaso;
+
+import com.google.common.collect.ImmutableMap;
+import java.util.HashMap;
+import java.util.Map;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+
+/**
+ * Settings for the Plaso Ingest Module.
+ */
+public class PlasoModuleSettings implements IngestModuleIngestJobSettings {
+
+ private static final long serialVersionUID = 1L;
+
+ /** Map from parser name (or match pattern) to its enabled state. */
+    final Map<String, Boolean> parsers = new HashMap<>();
+
+ /**
+ * Get an immutable map from parser name to its enabled state. Parsers
+     * mapped to true or with no entry will be enabled. Parsers mapped to false
+ * will be disabled.
+ */
+    Map<String, Boolean> getParsers() {
+ return ImmutableMap.copyOf(parsers);
+ }
+
+ /**
+     * Constructor. The PlasoModuleSettings will have the winreg, pe, chrome,
+     * firefox, and Internet Explorer parsers disabled by default.
+ */
+ public PlasoModuleSettings() {
+ parsers.put("winreg", false);
+ parsers.put("pe", false);
+
+ //chrome
+ parsers.put("chrome_preferences", false);
+ parsers.put("chrome_cache", false);
+ parsers.put("chrome_27_history", false);
+ parsers.put("chrome_8_history", false);
+ parsers.put("chrome_cookies", false);
+ parsers.put("chrome_extension_activity", false);
+
+ //firefox
+ parsers.put("firefox_cache", false);
+ parsers.put("firefox_cache2", false);
+ parsers.put("firefox_cookies", false);
+ parsers.put("firefox_downloads", false);
+ parsers.put("firefox_history", false);
+
+ //Internet Explorer
+ parsers.put("msiecf", false);
+ parsers.put("msie_webcache", false);
+ }
+
+ /**
+ * Gets the serialization version number.
+ *
+ * @return A serialization version number.
+ */
+ @Override
+ public long getVersionNumber() {
+ return serialVersionUID;
+ }
+
+ /**
+ * Set the given parser enabled/disabled
+ *
+ * @param parserName The name of the parser to enable/disable
+ * @param selected The new state (enabled/disabled) for the given parser.
+ */
+ void setParserEnabled(String parserName, boolean selected) {
+ parsers.put(parserName, selected);
+ }
+}
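
For reference, these settings are consumed by buildLog2TimeLineCommand() in PlasoIngestModule: every parser mapped to false is passed to log2timeline as a negated name, so with the defaults above the --parsers argument comes out roughly as "!winreg,!pe,!chrome_preferences,..." (ordering depends on the map). A minimal sketch of that consumption, assuming the settings object shown here:

    // Build the csv list of disabled parsers handed to log2timeline.
    String parsersString = settings.getParsers().entrySet().stream()
            .filter(entry -> entry.getValue() == false) // keep only disabled parsers
            .map(entry -> "!" + entry.getKey())         // '!' prefix disables a parser
            .collect(Collectors.joining(","));
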
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.form
new file mode 100755
index 0000000000..cb35a3edf0
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.form
@@ -0,0 +1,104 @@
+
+
+
diff --git a/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.java
new file mode 100755
index 0000000000..46a6ee89a0
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.java
@@ -0,0 +1,146 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.modules.plaso;
+
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
+import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
+
+/**
+ * Settings panel for the PlasoIngestModule.
+ */
+public class PlasoModuleSettingsPanel extends IngestModuleIngestJobSettingsPanel {
+
+ private final PlasoModuleSettings settings;
+
+ public PlasoModuleSettingsPanel(PlasoModuleSettings settings) {
+ this.settings = settings;
+ initComponents();
+ }
+
+ /** This method is called from within the constructor to initialize the
+ * form. WARNING: Do NOT modify this code. The content of this method is
+ * always regenerated by the Form Editor.
+ */
+ @SuppressWarnings("unchecked")
+ // //GEN-BEGIN:initComponents
+ private void initComponents() {
+ java.awt.GridBagConstraints gridBagConstraints;
+
+ jFileChooser1 = new javax.swing.JFileChooser();
+ winRegCheckBox = new javax.swing.JCheckBox();
+ peCheckBox = new javax.swing.JCheckBox();
+ plasoParserInfoTextArea = new javax.swing.JTextArea();
+ noteLabel = new javax.swing.JLabel();
+ disabledNoteLabel = new javax.swing.JLabel();
+
+ setLayout(new java.awt.GridBagLayout());
+
+ org.openide.awt.Mnemonics.setLocalizedText(winRegCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.winRegCheckBox.text")); // NOI18N
+ winRegCheckBox.addActionListener(new java.awt.event.ActionListener() {
+ public void actionPerformed(java.awt.event.ActionEvent evt) {
+ winRegCheckBoxActionPerformed(evt);
+ }
+ });
+ gridBagConstraints = new java.awt.GridBagConstraints();
+ gridBagConstraints.gridx = 0;
+ gridBagConstraints.gridy = 2;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
+ gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
+ gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.insets = new java.awt.Insets(0, 15, 5, 15);
+ add(winRegCheckBox, gridBagConstraints);
+
+ org.openide.awt.Mnemonics.setLocalizedText(peCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.peCheckBox.text")); // NOI18N
+ peCheckBox.addActionListener(new java.awt.event.ActionListener() {
+ public void actionPerformed(java.awt.event.ActionEvent evt) {
+ peCheckBoxActionPerformed(evt);
+ }
+ });
+ gridBagConstraints = new java.awt.GridBagConstraints();
+ gridBagConstraints.gridx = 0;
+ gridBagConstraints.gridy = 3;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
+ gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
+ gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.insets = new java.awt.Insets(0, 15, 9, 15);
+ add(peCheckBox, gridBagConstraints);
+
+ plasoParserInfoTextArea.setEditable(false);
+ plasoParserInfoTextArea.setBackground(javax.swing.UIManager.getDefaults().getColor("Panel.background"));
+ plasoParserInfoTextArea.setColumns(20);
+ plasoParserInfoTextArea.setLineWrap(true);
+ plasoParserInfoTextArea.setRows(1);
+ plasoParserInfoTextArea.setText(org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.plasoParserInfoTextArea.text")); // NOI18N
+ plasoParserInfoTextArea.setWrapStyleWord(true);
+ plasoParserInfoTextArea.setBorder(null);
+ plasoParserInfoTextArea.setPreferredSize(new java.awt.Dimension(160, 50));
+ gridBagConstraints = new java.awt.GridBagConstraints();
+ gridBagConstraints.gridx = 0;
+ gridBagConstraints.gridy = 1;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
+ gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
+ gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.insets = new java.awt.Insets(5, 15, 9, 15);
+ add(plasoParserInfoTextArea, gridBagConstraints);
+
+ org.openide.awt.Mnemonics.setLocalizedText(noteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.noteLabel.text")); // NOI18N
+ gridBagConstraints = new java.awt.GridBagConstraints();
+ gridBagConstraints.gridx = 0;
+ gridBagConstraints.gridy = 0;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
+ gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
+ gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.insets = new java.awt.Insets(9, 15, 9, 15);
+ add(noteLabel, gridBagConstraints);
+
+ org.openide.awt.Mnemonics.setLocalizedText(disabledNoteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.disabledNoteLabel.text")); // NOI18N
+ gridBagConstraints = new java.awt.GridBagConstraints();
+ gridBagConstraints.gridx = 0;
+ gridBagConstraints.gridy = 4;
+ gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
+ gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
+ gridBagConstraints.weightx = 1.0;
+ gridBagConstraints.weighty = 1.0;
+ gridBagConstraints.insets = new java.awt.Insets(5, 15, 0, 15);
+ add(disabledNoteLabel, gridBagConstraints);
+ }// //GEN-END:initComponents
+
+ private void winRegCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_winRegCheckBoxActionPerformed
+ settings.setParserEnabled("winreg", winRegCheckBox.isSelected());
+ }//GEN-LAST:event_winRegCheckBoxActionPerformed
+
+ private void peCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_peCheckBoxActionPerformed
+ settings.setParserEnabled("pe", peCheckBox.isSelected());
+ }//GEN-LAST:event_peCheckBoxActionPerformed
+
+ @Override
+ public IngestModuleIngestJobSettings getSettings() {
+ return settings;
+ }
+
+
+ // Variables declaration - do not modify//GEN-BEGIN:variables
+ private javax.swing.JLabel disabledNoteLabel;
+ private javax.swing.JFileChooser jFileChooser1;
+ private javax.swing.JLabel noteLabel;
+ private javax.swing.JCheckBox peCheckBox;
+ private javax.swing.JTextArea plasoParserInfoTextArea;
+ private javax.swing.JCheckBox winRegCheckBox;
+ // End of variables declaration//GEN-END:variables
+}
diff --git a/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java b/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
index 4df69801aa..de1f76e2fd 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/stix/StixArtifactData.java
@@ -1,15 +1,15 @@
/*
* Autopsy Forensic Browser
- *
+ *
* Copyright 2013-2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
- *
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
+ *
* http://www.apache.org/licenses/LICENSE-2.0
- *
+ *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,18 +18,23 @@
*/
package org.sleuthkit.autopsy.modules.stix;
-import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collection;
import java.util.logging.Level;
+import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
+import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
+import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@@ -38,18 +43,20 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
class StixArtifactData {
+ private static final String MODULE_NAME = "Stix";
+
private AbstractFile file;
private final String observableId;
private final String objType;
private static final Logger logger = Logger.getLogger(StixArtifactData.class.getName());
- public StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
+ StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
file = a_file;
observableId = a_observableId;
objType = a_objType;
}
- public StixArtifactData(long a_objId, String a_observableId, String a_objType) {
+ StixArtifactData(long a_objId, String a_observableId, String a_objType) {
try {
Case case1 = Case.getCurrentCaseThrows();
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
@@ -62,39 +69,35 @@ class StixArtifactData {
}
@Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
- "StixArtifactData.noOpenCase.errMsg=No open case available."})
+ "StixArtifactData.noOpenCase.errMsg=No open case available."})
public void createArtifact(String a_title) throws TskCoreException {
- Case currentCase;
+ Blackboard blackboard;
try {
- currentCase = Case.getCurrentCaseThrows();
+ blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_noOpenCase_errMsg(), ex.getLocalizedMessage());
return;
}
-
- String setName;
- if (a_title != null) {
- setName = "STIX Indicator - " + a_title; //NON-NLS
- } else {
- setName = "STIX Indicator - (no title)"; //NON-NLS
- }
-        Collection<BlackboardAttribute> attributes = new ArrayList<>();
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, "Stix", setName)); //NON-NLS
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, "Stix", observableId)); //NON-NLS
- attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, "Stix", objType)); //NON-NLS
-
- org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
+ String setName = "STIX Indicator - " + StringUtils.defaultIfBlank(a_title, "(no title)"); //NON-NLS
+
+        Collection<BlackboardAttribute> attributes = Arrays.asList(
+ new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, setName),
+ new BlackboardAttribute(TSK_TITLE, MODULE_NAME, observableId),
+ new BlackboardAttribute(TSK_CATEGORY, MODULE_NAME, objType));
+
// Create artifact if it doesn't already exist.
- if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
- BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
+ if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
+ BlackboardArtifact bba = file.newArtifact(TSK_INTERESTING_FILE_HIT);
bba.addAttributes(attributes);
-
+
try {
- // index the artifact for keyword search
- Blackboard blackboard = currentCase.getServices().getBlackboard();
- blackboard.indexArtifact(bba);
+ /*
+ * post the artifact which will index the artifact for keyword
+ * search, and fire an event to notify UI of this new artifact
+ */
+ blackboard.postArtifact(bba, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());
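For context, the post-then-notify pattern this hunk adopts can be sketched in isolation. This is a minimal, illustrative example: the class, method, and module names are assumptions made for the sketch, while the Blackboard calls (artifactExists, newArtifact, addAttributes, postArtifact) are the ones used in the hunk above.

    import java.util.Arrays;
    import java.util.Collection;
    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;

    class InterestingFileHitPoster {

        private static final String MODULE_NAME = "ExampleModule"; // illustrative module name

        void postHit(AbstractFile file, String setName)
                throws TskCoreException, NoCurrentCaseException, Blackboard.BlackboardException {
            // The case-database Blackboard replaces the deprecated services-layer one.
            Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
            Collection<BlackboardAttribute> attributes = Arrays.asList(
                    new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME, setName));
            // Only create the artifact if an identical one does not already exist.
            if (!blackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
                BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
                artifact.addAttributes(attributes);
                // One call indexes the artifact for keyword search and fires the
                // event that notifies the UI, replacing the old indexArtifact step.
                blackboard.postArtifact(artifact, MODULE_NAME);
            }
        }
    }

Posting once keeps indexing and UI notification together, so modules no longer need the services-layer Blackboard just to index an artifact.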
diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java
index 032f7da506..35dc103e5c 100644
--- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java
+++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactType.java
@@ -23,7 +23,7 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
@@ -65,7 +65,7 @@ final class CustomArtifactType {
* @throws BlackboardException If there is an error adding any of the types.
*/
static void addToCaseDatabase() throws Blackboard.BlackboardException, NoCurrentCaseException {
- Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
artifactType = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAME, ARTIFACT_DISPLAY_NAME);
intAttrType = blackboard.getOrAddAttributeType(INT_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, INT_ATTR_DISPLAY_NAME);
doubleAttrType = blackboard.getOrAddAttributeType(DOUBLE_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, DOUBLE_ATTR_DISPLAY_NAME);
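The getOrAdd* calls above are idempotent: they return the existing type if it is already defined and create it otherwise, so the test modules can call them on every run. A minimal sketch of defining and using a custom type against the datamodel Blackboard, assuming the SleuthkitCase accessor used in the StixArtifactData change and illustrative type and module names:

    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Blackboard;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;

    final class CustomTypeExample {

        static void addExampleArtifact(AbstractFile file)
                throws Blackboard.BlackboardException, NoCurrentCaseException, TskCoreException {
            Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
            // Returns the existing type or creates it on first use.
            BlackboardArtifact.Type artifactType = blackboard.getOrAddArtifactType("X_EXAMPLE_TYPE", "Example Type");
            BlackboardAttribute.Type countType = blackboard.getOrAddAttributeType("X_EXAMPLE_COUNT",
                    BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, "Example Count");
            // Attach an artifact of the custom type to the file.
            BlackboardArtifact artifact = file.newArtifact(artifactType.getTypeID());
            artifact.addAttribute(new BlackboardAttribute(countType, "ExampleModule", 1));
        }
    }

Because the lookups are keyed by type name, repeated runs reuse the same types instead of failing on duplicates.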
diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java
index 8293ba934e..d9a321e216 100644
--- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorDataSourceIngestModule.java
@@ -21,11 +21,11 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
diff --git a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java
index abd6f0d00d..1e263dc50a 100644
--- a/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/test/CustomArtifactsCreatorFileIngestModule.java
@@ -21,10 +21,10 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
diff --git a/Core/src/org/sleuthkit/autopsy/test/InterestingArtifactCreatorIngestModule.java b/Core/src/org/sleuthkit/autopsy/test/InterestingArtifactCreatorIngestModule.java
index f3232ceb86..7df13edfad 100644
--- a/Core/src/org/sleuthkit/autopsy/test/InterestingArtifactCreatorIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/test/InterestingArtifactCreatorIngestModule.java
@@ -26,17 +26,17 @@ import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
/**
- * A file ingest module that creates some interestng artifacts
+ * A file ingest module that creates some interesting artifacts
* with attributes based on files for test purposes.
*/
@NbBundle.Messages({
@@ -55,7 +55,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
try {
- Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
artifactType = blackboard.getOrAddArtifactType(INT_ARTIFACT_TYPE_NAME, INT_ARTIFACT_DISPLAY_NAME);
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.InterestingArtifactCreatorIngestModule_exceptionMessage_errorCreatingCustomType(), ex);
@@ -77,7 +77,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
* type.
*/
int randomArtIndex = (int) (Math.random() * 3);
- Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+ Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
BlackboardArtifact.Type artifactTypeBase = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAMES[randomArtIndex], ARTIFACT_DISPLAY_NAMES[randomArtIndex]);
BlackboardArtifact artifactBase = file.newArtifact(artifactTypeBase.getTypeID());
Collection<BlackboardAttribute> baseAttributes = new ArrayList<>();
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties b/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
old mode 100644
new mode 100755
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties-MERGED
index e6b3e114cb..dc0c674679 100755
--- a/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties-MERGED
@@ -2,6 +2,8 @@ CTL_MakeTimeline=Timeline
CTL_TimeLineTopComponentAction=TimeLineTopComponent
CTL_TimeLineTopComponent=Timeline
+FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval.
+FilteredEventsModel.timeRangeProperty.errorTitle=Timeline
OpenTimelineAction.displayName=Timeline
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.
@@ -9,7 +11,7 @@ PrompDialogManager.buttonType.continueNoUpdate=Continue Without Updating
PrompDialogManager.buttonType.showTimeline=Continue
PrompDialogManager.buttonType.update=Update DB
PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?
-PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.
+PromptDialogManager.confirmDuringIngest.headerText=Ingest is still running, and the Timeline may be incomplete.
PromptDialogManager.progressDialog.title=Populating Timeline Data
PromptDialogManager.rebuildPrompt.details=Details
PromptDialogManager.rebuildPrompt.headerText=The Timeline DB is incomplete and/or out of date. Some events may be missing or inaccurate and some features may be unavailable.
@@ -25,6 +27,7 @@ ShowInTimelineDialog.fileTitle=View {0} in timeline.
ShowInTimelineDialog.showTimelineButtonType.text=Show Timeline
Timeline.dialogs.title=\ Timeline
Timeline.frameName.text={0} - Autopsy Timeline
+Timeline.old.version=\ This Case was created with an older version of Autopsy.\nThe Timeline will not show events from data sources added with the older version of Autopsy.
Timeline.resultsPanel.title=Timeline Results
Timeline.runJavaFxThread.progress.creating=Creating timeline . . .
Timeline.zoomOutButton.text=Zoom Out
@@ -32,16 +35,6 @@ Timeline.goToButton.text=Go To:
Timeline.yearBarChart.x.years=Years
Timeline.resultPanel.loading=Loading...
-TimeLineController.errorTitle=Timeline error.
-TimeLineController.outOfDate.errorMessage=Error determing if the timeline is out of date. We will assume it should be updated. See the logs for more details.
-TimeLineController.rebuildReasons.incompleteOldSchema=The Timeline events database was previously populated without incomplete information: Some features may be unavailable or non-functional unless you update the events database.
-TimeLineController.rebuildReasons.ingestWasRunning=The Timeline events database was previously populated while ingest was running: Some events may be missing, incomplete, or inaccurate.
-TimeLineController.rebuildReasons.outOfDate=The event data is out of date: Not all events will be visible.
-TimeLineController.rebuildReasons.outOfDateError=Could not determine if the timeline data is out of date.
-TimeLineController.setEventsDBStale.errMsgNotStale=Failed to mark the timeline db as not stale. Some results may be out of date or missing.
-TimeLineController.setEventsDBStale.errMsgStale=Failed to mark the timeline db as stale. Some results may be out of date or missing.
-TimeLinecontroller.setIngestRunning.errMsgNotRunning=Failed to mark the timeline db as populated while ingest was not running. Some results may be out of date or missing.
-TimeLineController.setIngestRunning.errMsgRunning=Failed to mark the timeline db as populated while ingest was running. Some results may be out of date or missing.
TimeLinecontroller.updateNowQuestion=Do you want to update the events database now?
TimelineFrame.title=Timeline
TimelinePanel.jButton1.text=6m
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/Bundle_ja.properties b/Core/src/org/sleuthkit/autopsy/timeline/Bundle_ja.properties
old mode 100644
new mode 100755
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/CancellationProgressTask.java b/Core/src/org/sleuthkit/autopsy/timeline/CancellationProgressTask.java
old mode 100644
new mode 100755
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/ChronoFieldListCell.java b/Core/src/org/sleuthkit/autopsy/timeline/ChronoFieldListCell.java
old mode 100644
new mode 100755
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/FXMLConstructor.java b/Core/src/org/sleuthkit/autopsy/timeline/FXMLConstructor.java
old mode 100644
new mode 100755
diff --git a/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
new file mode 100755
index 0000000000..457939f04c
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
@@ -0,0 +1,699 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2011-2019 Basis Technology Corp.
+ * Contact: carrier sleuthkit org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.timeline;
+
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.LoadingCache;
+import com.google.common.collect.ImmutableList;
+import com.google.common.eventbus.EventBus;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.logging.Level;
+import javafx.beans.InvalidationListener;
+import javafx.beans.property.ReadOnlyObjectProperty;
+import javafx.beans.property.ReadOnlyObjectWrapper;
+import javafx.collections.FXCollections;
+import javafx.collections.ObservableList;
+import javafx.collections.ObservableMap;
+import javafx.collections.ObservableSet;
+import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
+import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
+import org.joda.time.DateTimeZone;
+import org.joda.time.Interval;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
+import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
+import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
+import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
+import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
+import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
+import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
+import org.sleuthkit.autopsy.events.AutopsyEvent;
+import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
+import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
+import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
+import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
+import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
+import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
+import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
+import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
+import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardArtifactTag;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.ContentTag;
+import org.sleuthkit.datamodel.DataSource;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.Tag;
+import org.sleuthkit.datamodel.TagName;
+import org.sleuthkit.datamodel.TimelineManager;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TimelineEvent;
+import org.sleuthkit.datamodel.TimelineEventType;
+import org.sleuthkit.datamodel.TimelineFilter;
+import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
+import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
+import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
+import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
+import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
+import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
+import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
+import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
+import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
+import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
+import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
+
+/**
+ * This class acts as the model for a TimelineView
+ *
+ * Views can register listeners on properties returned by methods.
+ *
+ * This class is implemented as a filtered view into an underlying
+ * TimelineManager.
+ *
+ * Maintainers, NOTE: as many methods as possible should cache their results so
+ * as to avoid unnecessary db calls through the TimelineManager -jm
+ *
+ * Concurrency Policy: TimelineManager is internally synchronized, so methods
+ * that only access the TimelineManager atomically do not need further
+ * synchronization. All other member state variables should only be accessed
+ * with the intrinsic lock of the containing FilteredEventsModel held.
+ *
+ */
+public final class FilteredEventsModel {
+
+ private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());
+
+ private final TimelineManager eventManager;
+
+ private final Case autoCase;
+ private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
+
+    //Filter and zoom state
+    private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
+    private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
+    private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
+    private final ReadOnlyObjectWrapper<TimelineEventType.TypeLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.TypeLevel.BASE_TYPE);
+    private final ReadOnlyObjectWrapper<TimelineEvent.DescriptionLevel> requestedLOD = new ReadOnlyObjectWrapper<>(TimelineEvent.DescriptionLevel.SHORT);
+    // end Filter and zoom state
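+    /*
+     * Usage sketch (illustrative): views typically observe the read-only views
+     * of these properties, for example
+     *
+     *     filteredEvents.timeRangeProperty().addListener(observable -> refreshView());
+     *
+     * where filteredEvents and refreshView are placeholder names, and they
+     * receive TagsAddedEvent, TagsDeletedEvent, and RefreshRequestedEvent
+     * notifications through the model's event bus.
+     */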
+
+ //caches
+ private final LoadingCache