mirror of
https://github.com/overcuriousity/autopsy-flatpak.git
synced 2025-07-15 01:07:42 +00:00
Merge pull request #5103 from rcordovano/kellykelly3-develop-merg
Merge timeline-event-mgr-2 branch into develop
This commit is contained in:
commit
ab60e9c29e
@ -39,7 +39,11 @@
|
||||
<copy todir="${basedir}/release/Tesseract-OCR" >
|
||||
<fileset dir="${thirdparty.dir}/Tesseract-OCR"/>
|
||||
</copy>
|
||||
|
||||
|
||||
<!--Copy Plaso to release-->
|
||||
<copy todir="${basedir}/release/plaso" >
|
||||
<fileset dir="${thirdparty.dir}/plaso"/>
|
||||
</copy>
|
||||
<!--Copy GStreamer to release-->
|
||||
<copy todir="${basedir}/release/gstreamer" >
|
||||
<fileset dir="${thirdparty.dir}/gstreamer"/>
|
||||
|
@ -19,6 +19,7 @@
|
||||
package org.sleuthkit.autopsy.casemodule;
|
||||
|
||||
import com.google.common.annotations.Beta;
|
||||
import com.google.common.eventbus.Subscribe;
|
||||
import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData;
|
||||
import java.awt.Frame;
|
||||
import java.awt.event.ActionEvent;
|
||||
@ -68,7 +69,6 @@ import org.openide.windows.WindowManager;
|
||||
import org.sleuthkit.autopsy.actions.OpenOutputFolderAction;
|
||||
import org.sleuthkit.autopsy.appservices.AutopsyService;
|
||||
import org.sleuthkit.autopsy.appservices.AutopsyService.CaseContext;
|
||||
import static org.sleuthkit.autopsy.casemodule.Bundle.*;
|
||||
import org.sleuthkit.autopsy.casemodule.CaseMetadata.CaseMetadataException;
|
||||
import org.sleuthkit.autopsy.casemodule.datasourcesummary.DataSourceSummaryAction;
|
||||
import org.sleuthkit.autopsy.casemodule.events.AddingDataSourceEvent;
|
||||
@ -108,12 +108,17 @@ import org.sleuthkit.autopsy.events.AutopsyEventException;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJob;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
|
||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
|
||||
import org.sleuthkit.autopsy.progress.LoggingProgressIndicator;
|
||||
import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator;
|
||||
import org.sleuthkit.autopsy.progress.ProgressIndicator;
|
||||
import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
|
||||
import org.sleuthkit.autopsy.timeline.events.TimelineEventAddedEvent;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||
import org.sleuthkit.datamodel.CaseDbConnectionInfo;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
@ -121,6 +126,7 @@ import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.Report;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TimelineManager;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
|
||||
|
||||
@ -155,6 +161,7 @@ public class Case {
|
||||
private CollaborationMonitor collaborationMonitor;
|
||||
private Services caseServices;
|
||||
private boolean hasDataSources;
|
||||
private final TSKCaseRepublisher tskEventForwarder = new TSKCaseRepublisher();
|
||||
|
||||
/*
|
||||
* Get a reference to the main window of the desktop application to use to
|
||||
@ -388,13 +395,44 @@ public class Case {
|
||||
*/
|
||||
TAG_DEFINITION_CHANGED,
|
||||
/**
|
||||
* An item in the central repository has had its comment modified. The
|
||||
* old value is null, the new value is string for current comment.
|
||||
* An timeline event, such mac time or web activity was added to the
|
||||
* current case. The old value is null and the new value is the
|
||||
* TimelineEvent that was added.
|
||||
*/
|
||||
TIMELINE_EVENT_ADDED,
|
||||
/* An item in the central repository has had its comment
|
||||
* modified. The old value is null, the new value is string for current
|
||||
* comment.
|
||||
*/
|
||||
CR_COMMENT_CHANGED;
|
||||
|
||||
};
|
||||
|
||||
private final class TSKCaseRepublisher {
|
||||
|
||||
@Subscribe
|
||||
public void rebroadcastTimelineEventCreated(TimelineManager.TimelineEventAddedEvent event) {
|
||||
eventPublisher.publish(new TimelineEventAddedEvent(event));
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Subscribe
|
||||
public void rebroadcastArtifactsPosted(Blackboard.ArtifactsPostedEvent event) {
|
||||
for (BlackboardArtifact.Type artifactType : event.getArtifactTypes()) {
|
||||
/*
|
||||
* fireModuleDataEvent is deprecated so module writers don't use
|
||||
* it (they should use Blackboard.postArtifact(s) instead), but
|
||||
* we still need a way to rebroadcast the ArtifactsPostedEvent
|
||||
* as a ModuleDataEvent.
|
||||
*/
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
|
||||
event.getModuleName(),
|
||||
artifactType,
|
||||
event.getArtifacts(artifactType)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a subscriber to all case events. To subscribe to only specific
|
||||
* events, use one of the overloads of addEventSubscriber.
|
||||
@ -499,8 +537,8 @@ public class Case {
|
||||
*/
|
||||
public static boolean isValidName(String caseName) {
|
||||
return !(caseName.contains("\\") || caseName.contains("/") || caseName.contains(":")
|
||||
|| caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
|
||||
|| caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
|
||||
|| caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
|
||||
|| caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -2128,7 +2166,7 @@ public class Case {
|
||||
} else if (UserPreferences.getIsMultiUserModeEnabled()) {
|
||||
caseDb = SleuthkitCase.openCase(databaseName, UserPreferences.getDatabaseConnectionInfo(), metadata.getCaseDirectory());
|
||||
} else {
|
||||
throw new CaseActionException(Case_open_exception_multiUserCaseNotEnabled());
|
||||
throw new CaseActionException(Bundle.Case_open_exception_multiUserCaseNotEnabled());
|
||||
}
|
||||
} catch (TskUnsupportedSchemaVersionException ex) {
|
||||
throw new CaseActionException(Bundle.Case_exceptionMessage_unsupportedSchemaVersionMessage(ex.getLocalizedMessage()), ex);
|
||||
@ -2150,6 +2188,8 @@ public class Case {
|
||||
private void openCaseLevelServices(ProgressIndicator progressIndicator) {
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_openingCaseLevelServices());
|
||||
this.caseServices = new Services(caseDb);
|
||||
|
||||
caseDb.registerForEvents(tskEventForwarder);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -2415,6 +2455,7 @@ public class Case {
|
||||
*/
|
||||
if (null != caseDb) {
|
||||
progressIndicator.progress(Bundle.Case_progressMessage_closingCaseDatabase());
|
||||
caseDb.unregisterForEvents(tskEventForwarder);
|
||||
caseDb.close();
|
||||
}
|
||||
|
||||
|
@ -19,54 +19,38 @@
|
||||
package org.sleuthkit.autopsy.casemodule.services;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import org.openide.util.Lookup;
|
||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskDataException;
|
||||
|
||||
/**
|
||||
* A representation of the blackboard, a place where artifacts and their
|
||||
* attributes are posted.
|
||||
*
|
||||
* NOTE: This API of this class is under development.
|
||||
* @deprecated Use org.sleuthkit.datamodel.Blackboard instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public final class Blackboard implements Closeable {
|
||||
|
||||
private SleuthkitCase caseDb;
|
||||
|
||||
|
||||
/**
|
||||
* Constructs a representation of the blackboard, a place where artifacts
|
||||
* and their attributes are posted.
|
||||
*
|
||||
* @param casedb The case database.
|
||||
*/
|
||||
Blackboard(SleuthkitCase casedb) {
|
||||
this.caseDb = casedb;
|
||||
Blackboard() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Indexes the text associated with the an artifact.
|
||||
* Indexes the text associated with an artifact.
|
||||
*
|
||||
* @param artifact The artifact to be indexed.
|
||||
*
|
||||
* @throws BlackboardException If there is a problem indexing the artifact.
|
||||
*/
|
||||
public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
|
||||
if (null == caseDb) {
|
||||
throw new BlackboardException("Blackboard has been closed");
|
||||
}
|
||||
KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
|
||||
if (null == searchService) {
|
||||
throw new BlackboardException("Keyword search service not found");
|
||||
}
|
||||
try {
|
||||
searchService.index(artifact);
|
||||
} catch (TskCoreException ex) {
|
||||
throw new BlackboardException("Error indexing artifact", ex);
|
||||
try{
|
||||
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
|
||||
} catch(org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
|
||||
throw new BlackboardException(ex.getMessage(), ex);
|
||||
}
|
||||
}
|
||||
|
||||
@ -83,19 +67,10 @@ public final class Blackboard implements Closeable {
|
||||
* artifact type.
|
||||
*/
|
||||
public synchronized BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
|
||||
if (null == caseDb) {
|
||||
throw new BlackboardException("Blackboard has been closed");
|
||||
}
|
||||
try {
|
||||
return caseDb.addBlackboardArtifactType(typeName, displayName);
|
||||
} catch (TskDataException typeExistsEx) {
|
||||
try {
|
||||
return caseDb.getArtifactType(typeName);
|
||||
} catch (TskCoreException ex) {
|
||||
throw new BlackboardException("Failed to get or add artifact type", ex);
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
throw new BlackboardException("Failed to get or add artifact type", ex);
|
||||
return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddArtifactType(typeName, displayName);
|
||||
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
|
||||
throw new BlackboardException(ex.getMessage(), ex);
|
||||
}
|
||||
}
|
||||
|
||||
@ -113,30 +88,20 @@ public final class Blackboard implements Closeable {
|
||||
* attribute type.
|
||||
*/
|
||||
public synchronized BlackboardAttribute.Type getOrAddAttributeType(String typeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws BlackboardException {
|
||||
if (null == caseDb) {
|
||||
throw new BlackboardException("Blackboard has been closed");
|
||||
}
|
||||
try {
|
||||
return caseDb.addArtifactAttributeType(typeName, valueType, displayName);
|
||||
} catch (TskDataException typeExistsEx) {
|
||||
try {
|
||||
return caseDb.getAttributeType(typeName);
|
||||
} catch (TskCoreException ex) {
|
||||
throw new BlackboardException("Failed to get or add attribute type", ex);
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
throw new BlackboardException("Failed to get or add attribute type", ex);
|
||||
return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddAttributeType(typeName, valueType, displayName);
|
||||
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
|
||||
throw new BlackboardException(ex.getMessage(), ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Closes the blackboard.
|
||||
*
|
||||
* @throws IOException If there is a problem closing the blackboard.
|
||||
*/
|
||||
@Override
|
||||
public synchronized void close() throws IOException {
|
||||
caseDb = null;
|
||||
public synchronized void close() {
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -26,6 +26,7 @@ import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import org.openide.util.Lookup;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
|
||||
@ -39,7 +40,6 @@ public class Services implements Closeable {
|
||||
private final FileManager fileManager;
|
||||
private final TagsManager tagsManager;
|
||||
private final KeywordSearchService keywordSearchService;
|
||||
private final Blackboard blackboard;
|
||||
|
||||
/**
|
||||
* Constructs a collection of case-level services (e.g., file manager, tags
|
||||
@ -59,9 +59,6 @@ public class Services implements Closeable {
|
||||
//null safe so that the functional tests run with no issues.
|
||||
keywordSearchService = Lookup.getDefault().lookup(KeywordSearchService.class);
|
||||
services.add(keywordSearchService);
|
||||
|
||||
blackboard = new Blackboard(caseDb);
|
||||
services.add(blackboard);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -95,9 +92,21 @@ public class Services implements Closeable {
|
||||
* Gets the blackboard service for the current case.
|
||||
*
|
||||
* @return The blackboard service for the current case.
|
||||
*
|
||||
* @deprecated Use org.sleuthkit.autopsy.casemodule.getCaseBlackboard instead
|
||||
*/
|
||||
@Deprecated
|
||||
public Blackboard getBlackboard() {
|
||||
return blackboard;
|
||||
return new Blackboard();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the TSK Blackboard for the current case.
|
||||
*
|
||||
* @return @org.sleuthkit.datamodel.Blackboard Blackboard for the current case.
|
||||
*/
|
||||
public org.sleuthkit.datamodel.Blackboard getCaseBlackboard() {
|
||||
return Case.getCurrentCase().getSleuthkitCase().getBlackboard();
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -23,6 +23,7 @@ import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import static java.lang.Boolean.FALSE;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.List;
|
||||
@ -34,35 +35,40 @@ import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
|
||||
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* Listen for ingest events and update entries in the Central Repository
|
||||
* database accordingly
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Correlation Engine"})
|
||||
public class IngestEventsListener {
|
||||
|
||||
private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
|
||||
private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
|
||||
|
||||
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
|
||||
private static int correlationModuleInstanceCount;
|
||||
@ -171,8 +177,7 @@ public class IngestEventsListener {
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure the listener to flag devices previously seen in other cases or
|
||||
* not.
|
||||
* Configure the listener to flag previously seen devices or not.
|
||||
*
|
||||
* @param value True to flag seen devices; otherwise false.
|
||||
*/
|
||||
@ -189,86 +194,68 @@ public class IngestEventsListener {
|
||||
createCrProperties = value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an Interesting Item artifact based on a new artifact being previously seen.
|
||||
* @param originalArtifact Original artifact that we want to flag
|
||||
* @param caseDisplayNames List of case names artifact was previously seen in
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
|
||||
"IngestEventsListener.prevCaseComment.text=Previous Case: ",
|
||||
"IngestEventsListener.ingestmodule.name=Correlation Engine"})
|
||||
static private void postCorrelatedBadArtifactToBlackboard(BlackboardArtifact bbArtifact, List<String> caseDisplayNames) {
|
||||
"IngestEventsListener.prevCaseComment.text=Previous Case: "})
|
||||
static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames) {
|
||||
|
||||
try {
|
||||
String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevTaggedSet_text()));
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
|
||||
|
||||
SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
|
||||
AbstractFile abstractFile = tskCase.getAbstractFileById(bbArtifact.getObjectID());
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
|
||||
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
|
||||
tifArtifact.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard.indexArtifact(tifArtifact);
|
||||
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
|
||||
// fire event to notify UI of this new artifact
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
|
||||
} catch (IllegalStateException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
|
||||
}
|
||||
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevTaggedSet_text()),
|
||||
new BlackboardAttribute(
|
||||
TSK_COMMENT, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
|
||||
new BlackboardAttribute(
|
||||
TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
|
||||
originalArtifact.getArtifactID()));
|
||||
makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Interesting Aritfact hit for a device which was previously seen
|
||||
* Create an Interesting Artifact hit for a device which was previously seen
|
||||
* in the central repository.
|
||||
*
|
||||
* @param bbArtifact the artifact to create the interesting item for
|
||||
* @param originalArtifact the artifact to create the interesting item for
|
||||
*/
|
||||
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
|
||||
"# {0} - typeName",
|
||||
"# {1} - count",
|
||||
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
|
||||
static private void postCorrelatedPreviousArtifactToBlackboard(BlackboardArtifact bbArtifact) {
|
||||
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact) {
|
||||
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevExists_text()),
|
||||
new BlackboardAttribute(
|
||||
TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
|
||||
originalArtifact.getArtifactID()));
|
||||
makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
|
||||
}
|
||||
|
||||
/**
|
||||
* Make an interesting item artifact to flag the passed in artifact.
|
||||
* @param originalArtifact Artifact in current case we want to flag
|
||||
* @param attributesForNewArtifact Attributes to assign to the new Interesting items artifact
|
||||
*/
|
||||
private static void makeAndPostInterestingArtifact(BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact) {
|
||||
try {
|
||||
String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
BlackboardAttribute att = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.IngestEventsListener_prevExists_text());
|
||||
attributes.add(att);
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
|
||||
|
||||
SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
|
||||
AbstractFile abstractFile = bbArtifact.getSleuthkitCase().getAbstractFileById(bbArtifact.getObjectID());
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
|
||||
SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
|
||||
AbstractFile abstractFile = tskCase.getAbstractFileById(originalArtifact.getObjectID());
|
||||
Blackboard blackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
|
||||
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
|
||||
tifArtifact.addAttributes(attributes);
|
||||
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_ARTIFACT_HIT, attributesForNewArtifact)) {
|
||||
BlackboardArtifact newInterestingArtifact = abstractFile.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
|
||||
newInterestingArtifact.addAttributes(attributesForNewArtifact);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard.indexArtifact(tifArtifact);
|
||||
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
|
||||
blackboard.postArtifact(newInterestingArtifact, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newInterestingArtifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
|
||||
// fire event to notify UI of this new artifact
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
|
||||
@ -283,7 +270,7 @@ public class IngestEventsListener {
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
//if ingest is running we want there to check if there is a Correlation Engine module running
|
||||
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
|
||||
//in these cases we still want to create correlation attributes for those artifacts when appropriate
|
||||
//in these cases we still want to create correlation attributesForNewArtifact for those artifacts when appropriate
|
||||
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
|
||||
EamDb dbManager;
|
||||
try {
|
||||
@ -319,7 +306,7 @@ public class IngestEventsListener {
|
||||
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
|
||||
case DATA_SOURCE_ANALYSIS_COMPLETED: {
|
||||
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
|
||||
@ -333,10 +320,10 @@ public class IngestEventsListener {
|
||||
}
|
||||
|
||||
private final class AnalysisCompleteTask implements Runnable {
|
||||
|
||||
|
||||
private final EamDb dbManager;
|
||||
private final PropertyChangeEvent event;
|
||||
|
||||
|
||||
private AnalysisCompleteTask(EamDb db, PropertyChangeEvent evt) {
|
||||
dbManager = db;
|
||||
event = evt;
|
||||
@ -362,15 +349,15 @@ public class IngestEventsListener {
|
||||
long dataSourceObjectId = -1;
|
||||
try {
|
||||
dataSource = ((DataSourceAnalysisCompletedEvent) event).getDataSource();
|
||||
|
||||
|
||||
/*
|
||||
* We only care about Images for the purpose of updating hash
|
||||
* values.
|
||||
* We only care about Images for the purpose of
|
||||
* updating hash values.
|
||||
*/
|
||||
if (!(dataSource instanceof Image)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
dataSourceName = dataSource.getName();
|
||||
dataSourceObjectId = dataSource.getId();
|
||||
|
||||
@ -398,7 +385,7 @@ public class IngestEventsListener {
|
||||
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
|
||||
correlationDataSource.setMd5(imageMd5Hash);
|
||||
}
|
||||
|
||||
|
||||
String imageSha1Hash = image.getSha1();
|
||||
if (imageSha1Hash == null) {
|
||||
imageSha1Hash = "";
|
||||
@ -407,7 +394,7 @@ public class IngestEventsListener {
|
||||
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
|
||||
correlationDataSource.setSha1(imageSha1Hash);
|
||||
}
|
||||
|
||||
|
||||
String imageSha256Hash = image.getSha256();
|
||||
if (imageSha256Hash == null) {
|
||||
imageSha256Hash = "";
|
||||
@ -441,8 +428,8 @@ public class IngestEventsListener {
|
||||
private final boolean createCorrelationAttributes;
|
||||
|
||||
private DataAddedTask(EamDb db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes) {
|
||||
dbManager = db;
|
||||
event = evt;
|
||||
this.dbManager = db;
|
||||
this.event = evt;
|
||||
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
|
||||
this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
|
||||
this.createCorrelationAttributes = createCorrelationAttributes;
|
||||
@ -476,7 +463,7 @@ public class IngestEventsListener {
|
||||
try {
|
||||
caseDisplayNames = dbManager.getListCasesHavingArtifactInstancesKnownBad(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
if (!caseDisplayNames.isEmpty()) {
|
||||
postCorrelatedBadArtifactToBlackboard(bbArtifact,
|
||||
makeAndPostPreviousNotableArtifact(bbArtifact,
|
||||
caseDisplayNames);
|
||||
}
|
||||
} catch (CorrelationAttributeNormalizationException ex) {
|
||||
@ -484,7 +471,7 @@ public class IngestEventsListener {
|
||||
}
|
||||
}
|
||||
if (flagPreviousItemsEnabled
|
||||
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|
||||
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|
||||
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
|
||||
@ -494,7 +481,7 @@ public class IngestEventsListener {
|
||||
List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
|
||||
for (CorrelationAttributeInstance instance : previousOccurences) {
|
||||
if (!instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) {
|
||||
postCorrelatedPreviousArtifactToBlackboard(bbArtifact);
|
||||
makeAndPostPreviousSeenArtifact(bbArtifact);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -19,41 +19,44 @@
|
||||
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
|
||||
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
|
||||
import org.sleuthkit.autopsy.core.RuntimeProperties;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
|
||||
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import org.sleuthkit.datamodel.HashUtility;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
|
||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
|
||||
/**
|
||||
* Ingest module for inserting entries into the Central Repository database on
|
||||
@ -63,6 +66,8 @@ import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
"CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
|
||||
final class CentralRepoIngestModule implements FileIngestModule {
|
||||
|
||||
private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
|
||||
|
||||
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
|
||||
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = true;
|
||||
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
|
||||
@ -74,10 +79,10 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
private long jobId;
|
||||
private CorrelationCase eamCase;
|
||||
private CorrelationDataSource eamDataSource;
|
||||
private Blackboard blackboard;
|
||||
private CorrelationAttributeInstance.Type filesType;
|
||||
private final boolean flagTaggedNotableItems;
|
||||
private final boolean flagPreviouslySeenDevices;
|
||||
private Blackboard blackboard;
|
||||
private final boolean createCorrelationProperties;
|
||||
|
||||
/**
|
||||
@ -104,7 +109,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
}
|
||||
|
||||
try {
|
||||
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
|
||||
return ProcessResult.ERROR;
|
||||
@ -158,7 +163,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
}
|
||||
}
|
||||
|
||||
// insert this file into the central repository
|
||||
// insert this file into the central repository
|
||||
if (createCorrelationProperties) {
|
||||
try {
|
||||
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
|
||||
@ -271,7 +276,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
|
||||
// Don't allow sqlite central repo databases to be used for multi user cases
|
||||
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
|
||||
&& (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
|
||||
&& (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
|
||||
logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
|
||||
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
|
||||
}
|
||||
@ -308,7 +313,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
// if we are the first thread / module for this job, then make sure the case
|
||||
// and image exist in the DB before we associate artifacts with it.
|
||||
if (refCounter.incrementAndGet(jobId)
|
||||
== 1) {
|
||||
== 1) {
|
||||
// ensure we have this data source in the EAM DB
|
||||
try {
|
||||
if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
|
||||
@ -330,41 +335,32 @@ final class CentralRepoIngestModule implements FileIngestModule {
|
||||
*/
|
||||
private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames) {
|
||||
|
||||
Collection<BlackboardAttribute> attributes = Arrays.asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
|
||||
new BlackboardAttribute(
|
||||
TSK_COMMENT, MODULE_NAME,
|
||||
Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))));
|
||||
try {
|
||||
String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
|
||||
Bundle.CentralRepoIngestModule_prevTaggedSet_text()));
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
|
||||
Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
|
||||
|
||||
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
|
||||
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
|
||||
tifArtifact.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(tifArtifact);
|
||||
blackboard.postArtifact(tifArtifact, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
|
||||
// send inbox message
|
||||
sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash());
|
||||
|
||||
// fire event to notify UI of this new artifact
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
|
||||
} catch (IllegalStateException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -57,7 +57,7 @@ CVTTopComponent.browseVisualizeTabPane.AccessibleContext.accessibleName=Visualiz
|
||||
CVTTopComponent.vizPanel.TabConstraints.tabTitle_1=Visualize
|
||||
VisualizationPanel.fitGraphButton.text=
|
||||
VisualizationPanel.jTextArea1.text=Right-click an account in the Browse Accounts table, and select 'Visualize' to begin.
|
||||
VisualizationPanel.fitZoomButton.toolTipText=Fit Visualization
|
||||
VisualizationPanel.fitZoomButton.toolTipText=Fit visualization to available space.
|
||||
VisualizationPanel.fitZoomButton.text=
|
||||
# {0} - layout name
|
||||
VisualizationPanel.layoutFail.text={0} layout failed. Try a different layout.
|
||||
@ -67,11 +67,11 @@ VisualizationPanel.lockAction.pluralText=Lock Selected Accounts
|
||||
VisualizationPanel.lockAction.singularText=Lock Selected Account
|
||||
VisualizationPanel.unlockAction.pluralText=Unlock Selected Accounts
|
||||
VisualizationPanel.unlockAction.singularText=Unlock Selected Account
|
||||
VisualizationPanel.zoomActualButton.toolTipText=Reset Zoom
|
||||
VisualizationPanel.zoomActualButton.toolTipText=Reset visualization default zoom state.
|
||||
VisualizationPanel.zoomActualButton.text=
|
||||
VisualizationPanel.zoomInButton.toolTipText=Zoom In
|
||||
VisualizationPanel.zoomInButton.toolTipText=Zoom visualization in.
|
||||
VisualizationPanel.zoomInButton.text=
|
||||
VisualizationPanel.zoomOutButton.toolTipText=Zoom Out
|
||||
VisualizationPanel.zoomOutButton.toolTipText=Zoom visualization out.
|
||||
VisualizationPanel.zoomOutButton.text=
|
||||
VisualizationPanel.fastOrganicLayoutButton.text=
|
||||
VisualizationPanel.backButton.text_1=
|
||||
@ -81,17 +81,17 @@ VisualizationPanel.hierarchyLayoutButton.text=Hierarchical
|
||||
VisualizationPanel.clearVizButton.text_1=
|
||||
VisualizationPanel.snapshotButton.text_1=Snapshot Report
|
||||
VisualizationPanel.clearVizButton.actionCommand=
|
||||
VisualizationPanel.backButton.toolTipText=Click to Go Back
|
||||
VisualizationPanel.forwardButton.toolTipText=Click to Go Forward
|
||||
VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to Redraw Chart
|
||||
VisualizationPanel.clearVizButton.toolTipText=Click to Clear Chart
|
||||
VisualizationPanel.backButton.toolTipText=Click to go back to previous state.
|
||||
VisualizationPanel.forwardButton.toolTipText=Click to move state forward.
|
||||
VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to redraw visualization.
|
||||
VisualizationPanel.clearVizButton.toolTipText=Click to clear visualization.
|
||||
FiltersPanel.limitHeaderLabel.text=Communications Limit:
|
||||
FiltersPanel.mostRecentLabel.text=Most Recent:
|
||||
FiltersPanel.limitErrorMsgLabel.text=Invalid integer value.
|
||||
VisualizationPanel.forwardButton.text=
|
||||
VisualizationPanel.zoomPercentLabel.text=100%
|
||||
VisualizationPanel.zoomLabel.text=Zoom:
|
||||
VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot Report
|
||||
VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot report.
|
||||
VisualizationPanel_action_dialogs_title=Communications
|
||||
VisualizationPanel_action_name_text=Snapshot Report
|
||||
VisualizationPanel_module_name=Communications
|
||||
|
@ -196,6 +196,53 @@ public final class ExecUtil {
|
||||
}
|
||||
return process.exitValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for the given process to finish, using the given ProcessTerminator.
|
||||
*
|
||||
* @param command The command that was used to start the process. Used
|
||||
* only for logging purposes.
|
||||
* @param process The process to wait for.
|
||||
* @param terminator The ProcessTerminator used to determine if the process
|
||||
* should be killed.
|
||||
*
|
||||
* @returnthe exit value of the process
|
||||
*
|
||||
* @throws SecurityException if a security manager exists and vetoes any
|
||||
* aspect of running the process.
|
||||
* @throws IOException if an I/o error occurs.
|
||||
*/
|
||||
public static int waitForTermination(String command, Process process, ProcessTerminator terminator) throws SecurityException, IOException {
|
||||
return ExecUtil.waitForTermination(command, process, ExecUtil.DEFAULT_TIMEOUT, ExecUtil.DEFAULT_TIMEOUT_UNITS, terminator);
|
||||
}
|
||||
|
||||
private static int waitForTermination(String command, Process process, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException {
|
||||
try {
|
||||
do {
|
||||
process.waitFor(timeOut, units);
|
||||
if (process.isAlive() && terminator.shouldTerminateProcess()) {
|
||||
killProcess(process);
|
||||
try {
|
||||
process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
|
||||
} catch (InterruptedException exx) {
|
||||
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
|
||||
}
|
||||
}
|
||||
} while (process.isAlive());
|
||||
} catch (InterruptedException ex) {
|
||||
if (process.isAlive()) {
|
||||
killProcess(process);
|
||||
}
|
||||
try {
|
||||
process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
|
||||
} catch (InterruptedException exx) {
|
||||
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
|
||||
}
|
||||
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, "Thread interrupted while running {0}", command); // NON-NLS
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
return process.exitValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Kills a process and its children
|
||||
|
@ -1,19 +1,19 @@
|
||||
/*
|
||||
/*
|
||||
*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2012 Basis Technology Corp.
|
||||
*
|
||||
*
|
||||
* Copyright 2012-2018 Basis Technology Corp.
|
||||
*
|
||||
* Copyright 2012 42six Solutions.
|
||||
* Contact: aebadirad <at> 42six <dot> com
|
||||
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@ -27,12 +27,12 @@ import java.sql.DriverManager;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import java.util.logging.Level;
|
||||
|
||||
/**
|
||||
* Database connection class & utilities *
|
||||
* Database connection class & utilities.
|
||||
*/
|
||||
public class SQLiteDBConnect {
|
||||
public class SQLiteDBConnect implements AutoCloseable {
|
||||
|
||||
public String sDriver = "";
|
||||
public String sUrl = null;
|
||||
@ -52,7 +52,7 @@ public class SQLiteDBConnect {
|
||||
* quick and dirty constructor to test the database passing the
|
||||
* DriverManager name and the fully loaded url to handle
|
||||
*/
|
||||
/*
|
||||
/*
|
||||
* NB this will typically be available if you make this class concrete and
|
||||
* not abstract
|
||||
*/
|
||||
@ -104,9 +104,13 @@ public class SQLiteDBConnect {
|
||||
statement.executeUpdate(instruction);
|
||||
}
|
||||
|
||||
// processes an array of instructions e.g. a set of SQL command strings passed from a file
|
||||
//NB you should ensure you either handle empty lines in files by either removing them or parsing them out
|
||||
// since they will generate spurious SQLExceptions when they are encountered during the iteration....
|
||||
/** processes an array of instructions e.g. a set of SQL command strings
|
||||
* passed from a file
|
||||
*
|
||||
* NB you should ensure you either handle empty lines in files by either
|
||||
* removing them or parsing them out since they will generate spurious
|
||||
* SQLExceptions when they are encountered during the iteration....
|
||||
*/
|
||||
public void executeStmt(String[] instructionSet) throws SQLException {
|
||||
for (int i = 0; i < instructionSet.length; i++) {
|
||||
executeStmt(instructionSet[i]);
|
||||
@ -120,7 +124,14 @@ public class SQLiteDBConnect {
|
||||
public void closeConnection() {
|
||||
try {
|
||||
conn.close();
|
||||
} catch (Exception ignore) {
|
||||
} catch (SQLException ex) {
|
||||
logger.log(Level.WARNING, "Unable to close connection to SQLite DB at " + sUrl, ex);
|
||||
}
|
||||
//Implementing Autoclosable.close() allows this class to be used in try-with-resources.
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
closeConnection();
|
||||
}
|
||||
}
|
||||
|
@ -160,6 +160,7 @@ KeywordHits.createSheet.numChildren.name=Number of Children
|
||||
KeywordHits.kwHits.text=Keyword Hits
|
||||
KeywordHits.simpleLiteralSearch.text=Single Literal Keyword Search
|
||||
KeywordHits.singleRegexSearch.text=Single Regular Expression Search
|
||||
LayoutFileNode.getActions.viewFileInDir.text=View File in Directory
|
||||
OpenIDE-Module-Name=DataModel
|
||||
AbstractContentChildren.CreateTSKNodeVisitor.exception.noNodeMsg=No Node defined for the given SleuthkitItem
|
||||
AbstractContentChildren.createAutopsyNodeVisitor.exception.noNodeMsg=No Node defined for the given DisplayableItem
|
||||
|
@ -51,7 +51,7 @@ public abstract class DisplayableItemNode extends AbstractNode {
|
||||
*
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
|
||||
protected static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
|
||||
BlackboardAttribute pathIDAttribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
|
||||
if (pathIDAttribute != null) {
|
||||
long contentID = pathIDAttribute.getValueLong();
|
||||
|
@ -34,13 +34,10 @@ import java.util.logging.Level;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Services;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.FsContent;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
|
@ -1,16 +1,16 @@
|
||||
/*
|
||||
* Sample module in the public domain. Feel free to use this as a template
|
||||
* for your modules.
|
||||
*
|
||||
*
|
||||
* Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
|
||||
*
|
||||
* This is free and unencumbered software released into the public domain.
|
||||
*
|
||||
*
|
||||
* Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||
* distribute this software, either in source code form or as a compiled
|
||||
* binary, for any purpose, commercial or non-commercial, and by any
|
||||
* means.
|
||||
*
|
||||
*
|
||||
* In jurisdictions that recognize copyright laws, the author or authors
|
||||
* of this software dedicate any and all copyright interest in the
|
||||
* software to the public domain. We make this dedication for the benefit
|
||||
@ -18,34 +18,31 @@
|
||||
* successors. We intend this dedication to be an overt act of
|
||||
* relinquishment in perpetuity of all present and future rights to this
|
||||
* software under copyright law.
|
||||
*
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
* OTHER DEALINGS IN THE SOFTWARE.
|
||||
* OTHER DEALINGS IN THE SOFTWARE.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.examples;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.logging.Level;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
@ -56,7 +53,7 @@ import org.sleuthkit.datamodel.TskData;
|
||||
class SampleFileIngestModule implements FileIngestModule {
|
||||
|
||||
private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
|
||||
private static BlackboardAttribute.ATTRIBUTE_TYPE attrType = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
|
||||
private static final BlackboardAttribute.ATTRIBUTE_TYPE ATTR_TYPE = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
|
||||
private final boolean skipKnownFiles;
|
||||
private IngestJobContext context = null;
|
||||
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
|
||||
@ -76,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule {
|
||||
|
||||
// Skip anything other than actual file system files.
|
||||
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|
||||
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
|| (file.isFile() == false)) {
|
||||
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
|| (file.isFile() == false)) {
|
||||
return IngestModule.ProcessResult.OK;
|
||||
}
|
||||
|
||||
@ -101,7 +98,7 @@ class SampleFileIngestModule implements FileIngestModule {
|
||||
|
||||
// Make an attribute using the ID for the attribute attrType that
|
||||
// was previously created.
|
||||
BlackboardAttribute attr = new BlackboardAttribute(attrType, SampleIngestModuleFactory.getModuleName(), count);
|
||||
BlackboardAttribute attr = new BlackboardAttribute(ATTR_TYPE, SampleIngestModuleFactory.getModuleName(), count);
|
||||
|
||||
// Add the to the general info artifact for the file. In a
|
||||
// real module, you would likely have more complex data types
|
||||
@ -113,13 +110,15 @@ class SampleFileIngestModule implements FileIngestModule {
|
||||
// management of shared data.
|
||||
addToBlackboardPostCount(context.getJobId(), 1L);
|
||||
|
||||
// Fire an event to notify any listeners for blackboard postings.
|
||||
ModuleDataEvent event = new ModuleDataEvent(SampleIngestModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_GEN_INFO);
|
||||
IngestServices.getInstance().fireModuleDataEvent(event);
|
||||
/*
|
||||
* post the artifact which will index the artifact for keyword
|
||||
* search, and fire an event to notify UI of this new artifact
|
||||
*/
|
||||
file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
|
||||
|
||||
return IngestModule.ProcessResult.OK;
|
||||
|
||||
} catch (TskCoreException ex) {
|
||||
} catch (TskCoreException | Blackboard.BlackboardException ex) {
|
||||
IngestServices ingestServices = IngestServices.getInstance();
|
||||
Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName());
|
||||
logger.log(Level.SEVERE, "Error processing file (id = " + file.getId() + ")", ex);
|
||||
|
@ -329,14 +329,38 @@ public final class IngestJobSettings {
|
||||
for (IngestModuleFactory moduleFactory : moduleFactories) {
|
||||
loadedModuleNames.add(moduleFactory.getModuleDisplayName());
|
||||
}
|
||||
|
||||
/**
|
||||
* Hard coding Plaso to be disabled by default. loadedModuleNames is
|
||||
* passed below as the default list of enabled modules so briefly remove
|
||||
* Plaso from loaded modules to get the list of enabled and disabled
|
||||
* modules names. Then put Plaso back into loadedModulesNames to let the
|
||||
* rest of the code continue as before.
|
||||
*/
|
||||
final String plasoModuleName = "Plaso";
|
||||
boolean plasoLoaded = loadedModuleNames.contains(plasoModuleName);
|
||||
if (plasoLoaded) {
|
||||
loadedModuleNames.remove(plasoModuleName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the enabled/disabled ingest modules settings for this context. By
|
||||
* default, all loaded modules are enabled.
|
||||
* default, all loaded modules except Plaso are enabled.
|
||||
*/
|
||||
HashSet<String> enabledModuleNames = getModulesNames(executionContext, IngestJobSettings.ENABLED_MODULES_PROPERTY, makeCsvList(loadedModuleNames));
|
||||
HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, ""); //NON-NLS
|
||||
HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, plasoModuleName); //NON-NLS
|
||||
|
||||
// If plaso was loaded, but appears in neither the enabled nor the
|
||||
// disabled list, add it to the disabled list.
|
||||
if (!enabledModuleNames.contains(plasoModuleName) && !disabledModuleNames.contains(plasoModuleName)) {
|
||||
disabledModuleNames.add(plasoModuleName);
|
||||
}
|
||||
|
||||
//Put plaso back into loadedModuleNames
|
||||
if (plasoLoaded) {
|
||||
loadedModuleNames.add(plasoModuleName);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check for missing modules and create warnings if any are found.
|
||||
*/
|
||||
|
@ -104,9 +104,13 @@ public final class IngestServices {
|
||||
*
|
||||
* @param moduleDataEvent A module data event, i.e., an event that
|
||||
* encapsulates artifact data.
|
||||
*
|
||||
* @deprecated use org.sleuthkit.datamodel.Blackboard.postArtifact instead.
|
||||
*/
|
||||
@Deprecated
|
||||
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
|
||||
IngestManager.getInstance().fireIngestModuleDataEvent(moduleDataEvent);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
@ -171,7 +175,6 @@ public final class IngestServices {
|
||||
*
|
||||
* @param moduleName A unique identifier for the module.
|
||||
* @param settings A mapping of setting names to setting values.
|
||||
*
|
||||
*/
|
||||
public void setConfigSettings(String moduleName, Map<String, String> settings) {
|
||||
ModuleSettings.setConfigSettings(moduleName, settings);
|
||||
|
@ -53,7 +53,7 @@ public class ModuleDataEvent extends ChangeEvent {
|
||||
private Collection<BlackboardArtifact> artifacts;
|
||||
|
||||
/**
|
||||
* @param moduleName Module name
|
||||
* @param moduleName Module name
|
||||
* @param artifactType Type of artifact that was posted to blackboard
|
||||
*/
|
||||
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType) {
|
||||
@ -63,9 +63,9 @@ public class ModuleDataEvent extends ChangeEvent {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param moduleName Module Name
|
||||
* @param moduleName Module Name
|
||||
* @param blackboardArtifactType Type of the blackboard artifact posted to
|
||||
* the blackboard
|
||||
* the blackboard
|
||||
*/
|
||||
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType) {
|
||||
super(blackboardArtifactType);
|
||||
@ -74,10 +74,10 @@ public class ModuleDataEvent extends ChangeEvent {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param moduleName Module name
|
||||
* @param moduleName Module name
|
||||
* @param blackboardArtifactType Type of artifact posted to the blackboard
|
||||
* @param artifacts List of specific artifact ID values that were added to
|
||||
* blackboard
|
||||
* @param artifacts List of specific artifact ID values that
|
||||
* were added to blackboard
|
||||
*/
|
||||
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType, Collection<BlackboardArtifact> artifacts) {
|
||||
this(moduleName, blackboardArtifactType);
|
||||
@ -85,10 +85,10 @@ public class ModuleDataEvent extends ChangeEvent {
|
||||
}
|
||||
|
||||
/**
|
||||
* @param moduleName Module name
|
||||
* @param moduleName Module name
|
||||
* @param artifactType Type of artifact that was posted to blackboard
|
||||
* @param artifacts List of specific artifact values that were added to
|
||||
* blackboard
|
||||
* @param artifacts List of specific artifact values that were added to
|
||||
* blackboard
|
||||
*/
|
||||
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType, Collection<BlackboardArtifact> artifacts) {
|
||||
this(moduleName, artifactType);
|
||||
|
@ -48,13 +48,20 @@ public interface KeywordSearchService extends Closeable {
|
||||
* all of its attributes.
|
||||
*
|
||||
* @param artifact The artifact to index.
|
||||
*
|
||||
* @deprecated Call org.sleuthkit.datamodel.Blackboard.postArtifact
|
||||
* instead.
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
@Deprecated
|
||||
public void indexArtifact(BlackboardArtifact artifact) throws TskCoreException;
|
||||
|
||||
/**
|
||||
* Add the given Content object to the text index.
|
||||
* Add the given Content object to the text index. This message should only
|
||||
* be used in atypical cases, such as indexing a report. Artifacts are indexed
|
||||
* when org.sleuthkit.datamodel.Blackboard.postArtifact
|
||||
* is called and files are indexed during ingest.
|
||||
*
|
||||
* @param content The content to index.
|
||||
*
|
||||
|
@ -25,6 +25,7 @@ import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
import java.util.Arrays;
|
||||
import org.openide.util.Exceptions;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
@ -38,6 +39,7 @@ import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskDataException;
|
||||
@ -297,11 +299,12 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
|
||||
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId());
|
||||
verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
|
||||
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment));
|
||||
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(DataSourceIntegrityModuleFactory.getModuleName(),
|
||||
BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED));
|
||||
Case.getCurrentCase().getServices().getCaseBlackboard().postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
|
||||
}
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
Exceptions.printStackTrace(ex);
|
||||
}
|
||||
}
|
||||
|
||||
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
|
||||
|
@ -34,21 +34,20 @@ import java.util.logging.Level;
|
||||
import net.sf.sevenzipjbinding.ArchiveFormat;
|
||||
import static net.sf.sevenzipjbinding.ArchiveFormat.RAR;
|
||||
import net.sf.sevenzipjbinding.ExtractAskMode;
|
||||
import net.sf.sevenzipjbinding.ISequentialOutStream;
|
||||
import net.sf.sevenzipjbinding.ISevenZipInArchive;
|
||||
import net.sf.sevenzipjbinding.SevenZip;
|
||||
import net.sf.sevenzipjbinding.SevenZipException;
|
||||
import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
|
||||
import net.sf.sevenzipjbinding.ExtractOperationResult;
|
||||
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
|
||||
import net.sf.sevenzipjbinding.ICryptoGetTextPassword;
|
||||
import net.sf.sevenzipjbinding.ISequentialOutStream;
|
||||
import net.sf.sevenzipjbinding.ISevenZipInArchive;
|
||||
import net.sf.sevenzipjbinding.PropID;
|
||||
import net.sf.sevenzipjbinding.SevenZip;
|
||||
import net.sf.sevenzipjbinding.SevenZipException;
|
||||
import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
|
||||
import org.netbeans.api.progress.ProgressHandle;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.coreutils.FileUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
@ -58,36 +57,43 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMonitor;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.DerivedFile;
|
||||
import org.sleuthkit.datamodel.EncodedFileOutputStream;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
class SevenZipExtractor {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName());
|
||||
private IngestServices services = IngestServices.getInstance();
|
||||
private final IngestJobContext context;
|
||||
private final FileTypeDetector fileTypeDetector;
|
||||
private static final String MODULE_NAME = EmbeddedFileExtractorModuleFactory.getModuleName();
|
||||
|
||||
//encryption type strings
|
||||
private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
|
||||
private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");
|
||||
|
||||
//zip bomb detection
|
||||
private static final int MAX_DEPTH = 4;
|
||||
private static final int MAX_COMPRESSION_RATIO = 600;
|
||||
private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L;
|
||||
private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB
|
||||
|
||||
private IngestServices services = IngestServices.getInstance();
|
||||
private final IngestJobContext context;
|
||||
private final FileTypeDetector fileTypeDetector;
|
||||
|
||||
private String moduleDirRelative;
|
||||
private String moduleDirAbsolute;
|
||||
|
||||
@ -244,44 +250,43 @@ class SevenZipExtractor {
|
||||
*/
|
||||
private void flagRootArchiveAsZipBomb(Archive rootArchive, AbstractFile archiveFile, String details, String escapedFilePath) {
|
||||
rootArchive.flagAsZipBomb();
|
||||
logger.log(Level.INFO, details); //NON-NLS
|
||||
String msg = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);
|
||||
logger.log(Level.INFO, details);
|
||||
try {
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
|
||||
"Possible Zip Bomb"));
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
|
||||
EmbeddedFileExtractorModuleFactory.getModuleName(),
|
||||
Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())));
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
|
||||
EmbeddedFileExtractorModuleFactory.getModuleName(),
|
||||
details));
|
||||
Collection<BlackboardAttribute> attributes = Arrays.asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
"Possible Zip Bomb"),
|
||||
new BlackboardAttribute(
|
||||
TSK_DESCRIPTION, MODULE_NAME,
|
||||
Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())),
|
||||
new BlackboardAttribute(
|
||||
TSK_COMMENT, MODULE_NAME,
|
||||
details));
|
||||
|
||||
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(archiveFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
|
||||
if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(TSK_INTERESTING_FILE_HIT);
|
||||
artifact.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(artifact);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of this
|
||||
* new artifact
|
||||
*/
|
||||
blackboard.postArtifact(artifact, MODULE_NAME);
|
||||
|
||||
String msg = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
|
||||
|
||||
services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
|
||||
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
|
||||
}
|
||||
|
||||
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
|
||||
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error creating blackboard artifact for Zip Bomb Detection for file: " + escapedFilePath, ex); //NON-NLS
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
@ -467,13 +472,11 @@ class SevenZipExtractor {
|
||||
}
|
||||
|
||||
/**
|
||||
* Unpack the file to local folder and return a list of derived files
|
||||
* Unpack the file to local folder.
|
||||
*
|
||||
* @param archiveFile file to unpack
|
||||
* @param depthMap - a concurrent hashmap which keeps track of the depth
|
||||
* of all nested archives, key of objectID
|
||||
*
|
||||
* @return true if unpacking is complete
|
||||
*/
|
||||
void unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap) {
|
||||
unpack(archiveFile, depthMap, null);
|
||||
@ -510,7 +513,7 @@ class SevenZipExtractor {
|
||||
//recursion depth check for zip bomb
|
||||
Archive parentAr;
|
||||
try {
|
||||
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
|
||||
unpackSuccessful = false;
|
||||
@ -626,7 +629,7 @@ class SevenZipExtractor {
|
||||
escapedArchiveFilePath, archiveItemPath);
|
||||
String details = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
|
||||
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
|
||||
services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
|
||||
logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, archiveItemPath}); //NON-NLS
|
||||
logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS
|
||||
unpackSuccessful = false;
|
||||
@ -654,7 +657,7 @@ class SevenZipExtractor {
|
||||
localFile.createNewFile();
|
||||
} catch (IOException e) {
|
||||
logger.log(Level.SEVERE, "Error creating extracted file: "//NON-NLS
|
||||
+ localFile.getAbsolutePath(), e);
|
||||
+ localFile.getAbsolutePath(), e);
|
||||
}
|
||||
}
|
||||
} catch (SecurityException e) {
|
||||
@ -689,7 +692,7 @@ class SevenZipExtractor {
|
||||
//inArchiveItemIndex. False indicates non-test mode
|
||||
inArchive.extract(extractionIndices, false, archiveCallBack);
|
||||
|
||||
unpackSuccessful = unpackSuccessful & archiveCallBack.wasSuccessful();
|
||||
unpackSuccessful &= archiveCallBack.wasSuccessful();
|
||||
|
||||
archiveDetailsMap = null;
|
||||
|
||||
@ -730,7 +733,7 @@ class SevenZipExtractor {
|
||||
String details = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
|
||||
escapedArchiveFilePath, ex.getMessage());
|
||||
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
|
||||
services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
|
||||
}
|
||||
} finally {
|
||||
if (inArchive != null) {
|
||||
@ -760,18 +763,21 @@ class SevenZipExtractor {
|
||||
String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
|
||||
try {
|
||||
BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
|
||||
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType));
|
||||
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType));
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(artifact);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of this
|
||||
* new artifact
|
||||
*/
|
||||
blackboard.postArtifact(artifact, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
|
||||
}
|
||||
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS
|
||||
}
|
||||
@ -780,8 +786,8 @@ class SevenZipExtractor {
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
|
||||
String details = NbBundle.getMessage(SevenZipExtractor.class,
|
||||
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
|
||||
currentArchiveName, EmbeddedFileExtractorModuleFactory.getModuleName());
|
||||
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
|
||||
currentArchiveName, MODULE_NAME);
|
||||
services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
|
||||
}
|
||||
|
||||
// adding unpacked extracted derived files to the job after closing relevant resources.
|
||||
@ -871,7 +877,7 @@ class SevenZipExtractor {
|
||||
private final String localAbsPath;
|
||||
private final String localRelPath;
|
||||
|
||||
public InArchiveItemDetails(
|
||||
InArchiveItemDetails(
|
||||
SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode,
|
||||
String localAbsPath, String localRelPath) {
|
||||
this.unpackedNode = unpackedNode;
|
||||
@ -916,10 +922,10 @@ class SevenZipExtractor {
|
||||
|
||||
private boolean unpackSuccessful = true;
|
||||
|
||||
public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
|
||||
AbstractFile archiveFile, ProgressHandle progressHandle,
|
||||
Map<Integer, InArchiveItemDetails> archiveDetailsMap,
|
||||
String password, long freeDiskSpace) {
|
||||
StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
|
||||
AbstractFile archiveFile, ProgressHandle progressHandle,
|
||||
Map<Integer, InArchiveItemDetails> archiveDetailsMap,
|
||||
String password, long freeDiskSpace) {
|
||||
|
||||
this.inArchive = inArchive;
|
||||
this.progressHandle = progressHandle;
|
||||
@ -944,7 +950,7 @@ class SevenZipExtractor {
|
||||
*/
|
||||
@Override
|
||||
public ISequentialOutStream getStream(int inArchiveItemIndex,
|
||||
ExtractAskMode mode) throws SevenZipException {
|
||||
ExtractAskMode mode) throws SevenZipException {
|
||||
|
||||
this.inArchiveItemIndex = inArchiveItemIndex;
|
||||
|
||||
@ -970,7 +976,7 @@ class SevenZipExtractor {
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
|
||||
+ "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
|
||||
+ "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
|
||||
return null;
|
||||
}
|
||||
|
||||
@ -1002,7 +1008,7 @@ class SevenZipExtractor {
|
||||
: accessTime.getTime() / 1000;
|
||||
|
||||
progressHandle.progress(archiveFile.getName() + ": "
|
||||
+ (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
|
||||
+ (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
|
||||
inArchiveItemIndex);
|
||||
|
||||
}
|
||||
@ -1017,6 +1023,7 @@ class SevenZipExtractor {
|
||||
*/
|
||||
@Override
|
||||
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
|
||||
|
||||
final SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode
|
||||
= archiveDetailsMap.get(inArchiveItemIndex).getUnpackedNode();
|
||||
final String localRelPath = archiveDetailsMap.get(
|
||||
@ -1218,7 +1225,7 @@ class SevenZipExtractor {
|
||||
if (existingFile == null) {
|
||||
df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(),
|
||||
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
|
||||
node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(),
|
||||
node.isIsFile(), node.getParent().getFile(), "", MODULE_NAME,
|
||||
"", "", TskData.EncodingType.XOR1);
|
||||
statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS));
|
||||
} else {
|
||||
@ -1232,7 +1239,7 @@ class SevenZipExtractor {
|
||||
String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType();
|
||||
df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(),
|
||||
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
|
||||
node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(),
|
||||
node.isIsFile(), mimeType, "", MODULE_NAME,
|
||||
"", "", TskData.EncodingType.XOR1);
|
||||
} else {
|
||||
//ALREADY CURRENT - SKIP
|
||||
@ -1327,8 +1334,8 @@ class SevenZipExtractor {
|
||||
}
|
||||
|
||||
void addDerivedInfo(long size,
|
||||
boolean isFile,
|
||||
long ctime, long crtime, long atime, long mtime, String relLocalPath) {
|
||||
boolean isFile,
|
||||
long ctime, long crtime, long atime, long mtime, String relLocalPath) {
|
||||
this.size = size;
|
||||
this.isFile = isFile;
|
||||
this.ctime = ctime;
|
||||
|
@ -19,26 +19,24 @@
|
||||
package org.sleuthkit.autopsy.modules.encryptiondetection;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.Volume;
|
||||
import org.sleuthkit.datamodel.VolumeSystem;
|
||||
|
||||
@ -57,8 +55,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
|
||||
/**
|
||||
* Create an EncryptionDetectionDataSourceIngestModule object that will
|
||||
* detect volumes that are encrypted and create blackboard artifacts as
|
||||
* appropriate. The supplied EncryptionDetectionIngestJobSettings object is
|
||||
* used to configure the module.
|
||||
* appropriate.
|
||||
*
|
||||
* @param settings The Settings used to configure the module.
|
||||
*/
|
||||
EncryptionDetectionDataSourceIngestModule(EncryptionDetectionIngestJobSettings settings) {
|
||||
minimumEntropy = settings.getMinimumEntropy();
|
||||
@ -67,7 +66,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
|
||||
validateSettings();
|
||||
blackboard = Case.getCurrentCase().getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
@ -144,9 +143,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
|
||||
/**
|
||||
* Create a blackboard artifact.
|
||||
*
|
||||
* @param volume The volume to be processed.
|
||||
* @param volume The volume to be processed.
|
||||
* @param artifactType The type of artifact to create.
|
||||
* @param comment A comment to be attached to the artifact.
|
||||
* @param comment A comment to be attached to the artifact.
|
||||
*
|
||||
* @return 'OK' if the volume was processed successfully, or 'ERROR' if
|
||||
* there was a problem.
|
||||
@ -163,18 +162,14 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
|
||||
|
||||
try {
|
||||
/*
|
||||
* Index the artifact for keyword search.
|
||||
* post the artifact which will index the artifact for keyword
|
||||
* search, and fire an event to notify UI of this new artifact
|
||||
*/
|
||||
blackboard.indexArtifact(artifact);
|
||||
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
|
||||
/*
|
||||
* Send an event to update the view with the new result.
|
||||
*/
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
|
||||
|
||||
/*
|
||||
* Make an ingest inbox message.
|
||||
*/
|
||||
|
@ -25,13 +25,11 @@ import com.healthmarketscience.jackcess.InvalidCredentialsException;
|
||||
import com.healthmarketscience.jackcess.impl.CodecProvider;
|
||||
import com.healthmarketscience.jackcess.impl.UnsupportedCodecException;
|
||||
import com.healthmarketscience.jackcess.util.MemFileChannel;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.logging.Level;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.BufferUnderflowException;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.tika.exception.EncryptedDocumentException;
|
||||
import org.apache.tika.exception.TikaException;
|
||||
import org.apache.tika.metadata.Metadata;
|
||||
@ -41,18 +39,18 @@ import org.apache.tika.sax.BodyContentHandler;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
@ -93,9 +91,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
|
||||
/**
|
||||
* Create a EncryptionDetectionFileIngestModule object that will detect
|
||||
* files that are either encrypted or password protected and create
|
||||
* blackboard artifacts as appropriate. The supplied
|
||||
* EncryptionDetectionIngestJobSettings object is used to configure the
|
||||
* module.
|
||||
* blackboard artifacts as appropriate.
|
||||
*
|
||||
* @param settings The settings used to configure the module.
|
||||
*/
|
||||
EncryptionDetectionFileIngestModule(EncryptionDetectionIngestJobSettings settings) {
|
||||
minimumEntropy = settings.getMinimumEntropy();
|
||||
@ -108,8 +106,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
|
||||
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
|
||||
try {
|
||||
validateSettings();
|
||||
this.context = context;
|
||||
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
this.context = context;
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
|
||||
fileTypeDetector = new FileTypeDetector();
|
||||
} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
|
||||
throw new IngestModule.IngestModuleException("Failed to create file type detector", ex);
|
||||
@ -131,12 +130,12 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
|
||||
* verify the file hasn't been deleted.
|
||||
*/
|
||||
if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
|
||||
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
|
||||
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
|
||||
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
|
||||
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
|
||||
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
|
||||
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
|
||||
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
|
||||
/*
|
||||
* Is the file in FILE_IGNORE_LIST?
|
||||
*/
|
||||
@ -206,18 +205,14 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
|
||||
|
||||
try {
|
||||
/*
|
||||
* Index the artifact for keyword search.
|
||||
* post the artifact which will index the artifact for keyword
|
||||
* search, and fire an event to notify UI of this new artifact
|
||||
*/
|
||||
blackboard.indexArtifact(artifact);
|
||||
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
}
|
||||
|
||||
/*
|
||||
* Send an event to update the view with the new result.
|
||||
*/
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
|
||||
|
||||
/*
|
||||
* Make an ingest inbox message.
|
||||
*/
|
||||
|
@ -1,5 +1,5 @@
|
||||
CannotRunFileTypeDetection=Cannot run file type detection.
|
||||
ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search.
|
||||
ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s).
|
||||
OpenIDE-Module-Display-Category=Ingest Module
|
||||
OpenIDE-Module-Long-Description=\
|
||||
Exif metadata ingest module. \n\n\
|
||||
|
@ -28,39 +28,38 @@ import com.drew.metadata.exif.ExifSubIFDDirectory;
|
||||
import com.drew.metadata.exif.GpsDirectory;
|
||||
import java.io.BufferedInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.TimeZone;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MAKE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MODEL;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
|
||||
@ -70,20 +69,16 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
|
||||
* files. Ingests an image file and, if available, adds it's date, latitude,
|
||||
* longitude, altitude, device model, and device make to a blackboard artifact.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"CannotRunFileTypeDetection=Cannot run file type detection."
|
||||
})
|
||||
@NbBundle.Messages({"CannotRunFileTypeDetection=Cannot run file type detection."})
|
||||
public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
|
||||
private final IngestServices services = IngestServices.getInstance();
|
||||
private final AtomicInteger filesProcessed = new AtomicInteger(0);
|
||||
private static final String MODULE_NAME = ExifParserModuleFactory.getModuleName();
|
||||
private long jobId;
|
||||
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
|
||||
private FileTypeDetector fileTypeDetector;
|
||||
private final HashSet<String> supportedMimeTypes = new HashSet<>();
|
||||
private TimeZone timeZone = null;
|
||||
private Case currentCase;
|
||||
private Blackboard blackboard;
|
||||
|
||||
ExifParserFileIngestModule() {
|
||||
@ -103,18 +98,18 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
}
|
||||
}
|
||||
|
||||
@Messages({"ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s)."})
|
||||
@Override
|
||||
public ProcessResult process(AbstractFile content) {
|
||||
try {
|
||||
currentCase = Case.getCurrentCaseThrows();
|
||||
blackboard = currentCase.getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
//skip unalloc
|
||||
if ((content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|
||||
|| (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
|
||||
|| (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
@ -135,14 +130,9 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
return processFile(content);
|
||||
}
|
||||
|
||||
@Messages({"ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search."})
|
||||
ProcessResult processFile(AbstractFile file) {
|
||||
InputStream in = null;
|
||||
BufferedInputStream bin = null;
|
||||
private ProcessResult processFile(AbstractFile file) {
|
||||
|
||||
try {
|
||||
in = new ReadContentInputStream(file);
|
||||
bin = new BufferedInputStream(in);
|
||||
try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file));) {
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
Metadata metadata = ImageMetadataReader.readMetadata(bin);
|
||||
@ -165,7 +155,7 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
}
|
||||
Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
|
||||
if (date != null) {
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
|
||||
attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, MODULE_NAME, date.getTime() / 1000));
|
||||
}
|
||||
}
|
||||
|
||||
@ -174,15 +164,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
if (gpsDir != null) {
|
||||
GeoLocation loc = gpsDir.getGeoLocation();
|
||||
if (loc != null) {
|
||||
double latitude = loc.getLatitude();
|
||||
double longitude = loc.getLongitude();
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, ExifParserModuleFactory.getModuleName(), latitude));
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, ExifParserModuleFactory.getModuleName(), longitude));
|
||||
attributes.add(new BlackboardAttribute(TSK_GEO_LATITUDE, MODULE_NAME, loc.getLatitude()));
|
||||
attributes.add(new BlackboardAttribute(TSK_GEO_LONGITUDE, MODULE_NAME, loc.getLongitude()));
|
||||
}
|
||||
|
||||
Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
|
||||
if (altitude != null) {
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
|
||||
attributes.add(new BlackboardAttribute(TSK_GEO_ALTITUDE, MODULE_NAME, altitude.doubleValue()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -191,36 +179,30 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
if (devDir != null) {
|
||||
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
|
||||
if (StringUtils.isNotBlank(model)) {
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
|
||||
attributes.add(new BlackboardAttribute(TSK_DEVICE_MODEL, MODULE_NAME, model));
|
||||
}
|
||||
|
||||
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
|
||||
if (StringUtils.isNotBlank(make)) {
|
||||
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
|
||||
attributes.add(new BlackboardAttribute(TSK_DEVICE_MAKE, MODULE_NAME, make));
|
||||
}
|
||||
}
|
||||
|
||||
// Add the attributes, if there are any, to a new artifact
|
||||
if (!attributes.isEmpty()) {
|
||||
SleuthkitCase tskCase = currentCase.getSleuthkitCase();
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF, attributes)) {
|
||||
BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
|
||||
if (!blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) {
|
||||
BlackboardArtifact bba = file.newArtifact(TSK_METADATA_EXIF);
|
||||
bba.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(bba);
|
||||
blackboard.postArtifact(bba, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
Bundle.ExifParserFileIngestModule_indexError_message(), bba.getDisplayName());
|
||||
}
|
||||
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(),
|
||||
BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF,
|
||||
Collections.singletonList(bba)));
|
||||
}
|
||||
}
|
||||
|
||||
@ -237,24 +219,12 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.WARNING, String.format("IOException when parsing image file '%s/%s' (id=%d).", file.getParentPath(), file.getName(), file.getId()), ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
} finally {
|
||||
try {
|
||||
if (in != null) {
|
||||
in.close();
|
||||
}
|
||||
if (bin != null) {
|
||||
bin.close();
|
||||
}
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.WARNING, "Failed to close InputStream.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if should try to attempt to extract exif. Currently checks if JPEG
|
||||
* image (by signature)
|
||||
* Checks if should try to attempt to extract exif. Currently checks if
|
||||
* JPEG, TIFF or X-WAV (by signature)
|
||||
*
|
||||
* @param f file to be checked
|
||||
*
|
||||
|
@ -18,7 +18,6 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.fileextmismatch;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
@ -26,7 +25,6 @@ import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
@ -34,10 +32,10 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleSettings.CHECK_TYPE;
|
||||
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
@ -110,7 +108,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
|
||||
@Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."})
|
||||
public ProcessResult process(AbstractFile abstractFile) {
|
||||
try {
|
||||
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
@ -121,15 +119,15 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
|
||||
|
||||
// skip non-files
|
||||
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|
||||
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|
||||
|| (abstractFile.isFile() == false)) {
|
||||
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|
||||
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|
||||
|| (abstractFile.isFile() == false)) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
// deleted files often have content that was not theirs and therefor causes mismatch
|
||||
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC))
|
||||
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
|
||||
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
@ -145,14 +143,17 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
|
||||
BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(bart);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of this
|
||||
* new artifact
|
||||
*/
|
||||
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());
|
||||
}
|
||||
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(FileExtMismatchDetectorModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart)));
|
||||
}
|
||||
return ProcessResult.OK;
|
||||
} catch (TskException ex) {
|
||||
|
@ -18,40 +18,42 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.filetypeid;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFileTypesException;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* Detects the type of a file based on signature (magic) values. Posts results
|
||||
* to the blackboard.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"CannotRunFileTypeDetection=Unable to run file type detection."
|
||||
})
|
||||
@NbBundle.Messages({"CannotRunFileTypeDetection=Unable to run file type detection."})
|
||||
public class FileTypeIdIngestModule implements FileIngestModule {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
|
||||
private long jobId;
|
||||
private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();
|
||||
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
|
||||
|
||||
private long jobId;
|
||||
private FileTypeDetector fileTypeDetector;
|
||||
|
||||
/**
|
||||
@ -146,26 +148,34 @@ public class FileTypeIdIngestModule implements FileIngestModule {
|
||||
* @param fileType The file type rule for categorizing the hit.
|
||||
*/
|
||||
private void createInterestingFileHit(AbstractFile file, FileType fileType) {
|
||||
|
||||
List<BlackboardAttribute> attributes = Arrays.asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(),
|
||||
fileType.getInterestingFilesSetName()),
|
||||
new BlackboardAttribute(
|
||||
TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(),
|
||||
fileType.getMimeType()));
|
||||
try {
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
attributes.add(new BlackboardAttribute(
|
||||
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
|
||||
attributes.add(new BlackboardAttribute(
|
||||
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));
|
||||
|
||||
Case currentCase = Case.getCurrentCaseThrows();
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
|
||||
|
||||
Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
|
||||
if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
|
||||
artifact.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
currentCase.getServices().getBlackboard().indexArtifact(artifact);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of this
|
||||
* new artifact
|
||||
*/
|
||||
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
@ -227,5 +237,4 @@ public class FileTypeIdIngestModule implements FileIngestModule {
|
||||
long matchTime = 0;
|
||||
long numFiles = 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
@ -30,7 +29,6 @@ import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
|
||||
@ -39,9 +37,9 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
@ -102,7 +100,7 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
* object is used to configure the module.
|
||||
*
|
||||
* @param settings The module settings.
|
||||
*
|
||||
*
|
||||
* @throws NoCurrentCaseException If there is no open case.
|
||||
*/
|
||||
HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
|
||||
@ -170,7 +168,7 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
@Override
|
||||
public ProcessResult process(AbstractFile file) {
|
||||
try {
|
||||
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
@ -178,7 +176,7 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
|
||||
// Skip unallocated space files.
|
||||
if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|
||||
|| file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
|
||||
|| file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
@ -356,8 +354,11 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
badFile.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(badFile);
|
||||
/*
|
||||
* post the artifact which will index the artifact for keyword
|
||||
* search, and fire an event to notify UI of this new artifact
|
||||
*/
|
||||
blackboard.postArtifact(badFile, moduleName);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
@ -400,7 +401,6 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
abstractFile.getName() + md5Hash,
|
||||
badFile));
|
||||
}
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(moduleName, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
|
||||
} catch (TskException ex) {
|
||||
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
|
||||
}
|
||||
@ -414,7 +414,7 @@ public class HashDbIngestModule implements FileIngestModule {
|
||||
* @param knownHashSets The list of hash sets for "known" files.
|
||||
*/
|
||||
private static synchronized void postSummary(long jobId,
|
||||
List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
|
||||
List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
|
||||
IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
|
||||
totalsForIngestJobs.remove(jobId);
|
||||
|
||||
|
@ -30,12 +30,12 @@ import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
|
||||
@ -58,7 +58,7 @@ final class CallLogAnalyzer {
|
||||
|
||||
/**
|
||||
* Find call logs given an ingest job context and index the results.
|
||||
*
|
||||
*
|
||||
* @param context The ingest job context.
|
||||
*/
|
||||
public void findCallLogs(IngestJobContext context) {
|
||||
@ -69,7 +69,7 @@ final class CallLogAnalyzer {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
return;
|
||||
}
|
||||
blackboard = openCase.getServices().getBlackboard();
|
||||
blackboard = openCase.getSleuthkitCase().getBlackboard();
|
||||
List<AbstractFile> absFiles;
|
||||
try {
|
||||
SleuthkitCase skCase = openCase.getSleuthkitCase();
|
||||
@ -98,7 +98,7 @@ final class CallLogAnalyzer {
|
||||
|
||||
/**
|
||||
* Index results for call logs found in the database.
|
||||
*
|
||||
*
|
||||
* @param DatabasePath The path to the database.
|
||||
* @param fileId The ID of the file associated with artifacts.
|
||||
*/
|
||||
@ -162,8 +162,12 @@ final class CallLogAnalyzer {
|
||||
|
||||
bba.addAttributes(attributes);
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(bba);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of
|
||||
* this new artifact
|
||||
*/
|
||||
blackboard.postArtifact(bba, moduleName);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
@ -184,7 +188,5 @@ final class CallLogAnalyzer {
|
||||
} catch (Exception e) {
|
||||
logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e); //NON-NLS
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -35,12 +35,12 @@ import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
@ -75,7 +75,7 @@ final class ContactAnalyzer {
|
||||
return;
|
||||
}
|
||||
|
||||
blackboard = openCase.getServices().getBlackboard();
|
||||
blackboard = openCase.getSleuthkitCase().getBlackboard();
|
||||
List<AbstractFile> absFiles;
|
||||
try {
|
||||
SleuthkitCase skCase = openCase.getSleuthkitCase();
|
||||
@ -183,7 +183,7 @@ final class ContactAnalyzer {
|
||||
bba.addAttributes(attributes);
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(bba);
|
||||
blackboard.postArtifact(bba, moduleName);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
|
@ -31,12 +31,12 @@ import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.datamodel.ContentUtils;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.ReadContentInputStream;
|
||||
@ -73,7 +73,7 @@ class TextMessageAnalyzer {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
return;
|
||||
}
|
||||
blackboard = openCase.getServices().getBlackboard();
|
||||
blackboard = openCase.getSleuthkitCase().getBlackboard();
|
||||
try {
|
||||
SleuthkitCase skCase = openCase.getSleuthkitCase();
|
||||
absFiles = skCase.findAllFilesWhere("name ='mmssms.db'"); //NON-NLS //get exact file name
|
||||
@ -168,8 +168,11 @@ class TextMessageAnalyzer {
|
||||
|
||||
bba.addAttributes(attributes);
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(bba);
|
||||
/*
|
||||
* post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of
|
||||
* this new artifact
|
||||
*/ blackboard.postArtifact(bba, moduleName);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(
|
||||
@ -191,7 +194,5 @@ class TextMessageAnalyzer {
|
||||
} catch (Exception e) {
|
||||
logger.log(Level.SEVERE, "Error parsing text messages to Blackboard", e); //NON-NLS
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -18,10 +18,8 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.iOS;
|
||||
|
||||
import org.openide.util.lookup.ServiceProvider;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Version;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
|
||||
|
@ -19,8 +19,8 @@
|
||||
package org.sleuthkit.autopsy.modules.interestingitems;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
@ -29,7 +29,6 @@ import org.openide.util.NbBundle;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
@ -37,10 +36,13 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
@ -48,15 +50,15 @@ import org.sleuthkit.datamodel.TskData;
|
||||
* A file ingest module that generates interesting files set hit artifacts for
|
||||
* files that match interesting files set definitions.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."
|
||||
})
|
||||
@NbBundle.Messages({"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."})
|
||||
final class FilesIdentifierIngestModule implements FileIngestModule {
|
||||
|
||||
private static final Object sharedResourcesLock = new Object();
|
||||
private static final Logger logger = Logger.getLogger(FilesIdentifierIngestModule.class.getName());
|
||||
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
|
||||
private static final Map<Long, List<FilesSet>> interestingFileSetsByJob = new ConcurrentHashMap<>();
|
||||
private static final String MODULE_NAME = InterestingItemsIngestModuleFactory.getModuleName();
|
||||
|
||||
private final FilesIdentifierIngestJobSettings settings;
|
||||
private final IngestServices services = IngestServices.getInstance();
|
||||
private IngestJobContext context;
|
||||
@ -72,9 +74,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModuleException {
|
||||
this.context = context;
|
||||
@ -100,21 +99,16 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
@Override
|
||||
@Messages({"FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search."})
|
||||
public ProcessResult process(AbstractFile file) {
|
||||
Case currentCase;
|
||||
try {
|
||||
currentCase = Case.getCurrentCaseThrows();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
blackboard = currentCase.getServices().getBlackboard();
|
||||
|
||||
|
||||
// Skip slack space files.
|
||||
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)) {
|
||||
return ProcessResult.OK;
|
||||
@ -126,48 +120,46 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
|
||||
String ruleSatisfied = filesSet.fileIsMemberOf(file);
|
||||
if (ruleSatisfied != null) {
|
||||
try {
|
||||
// Post an interesting files set hit artifact to the
|
||||
// blackboard.
|
||||
String moduleName = InterestingItemsIngestModuleFactory.getModuleName();
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
|
||||
// Add a set name attribute to the artifact. This adds a
|
||||
// fair amount of redundant data to the attributes table
|
||||
// (i.e., rows that differ only in artifact id), but doing
|
||||
// otherwise would requires reworking the interesting files
|
||||
// set hit artifact.
|
||||
BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, filesSet.getName());
|
||||
attributes.add(setNameAttribute);
|
||||
Collection<BlackboardAttribute> attributes = Arrays.asList(
|
||||
/*
|
||||
* Add a set name attribute to the artifact. This
|
||||
* adds a fair amount of redundant data to the
|
||||
* attributes table (i.e., rows that differ only in
|
||||
* artifact id), but doing otherwise would requires
|
||||
* reworking the interesting files set hit artifact. */
|
||||
new BlackboardAttribute(
|
||||
TSK_SET_NAME, MODULE_NAME,
|
||||
filesSet.getName()),
|
||||
/*
|
||||
* Add a category attribute to the artifact to
|
||||
* record the interesting files set membership rule
|
||||
* that was satisfied. */
|
||||
new BlackboardAttribute(
|
||||
TSK_CATEGORY, MODULE_NAME,
|
||||
ruleSatisfied)
|
||||
);
|
||||
|
||||
// Add a category attribute to the artifact to record the
|
||||
// interesting files set membership rule that was satisfied.
|
||||
BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, moduleName, ruleSatisfied);
|
||||
attributes.add(ruleNameAttribute);
|
||||
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
|
||||
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
|
||||
artifact.addAttributes(attributes);
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
blackboard.indexArtifact(artifact);
|
||||
|
||||
// Post thet artifact to the blackboard.
|
||||
blackboard.postArtifact(artifact, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
|
||||
}
|
||||
|
||||
services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
|
||||
|
||||
// make an ingest inbox message
|
||||
StringBuilder detailsSb = new StringBuilder();
|
||||
detailsSb.append("File: " + file.getParentPath() + file.getName() + "<br/>\n");
|
||||
detailsSb.append("Rule Set: " + filesSet.getName());
|
||||
detailsSb.append("File: ").append(file.getParentPath()).append(file.getName()).append("<br/>\n");
|
||||
detailsSb.append("Rule Set: ").append(filesSet.getName());
|
||||
|
||||
services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
|
||||
"Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
|
||||
"Interesting File Match: " + filesSet.getName() + "(" + file.getName() + ")",
|
||||
detailsSb.toString(),
|
||||
file.getName(),
|
||||
artifact));
|
||||
@ -180,9 +172,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
/**
|
||||
* @inheritDoc
|
||||
*/
|
||||
@Override
|
||||
public void shutDown() {
|
||||
if (context != null) {
|
||||
|
5
Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties
Executable file
5
Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties
Executable file
@ -0,0 +1,5 @@
|
||||
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
|
||||
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
|
||||
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All modules except chrome_cache* and the below are enabled. Enabling these will cause Plaso to run slower.
|
||||
PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
|
||||
PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.
|
29
Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties-MERGED
Executable file
29
Core/src/org/sleuthkit/autopsy/modules/plaso/Bundle.properties-MERGED
Executable file
@ -0,0 +1,29 @@
|
||||
# {0} - file that events are from
|
||||
PlasoIngestModule.artifact.progress=Adding events to case: {0}
|
||||
PlasoIngestModule.bad.imageFile=Cannot find image file name and path
|
||||
PlasoIngestModule.completed=Plaso Processing Completed
|
||||
PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation
|
||||
PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image.
|
||||
PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.
|
||||
PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.
|
||||
PlasoIngestModule.error.running.psort=Error running Psort, see log file.
|
||||
PlasoIngestModule.event.datetime=Event Date Time
|
||||
PlasoIngestModule.event.description=Event Description
|
||||
PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.
|
||||
PlasoIngestModule.executable.not.found=Plaso Executable Not Found.
|
||||
PlasoIngestModule.has.run=Plaso Plugin has been run.
|
||||
PlasoIngestModule.info.empty.database=Plaso database was empty.
|
||||
PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled
|
||||
PlasoIngestModule.psort.cancelled=psort run was canceled
|
||||
PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete.
|
||||
PlasoIngestModule.requires.windows=Plaso module requires windows.
|
||||
PlasoIngestModule.running.psort=Running Psort
|
||||
PlasoIngestModule.starting.log2timeline=Starting Log2timeline
|
||||
PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings
|
||||
PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source.
|
||||
PlasoModuleFactory_moduleName=Plaso
|
||||
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
|
||||
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
|
||||
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All modules except chrome_cache* and the below are enabled. Enabling these will cause Plaso to run slower.
|
||||
PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
|
||||
PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.
|
478
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java
Executable file
478
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoIngestModule.java
Executable file
@ -0,0 +1,478 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2018-2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.plaso;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import static java.util.Objects.nonNull;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import org.openide.modules.InstalledFileLocator;
|
||||
import org.openide.util.Cancellable;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.services.FileManager;
|
||||
import org.sleuthkit.autopsy.coreutils.ExecUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.autopsy.ingest.IngestMessage;
|
||||
import org.sleuthkit.autopsy.ingest.IngestServices;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.Image;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TimelineEventType;
|
||||
|
||||
/**
|
||||
* Data source ingest module that runs Plaso against the image.
|
||||
*/
|
||||
public class PlasoIngestModule implements DataSourceIngestModule {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(PlasoIngestModule.class.getName());
|
||||
private static final String MODULE_NAME = PlasoModuleFactory.getModuleName();
|
||||
|
||||
private static final String PLASO = "plaso"; //NON-NLS
|
||||
private static final String PLASO64 = "plaso-20180818-amd64";//NON-NLS
|
||||
private static final String PLASO32 = "plaso-20180818-win32";//NON-NLS
|
||||
private static final String LOG2TIMELINE_EXECUTABLE = "Log2timeline.exe";//NON-NLS
|
||||
private static final String PSORT_EXECUTABLE = "psort.exe";//NON-NLS
|
||||
private static final String COOKIE = "cookie";//NON-NLS
|
||||
private static final int LOG2TIMELINE_WORKERS = 2;
|
||||
|
||||
private File log2TimeLineExecutable;
|
||||
private File psortExecutable;
|
||||
|
||||
private final PlasoModuleSettings settings;
|
||||
private IngestJobContext context;
|
||||
private Case currentCase;
|
||||
private FileManager fileManager;
|
||||
|
||||
private Image image;
|
||||
private AbstractFile previousFile = null; // cache used when looking up files in Autopsy DB
|
||||
|
||||
PlasoIngestModule(PlasoModuleSettings settings) {
|
||||
this.settings = settings;
|
||||
}
|
||||
|
||||
@NbBundle.Messages({
|
||||
"PlasoIngestModule.executable.not.found=Plaso Executable Not Found.",
|
||||
"PlasoIngestModule.requires.windows=Plaso module requires windows.",
|
||||
"PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image."})
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModuleException {
|
||||
this.context = context;
|
||||
|
||||
if (false == PlatformUtil.isWindowsOS()) {
|
||||
throw new IngestModuleException(Bundle.PlasoIngestModule_requires_windows());
|
||||
}
|
||||
|
||||
try {
|
||||
log2TimeLineExecutable = locateExecutable(LOG2TIMELINE_EXECUTABLE);
|
||||
psortExecutable = locateExecutable(PSORT_EXECUTABLE);
|
||||
} catch (FileNotFoundException exception) {
|
||||
logger.log(Level.WARNING, "Plaso executable not found.", exception); //NON-NLS
|
||||
throw new IngestModuleException(Bundle.PlasoIngestModule_executable_not_found(), exception);
|
||||
}
|
||||
|
||||
Content dataSource = context.getDataSource();
|
||||
if (!(dataSource instanceof Image)) {
|
||||
throw new IngestModuleException(Bundle.PlasoIngestModule_dataSource_not_an_image());
|
||||
}
|
||||
image = (Image) dataSource;
|
||||
}
|
||||
|
||||
@NbBundle.Messages({
|
||||
"PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.",
|
||||
"PlasoIngestModule.error.running.psort=Error running Psort, see log file.",
|
||||
"PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.",
|
||||
"PlasoIngestModule.starting.log2timeline=Starting Log2timeline",
|
||||
"PlasoIngestModule.running.psort=Running Psort",
|
||||
"PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled",
|
||||
"PlasoIngestModule.psort.cancelled=psort run was canceled",
|
||||
"PlasoIngestModule.bad.imageFile=Cannot find image file name and path",
|
||||
"PlasoIngestModule.completed=Plaso Processing Completed",
|
||||
"PlasoIngestModule.has.run=Plaso Plugin has been run.",
|
||||
"PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete."})
|
||||
@Override
|
||||
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
|
||||
assert dataSource.equals(image);
|
||||
|
||||
statusHelper.switchToDeterminate(100);
|
||||
currentCase = Case.getCurrentCase();
|
||||
fileManager = currentCase.getServices().getFileManager();
|
||||
|
||||
String currentTime = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss z", Locale.US).format(System.currentTimeMillis());//NON-NLS
|
||||
Path moduleOutputPath = Paths.get(currentCase.getModuleDirectory(), PLASO, currentTime);
|
||||
try {
|
||||
Files.createDirectories(moduleOutputPath);
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.SEVERE, "Error creating Plaso module output directory.", ex); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
// Run log2timeline
|
||||
logger.log(Level.INFO, "Starting Plaso Run.");//NON-NLS
|
||||
statusHelper.progress(Bundle.PlasoIngestModule_starting_log2timeline(), 0);
|
||||
ProcessBuilder log2TimeLineCommand = buildLog2TimeLineCommand(moduleOutputPath, image);
|
||||
try {
|
||||
Process log2TimeLineProcess = log2TimeLineCommand.start();
|
||||
try (BufferedReader log2TimeLineOutpout = new BufferedReader(new InputStreamReader(log2TimeLineProcess.getInputStream()))) {
|
||||
L2TStatusProcessor statusReader = new L2TStatusProcessor(log2TimeLineOutpout, statusHelper, moduleOutputPath);
|
||||
new Thread(statusReader, "log2timeline status reader").start(); //NON-NLS
|
||||
ExecUtil.waitForTermination(LOG2TIMELINE_EXECUTABLE, log2TimeLineProcess, new DataSourceIngestModuleProcessTerminator(context));
|
||||
statusReader.cancel();
|
||||
}
|
||||
|
||||
if (context.dataSourceIngestIsCancelled()) {
|
||||
logger.log(Level.INFO, "Log2timeline run was canceled"); //NON-NLS
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
if (Files.notExists(moduleOutputPath.resolve(PLASO))) {
|
||||
logger.log(Level.WARNING, "Error running log2timeline: there was no storage file."); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
// sort the output
|
||||
statusHelper.progress(Bundle.PlasoIngestModule_running_psort(), 33);
|
||||
ProcessBuilder psortCommand = buildPsortCommand(moduleOutputPath);
|
||||
int result = ExecUtil.execute(psortCommand, new DataSourceIngestModuleProcessTerminator(context));
|
||||
if (result != 0) {
|
||||
logger.log(Level.SEVERE, String.format("Error running Psort, error code returned %d", result)); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(MODULE_NAME, Bundle.PlasoIngestModule_psort_fail());
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
if (context.dataSourceIngestIsCancelled()) {
|
||||
logger.log(Level.INFO, "psort run was canceled"); //NON-NLS
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
Path plasoFile = moduleOutputPath.resolve("plasodb.db3"); //NON-NLS
|
||||
if (Files.notExists(plasoFile)) {
|
||||
logger.log(Level.SEVERE, "Error running Psort: there was no sqlite db file."); //NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
// parse the output and make artifacts
|
||||
createPlasoArtifacts(plasoFile.toString(), statusHelper);
|
||||
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.SEVERE, "Error running Plaso.", ex);//NON-NLS
|
||||
return ProcessResult.ERROR;
|
||||
}
|
||||
|
||||
IngestMessage message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
|
||||
Bundle.PlasoIngestModule_has_run(),
|
||||
Bundle.PlasoIngestModule_completed());
|
||||
IngestServices.getInstance().postMessage(message);
|
||||
return ProcessResult.OK;
|
||||
}
|
||||
|
||||
private ProcessBuilder buildLog2TimeLineCommand(Path moduleOutputPath, Image image) {
|
||||
//make a csv list of disabled parsers.
|
||||
String parsersString = settings.getParsers().entrySet().stream()
|
||||
.filter(entry -> entry.getValue() == false)
|
||||
.map(entry -> "!" + entry.getKey()) // '!' prepended to parsername disables it. //NON-NLS
|
||||
.collect(Collectors.joining(","));//NON-NLS
|
||||
|
||||
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
|
||||
"\"" + log2TimeLineExecutable + "\"", //NON-NLS
|
||||
"--vss-stores", "all", //NON-NLS
|
||||
"-z", image.getTimeZone(), //NON-NLS
|
||||
"--partitions", "all", //NON-NLS
|
||||
"--hasher_file_size_limit", "1", //NON-NLS
|
||||
"--hashers", "none", //NON-NLS
|
||||
"--parsers", "\"" + parsersString + "\"",//NON-NLS
|
||||
"--no_dependencies_check", //NON-NLS
|
||||
"--workers", String.valueOf(LOG2TIMELINE_WORKERS),//NON-NLS
|
||||
moduleOutputPath.resolve(PLASO).toString(),
|
||||
image.getPaths()[0]
|
||||
);
|
||||
processBuilder.redirectError(moduleOutputPath.resolve("log2timeline_err.txt").toFile()); //NON-NLS
|
||||
return processBuilder;
|
||||
}
|
||||
|
||||
static private ProcessBuilder buildProcessWithRunAsInvoker(String... commandLine) {
|
||||
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
|
||||
/* Add an environment variable to force log2timeline/psort to run with
|
||||
* the same permissions Autopsy uses. */
|
||||
processBuilder.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS
|
||||
return processBuilder;
|
||||
}
|
||||
|
||||
private ProcessBuilder buildPsortCommand(Path moduleOutputPath) {
|
||||
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
|
||||
"\"" + psortExecutable + "\"", //NON-NLS
|
||||
"-o", "4n6time_sqlite", //NON-NLS
|
||||
"-w", moduleOutputPath.resolve("plasodb.db3").toString(), //NON-NLS
|
||||
moduleOutputPath.resolve(PLASO).toString()
|
||||
);
|
||||
|
||||
processBuilder.redirectOutput(moduleOutputPath.resolve("psort_output.txt").toFile()); //NON-NLS
|
||||
processBuilder.redirectError(moduleOutputPath.resolve("psort_err.txt").toFile()); //NON-NLS
|
||||
return processBuilder;
|
||||
}
|
||||
|
||||
private static File locateExecutable(String executableName) throws FileNotFoundException {
|
||||
String architectureFolder = PlatformUtil.is64BitOS() ? PLASO64 : PLASO32;
|
||||
String executableToFindName = Paths.get(PLASO, architectureFolder, executableName).toString();
|
||||
|
||||
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PlasoIngestModule.class.getPackage().getName(), false);
|
||||
if (null == exeFile || exeFile.canExecute() == false) {
|
||||
throw new FileNotFoundException(executableName + " executable not found.");
|
||||
}
|
||||
return exeFile;
|
||||
}
|
||||
|
||||
@NbBundle.Messages({
|
||||
"PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.",
|
||||
"PlasoIngestModule.event.datetime=Event Date Time",
|
||||
"PlasoIngestModule.event.description=Event Description",
|
||||
"PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation ",
|
||||
"# {0} - file that events are from",
|
||||
"PlasoIngestModule.artifact.progress=Adding events to case: {0}",
|
||||
"PlasoIngestModule.info.empty.database=Plaso database was empty.",
|
||||
})
|
||||
private void createPlasoArtifacts(String plasoDb, DataSourceIngestModuleProgress statusHelper) {
|
||||
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
|
||||
|
||||
String sqlStatement = "SELECT substr(filename,1) AS filename, "
|
||||
+ " strftime('%s', datetime) AS epoch_date, "
|
||||
+ " description, "
|
||||
+ " source, "
|
||||
+ " type, "
|
||||
+ " sourcetype "
|
||||
+ " FROM log2timeline "
|
||||
+ " WHERE source NOT IN ('FILE', "
|
||||
+ " 'WEBHIST') " // bad dates and duplicates with what we have.
|
||||
+ " AND sourcetype NOT IN ('UNKNOWN', "
|
||||
+ " 'PE Import Time');"; // lots of bad dates //NON-NLS
|
||||
|
||||
try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + plasoDb); //NON-NLS
|
||||
ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) {
|
||||
|
||||
boolean dbHasData = false;
|
||||
|
||||
while (resultSet.next()) {
|
||||
dbHasData = true;
|
||||
|
||||
if (context.dataSourceIngestIsCancelled()) {
|
||||
logger.log(Level.INFO, "Cancelled Plaso Artifact Creation."); //NON-NLS
|
||||
return;
|
||||
}
|
||||
|
||||
String currentFileName = resultSet.getString("filename"); //NON-NLS
|
||||
statusHelper.progress(Bundle.PlasoIngestModule_artifact_progress(currentFileName), 66);
|
||||
Content resolvedFile = getAbstractFile(currentFileName);
|
||||
if (resolvedFile == null) {
|
||||
logger.log(Level.INFO, "File {0} from Plaso output not found in case. Associating it with the data source instead.", currentFileName);//NON-NLS
|
||||
resolvedFile = image;
|
||||
}
|
||||
|
||||
String description = resultSet.getString("description");
|
||||
TimelineEventType eventType = findEventSubtype(currentFileName, resultSet);
|
||||
|
||||
// If the description is empty use the event type display name
|
||||
// as the description.
|
||||
if ( description == null || description.isEmpty() ) {
|
||||
if (eventType != TimelineEventType.OTHER) {
|
||||
description = eventType.getDisplayName();
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
Collection<BlackboardAttribute> bbattributes = Arrays.asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_DATETIME, MODULE_NAME,
|
||||
resultSet.getLong("epoch_date")), //NON-NLS
|
||||
new BlackboardAttribute(
|
||||
TSK_DESCRIPTION, MODULE_NAME,
|
||||
description),//NON-NLS
|
||||
new BlackboardAttribute(
|
||||
TSK_TL_EVENT_TYPE, MODULE_NAME,
|
||||
eventType.getTypeID()));
|
||||
|
||||
try {
|
||||
BlackboardArtifact bbart = resolvedFile.newArtifact(TSK_TL_EVENT);
|
||||
bbart.addAttributes(bbattributes);
|
||||
try {
|
||||
/* Post the artifact which will index the artifact for
|
||||
* keyword search, and fire an event to notify UI of
|
||||
* this new artifact */
|
||||
blackboard.postArtifact(bbart, MODULE_NAME);
|
||||
} catch (BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
// Check if there is data the db
|
||||
if( !dbHasData ) {
|
||||
logger.log(Level.INFO, String.format("PlasoDB was empty: %s", plasoDb));
|
||||
MessageNotifyUtil.Notify.info(MODULE_NAME, Bundle.PlasoIngestModule_info_empty_database());
|
||||
}
|
||||
} catch (SQLException ex) {
|
||||
logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
private AbstractFile getAbstractFile(String file) {
|
||||
|
||||
Path path = Paths.get(file);
|
||||
String fileName = path.getFileName().toString();
|
||||
String filePath = path.getParent().toString().replaceAll("\\\\", "/");//NON-NLS
|
||||
if (filePath.endsWith("/") == false) {//NON-NLS
|
||||
filePath += "/";//NON-NLS
|
||||
}
|
||||
|
||||
// check the cached file
|
||||
//TODO: would we reduce 'cache misses' if we retrieved the events sorted by file? Is that overhead worth it?
|
||||
if (previousFile != null
|
||||
&& previousFile.getName().equalsIgnoreCase(fileName)
|
||||
&& previousFile.getParentPath().equalsIgnoreCase(filePath)) {
|
||||
return previousFile;
|
||||
|
||||
}
|
||||
try {
|
||||
List<AbstractFile> abstractFiles = fileManager.findFiles(fileName, filePath);
|
||||
if (abstractFiles.size() == 1) {// TODO: why do we bother with this check. also we don't cache the file...
|
||||
return abstractFiles.get(0);
|
||||
}
|
||||
for (AbstractFile resolvedFile : abstractFiles) {
|
||||
// double check its an exact match
|
||||
if (filePath.equalsIgnoreCase(resolvedFile.getParentPath())) {
|
||||
// cache it for next time
|
||||
previousFile = resolvedFile;
|
||||
return resolvedFile;
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Exception finding file.", ex);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the event_type_id of the event from the plaso information.
|
||||
*
|
||||
* @param fileName The name of the file this event is from.
|
||||
* @param row The row returned from the log2timeline table of th eplaso
|
||||
* output.
|
||||
*
|
||||
* @return the event_type_id of the EventType of the given event.
|
||||
*
|
||||
* @throws SQLException
|
||||
*/
|
||||
private TimelineEventType findEventSubtype(String fileName, ResultSet row) throws SQLException {
|
||||
switch (row.getString("source")) {
|
||||
case "WEBHIST": //These shouldn't actually be present, but keeping the logic just in case...
|
||||
if (fileName.toLowerCase().contains(COOKIE)
|
||||
|| row.getString("type").toLowerCase().contains(COOKIE)) {//NON-NLS
|
||||
|
||||
return TimelineEventType.WEB_COOKIE;
|
||||
} else {
|
||||
return TimelineEventType.WEB_HISTORY;
|
||||
}
|
||||
case "EVT":
|
||||
case "LOG":
|
||||
return TimelineEventType.LOG_ENTRY;
|
||||
case "REG":
|
||||
switch (row.getString("sourcetype").toLowerCase()) {//NON-NLS
|
||||
case "unknown : usb entries":
|
||||
case "unknown : usbstor entries":
|
||||
return TimelineEventType.DEVICES_ATTACHED;
|
||||
default:
|
||||
return TimelineEventType.REGISTRY;
|
||||
}
|
||||
default:
|
||||
return TimelineEventType.OTHER;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs in a thread and reads the output of log2timeline. It redirectes the
|
||||
* output both to a log file, and to the status message of the Plaso ingest
|
||||
* module progress bar.
|
||||
*/
|
||||
private static class L2TStatusProcessor implements Runnable, Cancellable {
|
||||
|
||||
private final BufferedReader log2TimeLineOutpout;
|
||||
private final DataSourceIngestModuleProgress statusHelper;
|
||||
volatile private boolean cancelled = false;
|
||||
private final Path outputPath;
|
||||
|
||||
private L2TStatusProcessor(BufferedReader log2TimeLineOutpout, DataSourceIngestModuleProgress statusHelper, Path outputPath) throws IOException {
|
||||
this.log2TimeLineOutpout = log2TimeLineOutpout;
|
||||
this.statusHelper = statusHelper;
|
||||
this.outputPath = outputPath;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try (BufferedWriter writer = Files.newBufferedWriter(outputPath.resolve("log2timeline_output.txt"));) {//NON-NLS
|
||||
String line = log2TimeLineOutpout.readLine();
|
||||
while (cancelled == false && nonNull(line)) {
|
||||
statusHelper.progress(line);
|
||||
writer.write(line);
|
||||
writer.newLine();
|
||||
line = log2TimeLineOutpout.readLine();
|
||||
}
|
||||
writer.flush();
|
||||
} catch (IOException ex) {
|
||||
logger.log(Level.WARNING, "Error reading log2timeline output stream.", ex);//NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean cancel() {
|
||||
cancelled = true;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
112
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleFactory.java
Executable file
112
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleFactory.java
Executable file
@ -0,0 +1,112 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2018-2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.plaso;
|
||||
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.lookup.ServiceProvider;
|
||||
import org.sleuthkit.autopsy.coreutils.Version;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModule;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
|
||||
|
||||
/**
|
||||
* A factory that creates data source ingest modules that run Plaso against an
|
||||
* image and saves the storage file to module output.
|
||||
*/
|
||||
@ServiceProvider(service = IngestModuleFactory.class)
|
||||
@NbBundle.Messages({"PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings"})
|
||||
public class PlasoModuleFactory implements IngestModuleFactory {
|
||||
|
||||
@NbBundle.Messages({"PlasoModuleFactory_moduleName=Plaso"})
|
||||
static String getModuleName() {
|
||||
return Bundle.PlasoModuleFactory_moduleName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getModuleDisplayName() {
|
||||
return getModuleName();
|
||||
}
|
||||
|
||||
@NbBundle.Messages({"PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source."})
|
||||
@Override
|
||||
public String getModuleDescription() {
|
||||
return Bundle.PlasoModuleFactory_moduleDesc();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getModuleVersionNumber() {
|
||||
return Version.getVersion();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isDataSourceIngestModuleFactory() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings settings) {
|
||||
assert settings instanceof PlasoModuleSettings;
|
||||
if (settings instanceof PlasoModuleSettings) {
|
||||
return new PlasoIngestModule((PlasoModuleSettings) settings);
|
||||
}
|
||||
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasGlobalSettingsPanel() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IngestModuleGlobalSettingsPanel getGlobalSettingsPanel() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
|
||||
return new PlasoModuleSettings();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasIngestJobSettingsPanel() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
|
||||
assert settings instanceof PlasoModuleSettings;
|
||||
if (settings instanceof PlasoModuleSettings) {
|
||||
return new PlasoModuleSettingsPanel((PlasoModuleSettings) settings);
|
||||
}
|
||||
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isFileIngestModuleFactory() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
}
|
92
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettings.java
Executable file
92
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettings.java
Executable file
@ -0,0 +1,92 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.plaso;
|
||||
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
|
||||
|
||||
/**
|
||||
* Settings for the Plaso Ingest Module.
|
||||
*/
|
||||
public class PlasoModuleSettings implements IngestModuleIngestJobSettings {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/** Map from parser name (or match pattern) to its enabled state. */
|
||||
final Map<String, Boolean> parsers = new HashMap<>();
|
||||
|
||||
/**
|
||||
* Get an immutable map from parser name to its enabled state. Parsers
|
||||
* mapped to true or with no entry will be enabled. Parsers mapped to false,
|
||||
* will be disabled.
|
||||
*/
|
||||
Map<String, Boolean> getParsers() {
|
||||
return ImmutableMap.copyOf(parsers);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructor. The PlasoModuleSettings will have the default parsers
|
||||
* (winreg, pe, chrome, firefox, internet explorer) disabled.
|
||||
*/
|
||||
public PlasoModuleSettings() {
|
||||
parsers.put("winreg", false);
|
||||
parsers.put("pe", false);
|
||||
|
||||
//chrome
|
||||
parsers.put("chrome_preferences", false);
|
||||
parsers.put("chrome_cache", false);
|
||||
parsers.put("chrome_27_history", false);
|
||||
parsers.put("chrome_8_history", false);
|
||||
parsers.put("chrome_cookies", false);
|
||||
parsers.put("chrome_extension_activity", false);
|
||||
|
||||
//firefox
|
||||
parsers.put("firefox_cache", false);
|
||||
parsers.put("firefox_cache2", false);
|
||||
parsers.put("firefox_cookies", false);
|
||||
parsers.put("firefox_downloads", false);
|
||||
parsers.put("firefox_history", false);
|
||||
|
||||
//Internet Explorer
|
||||
parsers.put("msiecf", false);
|
||||
parsers.put("msie_webcache", false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the serialization version number.
|
||||
*
|
||||
* @return A serialization version number.
|
||||
*/
|
||||
@Override
|
||||
public long getVersionNumber() {
|
||||
return serialVersionUID;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the given parser enabled/disabled
|
||||
*
|
||||
* @param parserName The name of the parser to enable/disable
|
||||
* @param selected The new state (enabled/disabled) for the given parser.
|
||||
*/
|
||||
void setParserEnabled(String parserName, boolean selected) {
|
||||
parsers.put(parserName, selected);
|
||||
}
|
||||
}
|
104
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.form
Executable file
104
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.form
Executable file
@ -0,0 +1,104 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
|
||||
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
|
||||
<NonVisualComponents>
|
||||
<Component class="javax.swing.JFileChooser" name="jFileChooser1">
|
||||
</Component>
|
||||
</NonVisualComponents>
|
||||
<AuxValues>
|
||||
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="1"/>
|
||||
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
|
||||
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="true"/>
|
||||
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="true"/>
|
||||
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
|
||||
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
|
||||
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
|
||||
<AuxValue name="designerSize" type="java.awt.Dimension" value="-84,-19,0,5,115,114,0,18,106,97,118,97,46,97,119,116,46,68,105,109,101,110,115,105,111,110,65,-114,-39,-41,-84,95,68,20,2,0,2,73,0,6,104,101,105,103,104,116,73,0,5,119,105,100,116,104,120,112,0,0,1,-68,0,0,1,-36"/>
|
||||
</AuxValues>
|
||||
|
||||
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
|
||||
<SubComponents>
|
||||
<Component class="javax.swing.JCheckBox" name="winRegCheckBox">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.winRegCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="winRegCheckBoxActionPerformed"/>
|
||||
</Events>
|
||||
<Constraints>
|
||||
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
|
||||
<GridBagConstraints gridX="0" gridY="2" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="15" insetsBottom="5" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
|
||||
</Constraint>
|
||||
</Constraints>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="peCheckBox">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.peCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="peCheckBoxActionPerformed"/>
|
||||
</Events>
|
||||
<Constraints>
|
||||
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
|
||||
<GridBagConstraints gridX="0" gridY="3" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
|
||||
</Constraint>
|
||||
</Constraints>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextArea" name="plasoParserInfoTextArea">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="background" type="java.awt.Color" editor="org.netbeans.beaninfo.editors.ColorEditor">
|
||||
<Color blue="f0" green="f0" id="Panel.background" palette="3" red="f0" type="palette"/>
|
||||
</Property>
|
||||
<Property name="columns" type="int" value="20"/>
|
||||
<Property name="lineWrap" type="boolean" value="true"/>
|
||||
<Property name="rows" type="int" value="1"/>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.plasoParserInfoTextArea.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
<Property name="wrapStyleWord" type="boolean" value="true"/>
|
||||
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
|
||||
<Border info="null"/>
|
||||
</Property>
|
||||
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
|
||||
<Dimension value="[160, 50]"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Constraints>
|
||||
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
|
||||
<GridBagConstraints gridX="0" gridY="1" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="5" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
|
||||
</Constraint>
|
||||
</Constraints>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="noteLabel">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.noteLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Constraints>
|
||||
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
|
||||
<GridBagConstraints gridX="0" gridY="0" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="9" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
|
||||
</Constraint>
|
||||
</Constraints>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="disabledNoteLabel">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
|
||||
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.disabledNoteLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, "{key}")"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Constraints>
|
||||
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
|
||||
<GridBagConstraints gridX="0" gridY="4" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="5" insetsLeft="15" insetsBottom="0" insetsRight="15" anchor="18" weightX="1.0" weightY="1.0"/>
|
||||
</Constraint>
|
||||
</Constraints>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Form>
|
146
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.java
Executable file
146
Core/src/org/sleuthkit/autopsy/modules/plaso/PlasoModuleSettingsPanel.java
Executable file
@ -0,0 +1,146 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.plaso;
|
||||
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
|
||||
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
|
||||
|
||||
/**
|
||||
* Settings panel for the PlasoIngestModule.
|
||||
*/
|
||||
public class PlasoModuleSettingsPanel extends IngestModuleIngestJobSettingsPanel {
|
||||
|
||||
private final PlasoModuleSettings settings;
|
||||
|
||||
public PlasoModuleSettingsPanel(PlasoModuleSettings settings) {
|
||||
this.settings = settings;
|
||||
initComponents();
|
||||
}
|
||||
|
||||
/** This method is called from within the constructor to initialize the
|
||||
* form. WARNING: Do NOT modify this code. The content of this method is
|
||||
* always regenerated by the Form Editor.
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
|
||||
private void initComponents() {
|
||||
java.awt.GridBagConstraints gridBagConstraints;
|
||||
|
||||
jFileChooser1 = new javax.swing.JFileChooser();
|
||||
winRegCheckBox = new javax.swing.JCheckBox();
|
||||
peCheckBox = new javax.swing.JCheckBox();
|
||||
plasoParserInfoTextArea = new javax.swing.JTextArea();
|
||||
noteLabel = new javax.swing.JLabel();
|
||||
disabledNoteLabel = new javax.swing.JLabel();
|
||||
|
||||
setLayout(new java.awt.GridBagLayout());
|
||||
|
||||
org.openide.awt.Mnemonics.setLocalizedText(winRegCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.winRegCheckBox.text")); // NOI18N
|
||||
winRegCheckBox.addActionListener(new java.awt.event.ActionListener() {
|
||||
public void actionPerformed(java.awt.event.ActionEvent evt) {
|
||||
winRegCheckBoxActionPerformed(evt);
|
||||
}
|
||||
});
|
||||
gridBagConstraints = new java.awt.GridBagConstraints();
|
||||
gridBagConstraints.gridx = 0;
|
||||
gridBagConstraints.gridy = 2;
|
||||
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
|
||||
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
|
||||
gridBagConstraints.weightx = 1.0;
|
||||
gridBagConstraints.insets = new java.awt.Insets(0, 15, 5, 15);
|
||||
add(winRegCheckBox, gridBagConstraints);
|
||||
|
||||
org.openide.awt.Mnemonics.setLocalizedText(peCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.peCheckBox.text")); // NOI18N
|
||||
peCheckBox.addActionListener(new java.awt.event.ActionListener() {
|
||||
public void actionPerformed(java.awt.event.ActionEvent evt) {
|
||||
peCheckBoxActionPerformed(evt);
|
||||
}
|
||||
});
|
||||
gridBagConstraints = new java.awt.GridBagConstraints();
|
||||
gridBagConstraints.gridx = 0;
|
||||
gridBagConstraints.gridy = 3;
|
||||
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
|
||||
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
|
||||
gridBagConstraints.weightx = 1.0;
|
||||
gridBagConstraints.insets = new java.awt.Insets(0, 15, 9, 15);
|
||||
add(peCheckBox, gridBagConstraints);
|
||||
|
||||
plasoParserInfoTextArea.setEditable(false);
|
||||
plasoParserInfoTextArea.setBackground(javax.swing.UIManager.getDefaults().getColor("Panel.background"));
|
||||
plasoParserInfoTextArea.setColumns(20);
|
||||
plasoParserInfoTextArea.setLineWrap(true);
|
||||
plasoParserInfoTextArea.setRows(1);
|
||||
plasoParserInfoTextArea.setText(org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.plasoParserInfoTextArea.text")); // NOI18N
|
||||
plasoParserInfoTextArea.setWrapStyleWord(true);
|
||||
plasoParserInfoTextArea.setBorder(null);
|
||||
plasoParserInfoTextArea.setPreferredSize(new java.awt.Dimension(160, 50));
|
||||
gridBagConstraints = new java.awt.GridBagConstraints();
|
||||
gridBagConstraints.gridx = 0;
|
||||
gridBagConstraints.gridy = 1;
|
||||
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
|
||||
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
|
||||
gridBagConstraints.weightx = 1.0;
|
||||
gridBagConstraints.insets = new java.awt.Insets(5, 15, 9, 15);
|
||||
add(plasoParserInfoTextArea, gridBagConstraints);
|
||||
|
||||
org.openide.awt.Mnemonics.setLocalizedText(noteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.noteLabel.text")); // NOI18N
|
||||
gridBagConstraints = new java.awt.GridBagConstraints();
|
||||
gridBagConstraints.gridx = 0;
|
||||
gridBagConstraints.gridy = 0;
|
||||
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
|
||||
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
|
||||
gridBagConstraints.weightx = 1.0;
|
||||
gridBagConstraints.insets = new java.awt.Insets(9, 15, 9, 15);
|
||||
add(noteLabel, gridBagConstraints);
|
||||
|
||||
org.openide.awt.Mnemonics.setLocalizedText(disabledNoteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.disabledNoteLabel.text")); // NOI18N
|
||||
gridBagConstraints = new java.awt.GridBagConstraints();
|
||||
gridBagConstraints.gridx = 0;
|
||||
gridBagConstraints.gridy = 4;
|
||||
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
|
||||
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
|
||||
gridBagConstraints.weightx = 1.0;
|
||||
gridBagConstraints.weighty = 1.0;
|
||||
gridBagConstraints.insets = new java.awt.Insets(5, 15, 0, 15);
|
||||
add(disabledNoteLabel, gridBagConstraints);
|
||||
}// </editor-fold>//GEN-END:initComponents
|
||||
|
||||
private void winRegCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_winRegCheckBoxActionPerformed
|
||||
settings.setParserEnabled("winreg", winRegCheckBox.isSelected());
|
||||
}//GEN-LAST:event_winRegCheckBoxActionPerformed
|
||||
|
||||
private void peCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_peCheckBoxActionPerformed
|
||||
settings.setParserEnabled("pe", peCheckBox.isSelected());
|
||||
}//GEN-LAST:event_peCheckBoxActionPerformed
|
||||
|
||||
@Override
|
||||
public IngestModuleIngestJobSettings getSettings() {
|
||||
return settings;
|
||||
}
|
||||
|
||||
|
||||
// Variables declaration - do not modify//GEN-BEGIN:variables
|
||||
private javax.swing.JLabel disabledNoteLabel;
|
||||
private javax.swing.JFileChooser jFileChooser1;
|
||||
private javax.swing.JLabel noteLabel;
|
||||
private javax.swing.JCheckBox peCheckBox;
|
||||
private javax.swing.JTextArea plasoParserInfoTextArea;
|
||||
private javax.swing.JCheckBox winRegCheckBox;
|
||||
// End of variables declaration//GEN-END:variables
|
||||
}
|
@ -1,15 +1,15 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
*
|
||||
* Copyright 2013-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
@ -18,18 +18,23 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.modules.stix;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle.Messages;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
@ -38,18 +43,20 @@ import org.sleuthkit.datamodel.TskCoreException;
|
||||
*/
|
||||
class StixArtifactData {
|
||||
|
||||
private static final String MODULE_NAME = "Stix";
|
||||
|
||||
private AbstractFile file;
|
||||
private final String observableId;
|
||||
private final String objType;
|
||||
private static final Logger logger = Logger.getLogger(StixArtifactData.class.getName());
|
||||
|
||||
public StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
|
||||
StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
|
||||
file = a_file;
|
||||
observableId = a_observableId;
|
||||
objType = a_objType;
|
||||
}
|
||||
|
||||
public StixArtifactData(long a_objId, String a_observableId, String a_objType) {
|
||||
StixArtifactData(long a_objId, String a_observableId, String a_objType) {
|
||||
try {
|
||||
Case case1 = Case.getCurrentCaseThrows();
|
||||
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
|
||||
@ -62,39 +69,35 @@ class StixArtifactData {
|
||||
}
|
||||
|
||||
@Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
|
||||
"StixArtifactData.noOpenCase.errMsg=No open case available."})
|
||||
"StixArtifactData.noOpenCase.errMsg=No open case available."})
|
||||
public void createArtifact(String a_title) throws TskCoreException {
|
||||
Case currentCase;
|
||||
Blackboard blackboard;
|
||||
try {
|
||||
currentCase = Case.getCurrentCaseThrows();
|
||||
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_noOpenCase_errMsg(), ex.getLocalizedMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
String setName;
|
||||
if (a_title != null) {
|
||||
setName = "STIX Indicator - " + a_title; //NON-NLS
|
||||
} else {
|
||||
setName = "STIX Indicator - (no title)"; //NON-NLS
|
||||
}
|
||||
|
||||
Collection<BlackboardAttribute> attributes = new ArrayList<>();
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, "Stix", setName)); //NON-NLS
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, "Stix", observableId)); //NON-NLS
|
||||
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, "Stix", objType)); //NON-NLS
|
||||
|
||||
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
|
||||
String setName = "STIX Indicator - " + StringUtils.defaultIfBlank(a_title, "(no title)"); //NON-NLS
|
||||
|
||||
Collection<BlackboardAttribute> attributes = Arrays.asList(
|
||||
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, setName),
|
||||
new BlackboardAttribute(TSK_TITLE, MODULE_NAME, observableId),
|
||||
new BlackboardAttribute(TSK_CATEGORY, MODULE_NAME, objType));
|
||||
|
||||
// Create artifact if it doesn't already exist.
|
||||
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
|
||||
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
|
||||
BlackboardArtifact bba = file.newArtifact(TSK_INTERESTING_FILE_HIT);
|
||||
bba.addAttributes(attributes);
|
||||
|
||||
|
||||
try {
|
||||
// index the artifact for keyword search
|
||||
Blackboard blackboard = currentCase.getServices().getBlackboard();
|
||||
blackboard.indexArtifact(bba);
|
||||
/*
|
||||
* post the artifact which will index the artifact for keyword
|
||||
* search, and fire an event to notify UI of this new artifact
|
||||
*/
|
||||
blackboard.postArtifact(bba, MODULE_NAME);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
|
||||
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());
|
||||
|
@ -23,7 +23,7 @@ import java.util.List;
|
||||
import javax.xml.bind.DatatypeConverter;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
@ -65,7 +65,7 @@ final class CustomArtifactType {
|
||||
* @throws BlackboardException If there is an error adding any of the types.
|
||||
*/
|
||||
static void addToCaseDatabase() throws Blackboard.BlackboardException, NoCurrentCaseException {
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
|
||||
artifactType = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAME, ARTIFACT_DISPLAY_NAME);
|
||||
intAttrType = blackboard.getOrAddAttributeType(INT_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, INT_ATTR_DISPLAY_NAME);
|
||||
doubleAttrType = blackboard.getOrAddAttributeType(DOUBLE_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, DOUBLE_ATTR_DISPLAY_NAME);
|
||||
|
@ -21,11 +21,11 @@ package org.sleuthkit.autopsy.test;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleAdapter;
|
||||
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
|
@ -21,10 +21,10 @@ package org.sleuthkit.autopsy.test;
|
||||
import java.util.logging.Level;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
|
@ -26,17 +26,17 @@ import org.openide.util.Exceptions;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
|
||||
import org.sleuthkit.autopsy.ingest.IngestJobContext;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* A file ingest module that creates some interestng artifacts
|
||||
* A file ingest module that creates some interesting artifacts
|
||||
* with attributes based on files for test purposes.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
@ -55,7 +55,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
|
||||
@Override
|
||||
public void startUp(IngestJobContext context) throws IngestModuleException {
|
||||
try {
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
|
||||
artifactType = blackboard.getOrAddArtifactType(INT_ARTIFACT_TYPE_NAME, INT_ARTIFACT_DISPLAY_NAME);
|
||||
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
|
||||
throw new IngestModuleException(Bundle.InterestingArtifactCreatorIngestModule_exceptionMessage_errorCreatingCustomType(), ex);
|
||||
@ -77,7 +77,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
|
||||
* type.
|
||||
*/
|
||||
int randomArtIndex = (int) (Math.random() * 3);
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
|
||||
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getCaseBlackboard();
|
||||
BlackboardArtifact.Type artifactTypeBase = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAMES[randomArtIndex], ARTIFACT_DISPLAY_NAMES[randomArtIndex]);
|
||||
BlackboardArtifact artifactBase = file.newArtifact(artifactTypeBase.getTypeID());
|
||||
Collection<BlackboardAttribute> baseAttributes = new ArrayList<>();
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/Bundle.properties
Normal file → Executable file
@ -2,6 +2,8 @@ CTL_MakeTimeline=Timeline
|
||||
CTL_TimeLineTopComponentAction=TimeLineTopComponent
|
||||
CTL_TimeLineTopComponent=Timeline
|
||||
|
||||
FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval.
|
||||
FilteredEventsModel.timeRangeProperty.errorTitle=Timeline
|
||||
OpenTimelineAction.displayName=Timeline
|
||||
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
|
||||
OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.
|
||||
@ -9,7 +11,7 @@ PrompDialogManager.buttonType.continueNoUpdate=Continue Without Updating
|
||||
PrompDialogManager.buttonType.showTimeline=Continue
|
||||
PrompDialogManager.buttonType.update=Update DB
|
||||
PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?
|
||||
PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.
|
||||
PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.
|
||||
PromptDialogManager.progressDialog.title=Populating Timeline Data
|
||||
PromptDialogManager.rebuildPrompt.details=Details
|
||||
PromptDialogManager.rebuildPrompt.headerText=The Timeline DB is incomplete and/or out of date. Some events may be missing or inaccurate and some features may be unavailable.
|
||||
@ -25,6 +27,7 @@ ShowInTimelineDialog.fileTitle=View {0} in timeline.
|
||||
ShowInTimelineDialog.showTimelineButtonType.text=Show Timeline
|
||||
Timeline.dialogs.title=\ Timeline
|
||||
Timeline.frameName.text={0} - Autopsy Timeline
|
||||
Timeline.old.version=\ This Case was created with an older version of Autopsy.\nThe Timeline with not show events from data sources added with the older version of Autopsy
|
||||
Timeline.resultsPanel.title=Timeline Results
|
||||
Timeline.runJavaFxThread.progress.creating=Creating timeline . . .
|
||||
Timeline.zoomOutButton.text=Zoom Out
|
||||
@ -32,16 +35,6 @@ Timeline.goToButton.text=Go To:
|
||||
Timeline.yearBarChart.x.years=Years
|
||||
Timeline.resultPanel.loading=Loading...
|
||||
|
||||
TimeLineController.errorTitle=Timeline error.
|
||||
TimeLineController.outOfDate.errorMessage=Error determing if the timeline is out of date. We will assume it should be updated. See the logs for more details.
|
||||
TimeLineController.rebuildReasons.incompleteOldSchema=The Timeline events database was previously populated without incomplete information: Some features may be unavailable or non-functional unless you update the events database.
|
||||
TimeLineController.rebuildReasons.ingestWasRunning=The Timeline events database was previously populated while ingest was running: Some events may be missing, incomplete, or inaccurate.
|
||||
TimeLineController.rebuildReasons.outOfDate=The event data is out of date: Not all events will be visible.
|
||||
TimeLineController.rebuildReasons.outOfDateError=Could not determine if the timeline data is out of date.
|
||||
TimeLineController.setEventsDBStale.errMsgNotStale=Failed to mark the timeline db as not stale. Some results may be out of date or missing.
|
||||
TimeLineController.setEventsDBStale.errMsgStale=Failed to mark the timeline db as stale. Some results may be out of date or missing.
|
||||
TimeLinecontroller.setIngestRunning.errMsgNotRunning=Failed to mark the timeline db as populated while ingest was not running. Some results may be out of date or missing.
|
||||
TimeLineController.setIngestRunning.errMsgRunning=Failed to mark the timeline db as populated while ingest was running. Some results may be out of date or missing.
|
||||
TimeLinecontroller.updateNowQuestion=Do you want to update the events database now?
|
||||
TimelineFrame.title=Timeline
|
||||
TimelinePanel.jButton1.text=6m
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/Bundle_ja.properties
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/Bundle_ja.properties
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/CancellationProgressTask.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/CancellationProgressTask.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/ChronoFieldListCell.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/ChronoFieldListCell.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/FXMLConstructor.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/FXMLConstructor.java
Normal file → Executable file
699
Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
Executable file
699
Core/src/org/sleuthkit/autopsy/timeline/FilteredEventsModel.java
Executable file
@ -0,0 +1,699 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import com.google.common.eventbus.EventBus;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.logging.Level;
|
||||
import javafx.beans.InvalidationListener;
|
||||
import javafx.beans.property.ReadOnlyObjectProperty;
|
||||
import javafx.beans.property.ReadOnlyObjectWrapper;
|
||||
import javafx.collections.FXCollections;
|
||||
import javafx.collections.ObservableList;
|
||||
import javafx.collections.ObservableMap;
|
||||
import javafx.collections.ObservableSet;
|
||||
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
|
||||
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.Interval;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
|
||||
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
|
||||
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
|
||||
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
|
||||
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.DataSource;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.Tag;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
import org.sleuthkit.datamodel.TimelineManager;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TimelineEvent;
|
||||
import org.sleuthkit.datamodel.TimelineEventType;
|
||||
import org.sleuthkit.datamodel.TimelineFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
|
||||
import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
|
||||
|
||||
/**
|
||||
* This class acts as the model for a TimelineView
|
||||
*
|
||||
* Views can register listeners on properties returned by methods.
|
||||
*
|
||||
* This class is implemented as a filtered view into an underlying
|
||||
* TimelineManager.
|
||||
*
|
||||
* Maintainers, NOTE: as many methods as possible should cache their results so
|
||||
* as to avoid unnecessary db calls through the TimelineManager -jm
|
||||
*
|
||||
* Concurrency Policy: TimelineManager is internally synchronized, so methods
|
||||
* that only access the TimelineManager atomically do not need further
|
||||
* synchronization. All other member state variables should only be accessed
|
||||
* with intrinsic lock of containing FilteredEventsModel held.
|
||||
*
|
||||
*/
|
||||
public final class FilteredEventsModel {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());
|
||||
|
||||
private final TimelineManager eventManager;
|
||||
|
||||
private final Case autoCase;
|
||||
private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
|
||||
|
||||
//Filter and zoome state
|
||||
private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
|
||||
private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
|
||||
private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
|
||||
private final ReadOnlyObjectWrapper< TimelineEventType.TypeLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.TypeLevel.BASE_TYPE);
|
||||
private final ReadOnlyObjectWrapper< TimelineEvent.DescriptionLevel> requestedLOD = new ReadOnlyObjectWrapper<>(TimelineEvent.DescriptionLevel.SHORT);
|
||||
// end Filter and zoome state
|
||||
|
||||
//caches
|
||||
private final LoadingCache<Object, Long> maxCache;
|
||||
private final LoadingCache<Object, Long> minCache;
|
||||
private final LoadingCache<Long, TimelineEvent> idToEventCache;
|
||||
private final LoadingCache<ZoomState, Map<TimelineEventType, Long>> eventCountsCache;
|
||||
/** Map from datasource id to datasource name. */
|
||||
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
|
||||
private final ObservableSet< String> hashSets = FXCollections.observableSet();
|
||||
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
|
||||
// end caches
|
||||
|
||||
/**
|
||||
* Make a DataSourceFilter from an entry from the datasourcesMap.
|
||||
*
|
||||
* @param dataSourceEntry A map entry from datasource id to datasource name.
|
||||
*
|
||||
* @return A new DataSourceFilter for the given datsourcesMap entry.
|
||||
*/
|
||||
private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry<Long, String> dataSourceEntry) {
|
||||
return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
|
||||
}
|
||||
|
||||
public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
|
||||
this.autoCase = autoCase;
|
||||
this.eventManager = autoCase.getSleuthkitCase().getTimelineManager();
|
||||
populateFilterData();
|
||||
|
||||
//caches
|
||||
idToEventCache = CacheBuilder.newBuilder()
|
||||
.maximumSize(5000L)
|
||||
.expireAfterAccess(10, TimeUnit.MINUTES)
|
||||
.build(new CacheLoaderImpl<>(eventManager::getEventById));
|
||||
eventCountsCache = CacheBuilder.newBuilder()
|
||||
.maximumSize(1000L)
|
||||
.expireAfterAccess(10, TimeUnit.MINUTES)
|
||||
.build(new CacheLoaderImpl<>(this::countEventsByType));
|
||||
|
||||
maxCache = CacheBuilder.newBuilder()
|
||||
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxTime()));
|
||||
minCache = CacheBuilder.newBuilder()
|
||||
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
|
||||
|
||||
InvalidationListener filterSyncListener = observable -> {
|
||||
RootFilterState rootFilter = filterProperty().get();
|
||||
syncFilters(rootFilter);
|
||||
requestedFilter.set(rootFilter.copyOf());
|
||||
};
|
||||
|
||||
datasourcesMap.addListener(filterSyncListener);
|
||||
hashSets.addListener(filterSyncListener);
|
||||
tagNames.addListener(filterSyncListener);
|
||||
|
||||
requestedFilter.set(getDefaultFilter());
|
||||
|
||||
requestedZoomState.addListener(observable -> {
|
||||
final ZoomState zoomState = requestedZoomState.get();
|
||||
|
||||
if (zoomState != null) {
|
||||
synchronized (FilteredEventsModel.this) {
|
||||
requestedTypeZoom.set(zoomState.getTypeZoomLevel());
|
||||
requestedFilter.set(zoomState.getFilterState());
|
||||
requestedTimeRange.set(zoomState.getTimeRange());
|
||||
requestedLOD.set(zoomState.getDescriptionLOD());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
requestedZoomState.bind(currentStateProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
* get the count of all events that fit the given zoom params organized by
|
||||
* the EvenType of the level specified in the zoomState
|
||||
*
|
||||
* @param zoomState The params that control what events to count and how to
|
||||
* organize the returned map
|
||||
*
|
||||
* @return a map from event type( of the requested level) to event counts
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
private Map<TimelineEventType, Long> countEventsByType(ZoomState zoomState) throws TskCoreException {
|
||||
if (zoomState.getTimeRange() == null) {
|
||||
return Collections.emptyMap();
|
||||
} else {
|
||||
return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000,
|
||||
zoomState.getTimeRange().getEndMillis() / 1000,
|
||||
zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel());
|
||||
}
|
||||
}
|
||||
|
||||
public TimelineManager getEventManager() {
|
||||
return eventManager;
|
||||
}
|
||||
|
||||
public SleuthkitCase getSleuthkitCase() {
|
||||
return autoCase.getSleuthkitCase();
|
||||
}
|
||||
|
||||
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
|
||||
return eventManager.getSpanningInterval(timeRange, filter, timeZone);
|
||||
}
|
||||
|
||||
/**
|
||||
* Readonly observable property for the current ZoomState
|
||||
*
|
||||
* @return A readonly observable property for the current ZoomState.
|
||||
*/
|
||||
synchronized public ReadOnlyObjectProperty<ZoomState> zoomStateProperty() {
|
||||
return requestedZoomState.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current ZoomState
|
||||
*
|
||||
* @return The current ZoomState
|
||||
*/
|
||||
synchronized public ZoomState getZoomState() {
|
||||
return requestedZoomState.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the data used to determine the available filters.
|
||||
*/
|
||||
synchronized private void populateFilterData() throws TskCoreException {
|
||||
SleuthkitCase skCase = autoCase.getSleuthkitCase();
|
||||
hashSets.addAll(eventManager.getHashSetNames());
|
||||
|
||||
//because there is no way to remove a datasource we only add to this map.
|
||||
for (DataSource ds : skCase.getDataSources()) {
|
||||
datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
|
||||
}
|
||||
|
||||
//should this only be tags applied to files or event bearing artifacts?
|
||||
tagNames.setAll(skCase.getTagNamesInUse());
|
||||
}
|
||||
|
||||
/**
|
||||
* "sync" the given root filter with the state of the casee: Disable filters
|
||||
* for tags that are not in use in the case, and add new filters for tags,
|
||||
* hashsets, and datasources, that don't have them. New filters are selected
|
||||
* by default.
|
||||
*
|
||||
* @param rootFilterState the filter state to modify so it is consistent
|
||||
* with the tags in use in the case
|
||||
*/
|
||||
public void syncFilters(RootFilterState rootFilterState) {
|
||||
TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
|
||||
for (TagName tagName : tagNames) {
|
||||
tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
|
||||
}
|
||||
for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
|
||||
// disable states for tag names that don't exist in case.
|
||||
tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
|
||||
}
|
||||
|
||||
DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
|
||||
datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
|
||||
|
||||
HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
|
||||
for (String hashSet : hashSets) {
|
||||
hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a read only view of the time range currently in view.
|
||||
*
|
||||
* @return A read only view of the time range currently in view.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
|
||||
"FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
|
||||
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
|
||||
if (requestedTimeRange.get() == null) {
|
||||
try {
|
||||
requestedTimeRange.set(getSpanningInterval());
|
||||
} catch (TskCoreException timelineCacheException) {
|
||||
MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
|
||||
Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
|
||||
logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
|
||||
}
|
||||
}
|
||||
return requestedTimeRange.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<TimelineEvent.DescriptionLevel> descriptionLODProperty() {
|
||||
return requestedLOD.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<RootFilterState> filterProperty() {
|
||||
return requestedFilter.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<TimelineEventType.TypeLevel> eventTypeZoomProperty() {
|
||||
return requestedTypeZoom.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
/**
|
||||
* The time range currently in view.
|
||||
*
|
||||
* @return The time range currently in view.
|
||||
*/
|
||||
synchronized public Interval getTimeRange() {
|
||||
return getZoomState().getTimeRange();
|
||||
}
|
||||
|
||||
synchronized public TimelineEvent.DescriptionLevel getDescriptionLOD() {
|
||||
return getZoomState().getDescriptionLOD();
|
||||
}
|
||||
|
||||
synchronized public RootFilterState getFilterState() {
|
||||
return getZoomState().getFilterState();
|
||||
}
|
||||
|
||||
synchronized public TimelineEventType.TypeLevel getEventTypeZoom() {
|
||||
return getZoomState().getTypeZoomLevel();
|
||||
}
|
||||
|
||||
/** Get the default filter used at startup.
|
||||
*
|
||||
* @return the default filter used at startup
|
||||
*/
|
||||
public synchronized RootFilterState getDefaultFilter() {
|
||||
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
|
||||
datasourcesMap.entrySet().forEach(dataSourceEntry
|
||||
-> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));
|
||||
|
||||
HashHitsFilter hashHitsFilter = new HashHitsFilter();
|
||||
hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
|
||||
|
||||
TagsFilter tagsFilter = new TagsFilter();
|
||||
tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
|
||||
|
||||
FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
|
||||
|
||||
return new RootFilterState(new RootFilter(new HideKnownFilter(),
|
||||
tagsFilter,
|
||||
hashHitsFilter,
|
||||
new TextFilter(),
|
||||
new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
|
||||
dataSourcesFilter,
|
||||
fileTypesFilter,
|
||||
Collections.emptySet()));
|
||||
}
|
||||
|
||||
public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException {
|
||||
return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone);
|
||||
}
|
||||
|
||||
public TimelineEvent getEventById(Long eventID) throws TskCoreException {
|
||||
try {
|
||||
return idToEventCache.get(eventID);
|
||||
} catch (ExecutionException ex) {
|
||||
throw new TskCoreException("Error getting cached event from ID", ex);
|
||||
}
|
||||
}
|
||||
|
||||
public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
|
||||
Set<TimelineEvent> events = new HashSet<>();
|
||||
for (Long id : eventIDs) {
|
||||
events.add(getEventById(id));
|
||||
}
|
||||
return events;
|
||||
}
|
||||
|
||||
/**
|
||||
* get a count of tagnames applied to the given event ids as a map from
|
||||
* tagname displayname to count of tag applications
|
||||
*
|
||||
* @param eventIDsWithTags the event ids to get the tag counts map for
|
||||
*
|
||||
* @return a map from tagname displayname to count of applications
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
|
||||
return eventManager.getTagCountsByTagName(eventIDsWithTags);
|
||||
}
|
||||
|
||||
public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {
|
||||
|
||||
final Interval overlap;
|
||||
RootFilter intersection;
|
||||
synchronized (this) {
|
||||
overlap = getSpanningInterval().overlap(timeRange);
|
||||
intersection = getFilterState().intersect(filter).getActiveFilter();
|
||||
}
|
||||
|
||||
return eventManager.getEventIDs(overlap, intersection);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the number of events that pass the requested filter and are within
|
||||
* the given time range.
|
||||
*
|
||||
* NOTE: this method does not change the requested time range
|
||||
*
|
||||
* @param timeRange
|
||||
*
|
||||
* @return
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
|
||||
|
||||
final RootFilterState filter;
|
||||
final TimelineEventType.TypeLevel typeZoom;
|
||||
synchronized (this) {
|
||||
filter = getFilterState();
|
||||
typeZoom = getEventTypeZoom();
|
||||
}
|
||||
try {
|
||||
return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null));
|
||||
} catch (ExecutionException executionException) {
|
||||
throw new TskCoreException("Error getting cached event counts.`1", executionException);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The smallest interval spanning all the events from the case,
|
||||
* ignoring any filters or requested ranges.
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Interval getSpanningInterval() throws TskCoreException {
|
||||
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the smallest interval spanning all the given events.
|
||||
*
|
||||
* @param eventIDs The IDs of the events to get a spanning interval arround.
|
||||
*
|
||||
* @return the smallest interval spanning all the given events
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
|
||||
return eventManager.getSpanningInterval(eventIDs);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time (in seconds from unix epoch) of the absolutely first
|
||||
* event available from the repository, ignoring any filters or
|
||||
* requested ranges
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Long getMinTime() throws TskCoreException {
|
||||
try {
|
||||
return minCache.get("min"); // NON-NLS
|
||||
} catch (ExecutionException ex) {
|
||||
throw new TskCoreException("Error getting cached min time.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time (in seconds from unix epoch) of the absolutely last
|
||||
* event available from the repository, ignoring any filters or
|
||||
* requested ranges
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Long getMaxTime() throws TskCoreException {
|
||||
try {
|
||||
return maxCache.get("max"); // NON-NLS
|
||||
} catch (ExecutionException ex) {
|
||||
throw new TskCoreException("Error getting cached max time.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
|
||||
ContentTag contentTag = evt.getAddedTag();
|
||||
Content content = contentTag.getContent();
|
||||
Set<Long> updatedEventIDs = addTag(content.getId(), null, contentTag);
|
||||
return postTagsAdded(updatedEventIDs);
|
||||
}
|
||||
|
||||
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
|
||||
BlackboardArtifactTag artifactTag = evt.getAddedTag();
|
||||
BlackboardArtifact artifact = artifactTag.getArtifact();
|
||||
Set<Long> updatedEventIDs = addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);
|
||||
return postTagsAdded(updatedEventIDs);
|
||||
}
|
||||
|
||||
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
|
||||
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
|
||||
|
||||
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
|
||||
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
|
||||
Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
|
||||
return postTagsDeleted(updatedEventIDs);
|
||||
}
|
||||
|
||||
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
|
||||
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
|
||||
|
||||
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
|
||||
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
|
||||
Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
|
||||
return postTagsDeleted(updatedEventIDs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Set of event IDs for the events that are derived from the given
|
||||
* file.
|
||||
*
|
||||
* @param file The AbstractFile to get derived event IDs
|
||||
* for.
|
||||
* @param includeDerivedArtifacts If true, also get event IDs for events
|
||||
* derived from artifacts derived form this
|
||||
* file. If false, only gets events derived
|
||||
* directly from this file (file system
|
||||
* timestamps).
|
||||
*
|
||||
* @return A Set of event IDs for the events that are derived from the given
|
||||
* file.
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
|
||||
return eventManager.getEventIDsForFile(file, includeDerivedArtifacts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a List of event IDs for the events that are derived from the given
|
||||
* artifact.
|
||||
*
|
||||
* @param artifact The BlackboardArtifact to get derived event IDs for.
|
||||
*
|
||||
* @return A List of event IDs for the events that are derived from the
|
||||
* given artifact.
|
||||
*
|
||||
* @throws org.sleuthkit.datamodel.TskCoreException
|
||||
*/
|
||||
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
|
||||
return eventManager.getEventIDsForArtifact(artifact);
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a TagsAddedEvent to all registered subscribers, if the given set of
|
||||
* updated event IDs is not empty.
|
||||
*
|
||||
* @param updatedEventIDs The set of event ids to be included in the
|
||||
* TagsAddedEvent.
|
||||
*
|
||||
* @return True if an event was posted.
|
||||
*/
|
||||
private boolean postTagsAdded(Set<Long> updatedEventIDs) {
|
||||
boolean tagsUpdated = !updatedEventIDs.isEmpty();
|
||||
if (tagsUpdated) {
|
||||
eventbus.post(new TagsAddedEvent(updatedEventIDs));
|
||||
}
|
||||
return tagsUpdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a TagsDeletedEvent to all registered subscribers, if the given set
|
||||
* of updated event IDs is not empty.
|
||||
*
|
||||
* @param updatedEventIDs The set of event ids to be included in the
|
||||
* TagsDeletedEvent.
|
||||
*
|
||||
* @return True if an event was posted.
|
||||
*/
|
||||
private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
|
||||
boolean tagsUpdated = !updatedEventIDs.isEmpty();
|
||||
if (tagsUpdated) {
|
||||
eventbus.post(new TagsDeletedEvent(updatedEventIDs));
|
||||
}
|
||||
return tagsUpdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register the given object to receive events.
|
||||
*
|
||||
* @param subscriber The object to register. Must implement public methods
|
||||
* annotated with Subscribe.
|
||||
*/
|
||||
synchronized public void registerForEvents(Object subscriber) {
|
||||
eventbus.register(subscriber);
|
||||
}
|
||||
|
||||
/**
|
||||
* Un-register the given object, so it no longer receives events.
|
||||
*
|
||||
* @param subscriber The object to un-register.
|
||||
*/
|
||||
synchronized public void unRegisterForEvents(Object subscriber) {
|
||||
eventbus.unregister(subscriber);
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a RefreshRequestedEvent to all registered subscribers.
|
||||
*/
|
||||
public void postRefreshRequest() {
|
||||
eventbus.post(new RefreshRequestedEvent());
|
||||
}
|
||||
|
||||
/**
|
||||
* (Re)Post an AutopsyEvent received from another event distribution system
|
||||
* locally to all registered subscribers.
|
||||
*
|
||||
* @param event The event to re-post.
|
||||
*/
|
||||
public void postAutopsyEventLocally(AutopsyEvent event) {
|
||||
eventbus.post(event);
|
||||
}
|
||||
|
||||
public ImmutableList<TimelineEventType> getEventTypes() {
|
||||
return eventManager.getEventTypes();
|
||||
}
|
||||
|
||||
synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) throws TskCoreException {
|
||||
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, true);
|
||||
if (isNotEmpty(updatedEventIDs)) {
|
||||
invalidateCaches(updatedEventIDs);
|
||||
}
|
||||
return updatedEventIDs;
|
||||
}
|
||||
|
||||
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) throws TskCoreException {
|
||||
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, tagged);
|
||||
if (isNotEmpty(updatedEventIDs)) {
|
||||
invalidateCaches(updatedEventIDs);
|
||||
}
|
||||
return updatedEventIDs;
|
||||
}
|
||||
|
||||
synchronized public Set<Long> setHashHit(Collection<BlackboardArtifact> artifacts, boolean hasHashHit) throws TskCoreException {
|
||||
Set<Long> updatedEventIDs = new HashSet<>();
|
||||
for (BlackboardArtifact artifact : artifacts) {
|
||||
updatedEventIDs.addAll(eventManager.setEventsHashed(artifact.getObjectID(), hasHashHit));
|
||||
}
|
||||
if (isNotEmpty(updatedEventIDs)) {
|
||||
invalidateCaches(updatedEventIDs);
|
||||
}
|
||||
return updatedEventIDs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidate the timeline caches for the given event IDs. Also forces the
|
||||
* filter values to be updated with any new values from the case data.( data
|
||||
* sources, tags, etc)
|
||||
*
|
||||
* @param updatedEventIDs A collection of the event IDs whose cached event
|
||||
* objects should be invalidated. Can be null or an
|
||||
* empty sett to invalidate the general caches, such
|
||||
* as min/max time, or the counts per event type.
|
||||
*
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
|
||||
minCache.invalidateAll();
|
||||
maxCache.invalidateAll();
|
||||
idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
|
||||
eventCountsCache.invalidateAll();
|
||||
|
||||
populateFilterData();
|
||||
|
||||
eventbus.post(new CacheInvalidatedEvent());
|
||||
}
|
||||
|
||||
/**
|
||||
* Event fired when a cache has been invalidated. The UI should make it
|
||||
* clear that the view is potentially out of date and present an action to
|
||||
* refresh the view.
|
||||
*/
|
||||
public static class CacheInvalidatedEvent {
|
||||
|
||||
private CacheInvalidatedEvent() {
|
||||
}
|
||||
}
|
||||
}
|
0
Core/src/org/sleuthkit/autopsy/timeline/ModifiableProxyLookup.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/ModifiableProxyLookup.java
Normal file → Executable file
22
Core/src/org/sleuthkit/autopsy/timeline/TimeLineException.java → Core/src/org/sleuthkit/autopsy/timeline/OnStart.java
Normal file → Executable file
22
Core/src/org/sleuthkit/autopsy/timeline/TimeLineException.java → Core/src/org/sleuthkit/autopsy/timeline/OnStart.java
Normal file → Executable file
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014 Basis Technology Corp.
|
||||
* Copyright 2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -19,15 +19,19 @@
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
/**
|
||||
*
|
||||
* The org.openide.modules.OnStart annotation tells NetBeans to invoke this
|
||||
* class's run method.
|
||||
*/
|
||||
public class TimeLineException extends Exception {
|
||||
@org.openide.modules.OnStart
|
||||
public class OnStart implements Runnable {
|
||||
|
||||
public TimeLineException(String string, Exception e) {
|
||||
super(string, e);
|
||||
}
|
||||
|
||||
public TimeLineException(String string) {
|
||||
super(string);
|
||||
/**
|
||||
* This method is invoked by virtue of the OnStart annotation on the this
|
||||
* class
|
||||
*/
|
||||
@Override
|
||||
public void run() {
|
||||
TimeLineModule.onStart();
|
||||
}
|
||||
}
|
||||
|
55
Core/src/org/sleuthkit/autopsy/timeline/OpenTimelineAction.java
Normal file → Executable file
55
Core/src/org/sleuthkit/autopsy/timeline/OpenTimelineAction.java
Normal file → Executable file
@ -19,7 +19,6 @@
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import java.awt.Component;
|
||||
import java.io.IOException;
|
||||
import java.util.logging.Level;
|
||||
import javafx.application.Platform;
|
||||
import javax.swing.ImageIcon;
|
||||
@ -47,10 +46,13 @@ import org.sleuthkit.datamodel.TskCoreException;
|
||||
* An Action that opens the Timeline window. Has methods to open the window in
|
||||
* various specific states (e.g., showing a specific artifact in the List View)
|
||||
*/
|
||||
|
||||
|
||||
@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.timeline.Timeline")
|
||||
@ActionRegistration(displayName = "#CTL_MakeTimeline", lazy = false)
|
||||
@ActionReferences(value = {
|
||||
@ActionReference(path = "Menu/Tools", position = 102),
|
||||
@ActionReference(path = "Menu/Tools", position = 102)
|
||||
,
|
||||
@ActionReference(path = "Toolbars/Case", position = 102)})
|
||||
public final class OpenTimelineAction extends CallableSystemAction {
|
||||
|
||||
@ -58,19 +60,10 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
private static final Logger logger = Logger.getLogger(OpenTimelineAction.class.getName());
|
||||
private static final int FILE_LIMIT = 6_000_000;
|
||||
|
||||
private static TimeLineController timeLineController = null;
|
||||
|
||||
private final JMenuItem menuItem;
|
||||
private final JButton toolbarButton = new JButton(getName(),
|
||||
new ImageIcon(getClass().getResource("images/btn_icon_timeline_colorized_26.png"))); //NON-NLS
|
||||
|
||||
/**
|
||||
* Invalidate the reference to the controller so that a new one will be
|
||||
* instantiated the next time this action is invoked
|
||||
*/
|
||||
synchronized static void invalidateController() {
|
||||
timeLineController = null;
|
||||
}
|
||||
|
||||
public OpenTimelineAction() {
|
||||
toolbarButton.addActionListener(actionEvent -> performAction());
|
||||
@ -93,24 +86,24 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
public void performAction() {
|
||||
if (tooManyFiles()) {
|
||||
Platform.runLater(PromptDialogManager::showTooManyFiles);
|
||||
synchronized (OpenTimelineAction.this) {
|
||||
if (timeLineController != null) {
|
||||
timeLineController.shutDownTimeLine();
|
||||
}
|
||||
}
|
||||
setEnabled(false);
|
||||
}else if("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
|
||||
} else if ("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
|
||||
Platform.runLater(PromptDialogManager::showTimeLineDisabledMessage);
|
||||
setEnabled(false);
|
||||
}else {
|
||||
showTimeline();
|
||||
} else {
|
||||
try {
|
||||
showTimeline();
|
||||
} catch (TskCoreException ex) {
|
||||
MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
|
||||
logger.log(Level.SEVERE, "Error showingtimeline.", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@NbBundle.Messages({
|
||||
"OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.",
|
||||
"OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources."})
|
||||
synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) {
|
||||
synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) throws TskCoreException {
|
||||
try {
|
||||
Case currentCase = Case.getCurrentCaseThrows();
|
||||
if (currentCase.hasData() == false) {
|
||||
@ -118,20 +111,8 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
logger.log(Level.INFO, "Could not create timeline, there are no data sources.");// NON-NLS
|
||||
return;
|
||||
}
|
||||
try {
|
||||
if (timeLineController == null) {
|
||||
timeLineController = new TimeLineController(currentCase);
|
||||
} else if (timeLineController.getAutopsyCase() != currentCase) {
|
||||
timeLineController.shutDownTimeLine();
|
||||
timeLineController = new TimeLineController(currentCase);
|
||||
}
|
||||
|
||||
timeLineController.showTimeLine(file, artifact);
|
||||
|
||||
} catch (IOException iOException) {
|
||||
MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
|
||||
logger.log(Level.SEVERE, "Failed to initialize per case timeline settings.", iOException);
|
||||
}
|
||||
TimeLineController controller = TimeLineModule.getController();
|
||||
controller.showTimeLine(file, artifact);
|
||||
} catch (NoCurrentCaseException e) {
|
||||
//there is no case... Do nothing.
|
||||
}
|
||||
@ -141,7 +122,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
* Open the Timeline window with the default initial view.
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
public void showTimeline() {
|
||||
public void showTimeline() throws TskCoreException {
|
||||
showTimeline(null, null);
|
||||
}
|
||||
|
||||
@ -153,7 +134,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
* @param file The AbstractFile to show in the Timeline.
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
public void showFileInTimeline(AbstractFile file) {
|
||||
public void showFileInTimeline(AbstractFile file) throws TskCoreException {
|
||||
showTimeline(file, null);
|
||||
}
|
||||
|
||||
@ -164,7 +145,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
|
||||
* @param artifact The BlackboardArtifact to show in the Timeline.
|
||||
*/
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
|
||||
public void showArtifactInTimeline(BlackboardArtifact artifact) {
|
||||
public void showArtifactInTimeline(BlackboardArtifact artifact) throws TskCoreException {
|
||||
showTimeline(null, artifact);
|
||||
}
|
||||
|
||||
|
@ -1,175 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2016-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Objects;
|
||||
import java.util.Properties;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
|
||||
/**
|
||||
* Provides access to per-case timeline properties (key-value store).
|
||||
*/
|
||||
class PerCaseTimelineProperties {
|
||||
|
||||
private static final String STALE_KEY = "stale"; //NON-NLS
|
||||
private static final String WAS_INGEST_RUNNING_KEY = "was_ingest_running"; // NON-NLS
|
||||
|
||||
private final Path propertiesPath;
|
||||
|
||||
PerCaseTimelineProperties(Case autopsyCase) {
|
||||
Objects.requireNonNull(autopsyCase, "Case must not be null");
|
||||
propertiesPath = Paths.get(autopsyCase.getModuleDirectory(), "Timeline", "timeline.properties"); //NON-NLS
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the DB stale, i.e. does it need to be updated because new datasources
|
||||
* (eg) have been added to the case.
|
||||
*
|
||||
* @return true if the db is stale
|
||||
*
|
||||
* @throws IOException if there is a problem reading the state from disk
|
||||
*/
|
||||
public synchronized boolean isDBStale() throws IOException {
|
||||
|
||||
String stale = getProperty(STALE_KEY);
|
||||
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* record the state of the events db as stale(true) or not stale(false).
|
||||
*
|
||||
* @param stale the new state of the event db. true for stale, false for not
|
||||
* stale.
|
||||
*
|
||||
* @throws IOException if there was a problem writing the state to disk.
|
||||
*/
|
||||
public synchronized void setDbStale(Boolean stale) throws IOException {
|
||||
setProperty(STALE_KEY, stale.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Was ingest running the last time the database was updated?
|
||||
*
|
||||
* @return true if ingest was running the last time the db was updated
|
||||
*
|
||||
* @throws IOException if there was a problem reading from disk
|
||||
*/
|
||||
public synchronized boolean wasIngestRunning() throws IOException {
|
||||
String stale = getProperty(WAS_INGEST_RUNNING_KEY);
|
||||
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
|
||||
}
|
||||
|
||||
/**
|
||||
* record whether ingest was running during the last time the database was
|
||||
* updated
|
||||
*
|
||||
* @param ingestRunning true if ingest was running
|
||||
*
|
||||
* @throws IOException if there was a problem writing to disk
|
||||
*/
|
||||
public synchronized void setIngestRunning(Boolean ingestRunning) throws IOException {
|
||||
setProperty(WAS_INGEST_RUNNING_KEY, ingestRunning.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a {@link Path} to the properties file. If the file does not exist, it
|
||||
* will be created.
|
||||
*
|
||||
* @return the Path to the properties file.
|
||||
*
|
||||
* @throws IOException if there was a problem creating the properties file
|
||||
*/
|
||||
private synchronized Path getPropertiesPath() throws IOException {
|
||||
|
||||
if (!Files.exists(propertiesPath)) {
|
||||
Path parent = propertiesPath.getParent();
|
||||
Files.createDirectories(parent);
|
||||
Files.createFile(propertiesPath);
|
||||
}
|
||||
return propertiesPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the property with the given key.
|
||||
*
|
||||
* @param propertyKey - The property key to get the value for.
|
||||
*
|
||||
* @return - the value associated with the property.
|
||||
*
|
||||
* @throws IOException if there was a problem reading the property from disk
|
||||
*/
|
||||
private synchronized String getProperty(String propertyKey) throws IOException {
|
||||
return getProperties().getProperty(propertyKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the given property to the given value.
|
||||
*
|
||||
* @param propertyKey - The key of the property to be modified.
|
||||
* @param propertyValue - the value to set the property to.
|
||||
*
|
||||
* @throws IOException if there was a problem writing the property to disk
|
||||
*/
|
||||
private synchronized void setProperty(String propertyKey, String propertyValue) throws IOException {
|
||||
Path propertiesFile = getPropertiesPath();
|
||||
Properties props = getProperties(propertiesFile);
|
||||
props.setProperty(propertyKey, propertyValue);
|
||||
|
||||
try (OutputStream fos = Files.newOutputStream(propertiesFile)) {
|
||||
props.store(fos, ""); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a {@link Properties} object used to store the timeline properties.
|
||||
*
|
||||
* @return a properties object
|
||||
*
|
||||
* @throws IOException if there was a problem reading the .properties file
|
||||
*/
|
||||
private synchronized Properties getProperties() throws IOException {
|
||||
return getProperties(getPropertiesPath());
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a {@link Properties} object populated form the given .properties
|
||||
* file.
|
||||
*
|
||||
* @param propertiesFile a path to the .properties file to load
|
||||
*
|
||||
* @return a properties object
|
||||
*
|
||||
* @throws IOException if there was a problem reading the .properties file
|
||||
*/
|
||||
private synchronized Properties getProperties(final Path propertiesFile) throws IOException {
|
||||
try (InputStream inputStream = Files.newInputStream(propertiesFile)) {
|
||||
Properties props = new Properties();
|
||||
props.load(inputStream);
|
||||
return props;
|
||||
}
|
||||
}
|
||||
}
|
3
Core/src/org/sleuthkit/autopsy/timeline/PromptDialogManager.java
Normal file → Executable file
3
Core/src/org/sleuthkit/autopsy/timeline/PromptDialogManager.java
Normal file → Executable file
@ -152,7 +152,7 @@ public final class PromptDialogManager {
|
||||
* @return True if they want to continue anyways.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.",
|
||||
"PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.",
|
||||
"PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?"})
|
||||
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
|
||||
boolean confirmDuringIngest() {
|
||||
@ -235,5 +235,4 @@ public final class PromptDialogManager {
|
||||
dialog.setHeaderText(Bundle.PromptDialogManager_showTimeLineDisabledMessage_headerText());
|
||||
dialog.showAndWait();
|
||||
}
|
||||
|
||||
}
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.fxml
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.fxml
Normal file → Executable file
52
Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java
Normal file → Executable file
52
Core/src/org/sleuthkit/autopsy/timeline/ShowInTimelineDialog.java
Normal file → Executable file
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2018 Basis Technology Corp.
|
||||
* Copyright 2011-2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -25,10 +25,12 @@ import java.time.Instant;
|
||||
import java.time.temporal.ChronoField;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import javafx.beans.binding.Bindings;
|
||||
import javafx.beans.property.SimpleObjectProperty;
|
||||
import javafx.fxml.FXML;
|
||||
@ -58,14 +60,15 @@ import org.controlsfx.validation.Validator;
|
||||
import org.joda.time.Interval;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.SingleEvent;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
|
||||
import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
|
||||
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TimelineEventType;
|
||||
import org.sleuthkit.datamodel.TimelineEvent;
|
||||
|
||||
/**
|
||||
* A Dialog that, given an AbstractFile or BlackBoardArtifact, allows the user
|
||||
@ -93,13 +96,13 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
ChronoField.SECOND_OF_MINUTE);
|
||||
|
||||
@FXML
|
||||
private TableView<SingleEvent> eventTable;
|
||||
private TableView<TimelineEvent> eventTable;
|
||||
|
||||
@FXML
|
||||
private TableColumn<SingleEvent, EventType> typeColumn;
|
||||
private TableColumn<TimelineEvent, TimelineEventType> typeColumn;
|
||||
|
||||
@FXML
|
||||
private TableColumn<SingleEvent, Long> dateTimeColumn;
|
||||
private TableColumn<TimelineEvent, Long> dateTimeColumn;
|
||||
|
||||
@FXML
|
||||
private Spinner<Integer> amountSpinner;
|
||||
@ -112,8 +115,6 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
|
||||
private final VBox contentRoot = new VBox();
|
||||
|
||||
private final TimeLineController controller;
|
||||
|
||||
private final ValidationSupport validationSupport = new ValidationSupport();
|
||||
|
||||
/**
|
||||
@ -124,10 +125,8 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
* from.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."
|
||||
})
|
||||
private ShowInTimelineDialog(TimeLineController controller, List<Long> eventIDS) {
|
||||
this.controller = controller;
|
||||
"ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."})
|
||||
private ShowInTimelineDialog(TimeLineController controller, Collection<Long> eventIDS) throws TskCoreException {
|
||||
|
||||
//load dialog content fxml
|
||||
final String name = "nbres:/" + StringUtils.replace(ShowInTimelineDialog.class.getPackage().getName(), ".", "/") + "/ShowInTimelineDialog.fxml"; // NON-NLS
|
||||
@ -195,7 +194,16 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
dateTimeColumn.setCellFactory(param -> new DateTimeTableCell<>());
|
||||
|
||||
//add events to table
|
||||
eventTable.getItems().setAll(eventIDS.stream().map(controller.getEventsModel()::getEventById).collect(Collectors.toSet()));
|
||||
Set<TimelineEvent> events = new HashSet<>();
|
||||
FilteredEventsModel eventsModel = controller.getEventsModel();
|
||||
for (Long eventID : eventIDS) {
|
||||
try {
|
||||
events.add(eventsModel.getEventById(eventID));
|
||||
} catch (TskCoreException ex) {
|
||||
throw new TskCoreException("Error getting event by id.", ex);
|
||||
}
|
||||
}
|
||||
eventTable.getItems().setAll(events);
|
||||
eventTable.setPrefHeight(Math.min(200, 24 * eventTable.getItems().size() + 28));
|
||||
}
|
||||
|
||||
@ -207,7 +215,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
* @param artifact The BlackboardArtifact to configure this dialog for.
|
||||
*/
|
||||
@NbBundle.Messages({"ShowInTimelineDialog.artifactTitle=View Result in Timeline."})
|
||||
ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) {
|
||||
ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) throws TskCoreException {
|
||||
//get events IDs from artifact
|
||||
this(controller, controller.getEventsModel().getEventIDsForArtifact(artifact));
|
||||
|
||||
@ -237,7 +245,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
@NbBundle.Messages({"# {0} - file path",
|
||||
"ShowInTimelineDialog.fileTitle=View {0} in timeline.",
|
||||
"ShowInTimelineDialog.eventSelectionValidator.message=You must select an event."})
|
||||
ShowInTimelineDialog(TimeLineController controller, AbstractFile file) {
|
||||
ShowInTimelineDialog(TimeLineController controller, AbstractFile file) throws TskCoreException {
|
||||
this(controller, controller.getEventsModel().getEventIDsForFile(file, false));
|
||||
|
||||
/*
|
||||
@ -293,11 +301,11 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
/**
|
||||
* Construct this Dialog's "result" from the given event.
|
||||
*
|
||||
* @param selectedEvent The SingleEvent to include in the EventInTimeRange
|
||||
* @param selectedEvent The TimeLineEvent to include in the EventInTimeRange
|
||||
*
|
||||
* @return The EventInTimeRange that is the "result" of this dialog.
|
||||
*/
|
||||
private ViewInTimelineRequestedEvent makeEventInTimeRange(SingleEvent selectedEvent) {
|
||||
private ViewInTimelineRequestedEvent makeEventInTimeRange(TimelineEvent selectedEvent) {
|
||||
Duration selectedDuration = unitComboBox.getSelectionModel().getSelectedItem().getBaseUnit().getDuration().multipliedBy(amountSpinner.getValue());
|
||||
Interval range = IntervalUtils.getIntervalAround(Instant.ofEpochMilli(selectedEvent.getStartMillis()), selectedDuration);
|
||||
return new ViewInTimelineRequestedEvent(Collections.singleton(selectedEvent.getEventID()), range);
|
||||
@ -341,14 +349,14 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
}
|
||||
|
||||
/**
|
||||
* TableCell that shows a EventType including the associated icon.
|
||||
* TableCell that shows a TimelineEventType including the associated icon.
|
||||
*
|
||||
* @param <X> Anything
|
||||
*/
|
||||
static private class TypeTableCell<X> extends TableCell<X, EventType> {
|
||||
static private class TypeTableCell<X> extends TableCell<X, TimelineEventType> {
|
||||
|
||||
@Override
|
||||
protected void updateItem(EventType item, boolean empty) {
|
||||
protected void updateItem(TimelineEventType item, boolean empty) {
|
||||
super.updateItem(item, empty);
|
||||
|
||||
if (item == null || empty) {
|
||||
@ -356,7 +364,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
|
||||
setGraphic(null);
|
||||
} else {
|
||||
setText(item.getDisplayName());
|
||||
setGraphic(new ImageView(item.getFXImage()));
|
||||
setGraphic(new ImageView(EventTypeUtils.getImagePath(item)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
718
Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
Normal file → Executable file
718
Core/src/org/sleuthkit/autopsy/timeline/TimeLineController.java
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
131
Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java
Executable file
131
Core/src/org/sleuthkit/autopsy/timeline/TimeLineModule.java
Executable file
@ -0,0 +1,131 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import java.beans.PropertyChangeEvent;
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.util.logging.Level;
|
||||
import javafx.application.Platform;
|
||||
import javax.swing.SwingUtilities;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.ingest.IngestManager;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* Manages listeners and the controller.
|
||||
*
|
||||
*/
|
||||
public class TimeLineModule {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(TimeLineModule.class.getName());
|
||||
|
||||
private static final Object controllerLock = new Object();
|
||||
private static TimeLineController controller;
|
||||
|
||||
/**
|
||||
* provides static utilities, can not be instantiated
|
||||
*/
|
||||
private TimeLineModule() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get instance of the controller for the current case
|
||||
*
|
||||
* @return the controller for the current case.
|
||||
*
|
||||
* @throws NoCurrentCaseException If there is no case open.
|
||||
* @throws TskCoreException If there was a problem accessing the case
|
||||
* database.
|
||||
*
|
||||
*/
|
||||
public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException {
|
||||
synchronized (controllerLock) {
|
||||
if (controller == null) {
|
||||
controller = new TimeLineController(Case.getCurrentCaseThrows());
|
||||
}
|
||||
return controller;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is invoked by virtue of the OnStart annotation on the OnStart
|
||||
* class class
|
||||
*/
|
||||
static void onStart() {
|
||||
Platform.setImplicitExit(false);
|
||||
logger.info("Setting up TimeLine listeners"); //NON-NLS
|
||||
|
||||
IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());
|
||||
Case.addPropertyChangeListener(new CaseEventListener());
|
||||
}
|
||||
|
||||
/**
|
||||
* Listener for case events.
|
||||
*/
|
||||
static private class CaseEventListener implements PropertyChangeListener {
|
||||
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
try {
|
||||
getController().handleCaseEvent(evt);
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
// ignore
|
||||
return;
|
||||
} catch (TskCoreException ex) {
|
||||
MessageNotifyUtil.Message.error("Error creating timeline controller.");
|
||||
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
|
||||
}
|
||||
|
||||
if (Case.Events.valueOf(evt.getPropertyName()).equals(CURRENT_CASE)) {
|
||||
// we care only about case closing here
|
||||
if (evt.getNewValue() == null) {
|
||||
synchronized (controllerLock) {
|
||||
if (controller != null) {
|
||||
SwingUtilities.invokeLater(controller::shutDownTimeLine);
|
||||
}
|
||||
controller = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Listener for IngestModuleEvents
|
||||
*/
|
||||
static private class IngestModuleEventListener implements PropertyChangeListener {
|
||||
|
||||
@Override
|
||||
public void propertyChange(PropertyChangeEvent evt) {
|
||||
try {
|
||||
getController().handleIngestModuleEvent(evt);
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
// ignore
|
||||
return;
|
||||
} catch (TskCoreException ex) {
|
||||
MessageNotifyUtil.Message.error("Error creating timeline controller.");
|
||||
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
0
Core/src/org/sleuthkit/autopsy/timeline/TimeLineTopComponent.form
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/TimeLineTopComponent.form
Normal file → Executable file
46
Core/src/org/sleuthkit/autopsy/timeline/TimeLineTopComponent.java
Normal file → Executable file
46
Core/src/org/sleuthkit/autopsy/timeline/TimeLineTopComponent.java
Normal file → Executable file
@ -18,6 +18,7 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import com.google.common.collect.ImmutableList;
|
||||
import java.awt.BorderLayout;
|
||||
import java.awt.Component;
|
||||
import java.awt.KeyboardFocusManager;
|
||||
@ -58,6 +59,7 @@ import org.openide.windows.RetainLocation;
|
||||
import org.openide.windows.TopComponent;
|
||||
import org.openide.windows.WindowManager;
|
||||
import org.sleuthkit.autopsy.actions.AddBookmarkTagAction;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
|
||||
import org.sleuthkit.autopsy.corecomponents.DataContentPanel;
|
||||
@ -78,6 +80,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.tree.EventsTree;
|
||||
import org.sleuthkit.autopsy.timeline.ui.filtering.FilterSetPanel;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.ZoomSettingsPane;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.VersionNumber;
|
||||
|
||||
/**
|
||||
* TopComponent for the Timeline feature.
|
||||
@ -165,7 +168,9 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
*/
|
||||
@Override
|
||||
public void invalidated(Observable observable) {
|
||||
List<Long> selectedEventIDs = controller.getSelectedEventIDs();
|
||||
// make a copy because this list gets updated as the user navigates around
|
||||
// and causes concurrent access exceptions
|
||||
List<Long> selectedEventIDs = ImmutableList.copyOf(controller.getSelectedEventIDs());
|
||||
|
||||
//depending on the active view mode, we either update the dataResultPanel, or update the contentViewerPanel directly.
|
||||
switch (controller.getViewMode()) {
|
||||
@ -196,9 +201,6 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
contentViewerPanel.setNode(null);
|
||||
}
|
||||
});
|
||||
} catch (NoCurrentCaseException ex) {
|
||||
//Since the case is closed, the user probably doesn't care about this, just log it as a precaution.
|
||||
logger.log(Level.SEVERE, "There was no case open to lookup the Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to lookup Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
|
||||
Platform.runLater(() -> {
|
||||
@ -254,10 +256,11 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a "shell" version of the top component for the Timeline
|
||||
* feature which has only Swing components, no controller, and no listeners.
|
||||
* This constructor conforms to the NetBeans window system requirement that
|
||||
* Constructs a "shell" version of the top component for this Timeline feature
|
||||
* which has only Swing components, no controller, and no listeners.
|
||||
* This constructor conforms to the NetBeans window system requirements that
|
||||
* all top components have a public, no argument constructor.
|
||||
*
|
||||
*/
|
||||
public TimeLineTopComponent() {
|
||||
initComponents();
|
||||
@ -280,21 +283,21 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
dataResultPanel.open(); //get the explorermanager
|
||||
contentViewerPanel.initialize();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Constructs a fully functional top component for the Timeline feature.
|
||||
*
|
||||
* @param controller The TimeLineController for this top component.
|
||||
* Constructs a full functional top component for the Timeline feature.
|
||||
*
|
||||
* @param controller The TimeLineController for ths top compenent.
|
||||
*/
|
||||
public TimeLineTopComponent(TimeLineController controller) {
|
||||
this();
|
||||
|
||||
|
||||
this.controller = controller;
|
||||
|
||||
|
||||
Platform.runLater(this::initFXComponents);
|
||||
|
||||
//set up listeners
|
||||
TimeLineController.getTimeZone().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString()));
|
||||
TimeLineController.timeZoneProperty().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString()));
|
||||
controller.getSelectedEventIDs().addListener(selectedEventsListener);
|
||||
|
||||
//Listen to ViewMode and adjust GUI componenets as needed.
|
||||
@ -459,6 +462,9 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
private javax.swing.JSplitPane splitYPane;
|
||||
// End of variables declaration//GEN-END:variables
|
||||
|
||||
@NbBundle.Messages ({
|
||||
"Timeline.old.version= This Case was created with an older version of Autopsy.\nThe Timeline with not show events from data sources added with the older version of Autopsy"
|
||||
})
|
||||
@Override
|
||||
public void componentOpened() {
|
||||
super.componentOpened();
|
||||
@ -467,6 +473,18 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
|
||||
//add listener that maintains correct selection in the Global Actions Context
|
||||
KeyboardFocusManager.getCurrentKeyboardFocusManager()
|
||||
.addPropertyChangeListener("focusOwner", focusPropertyListener);
|
||||
|
||||
VersionNumber version = Case.getCurrentCase().getSleuthkitCase().getDBSchemaCreationVersion();
|
||||
int major = version.getMajor();
|
||||
int minor = version.getMinor();
|
||||
|
||||
if(major < 8 || (major == 8 && minor <= 2)) {
|
||||
Platform.runLater(() -> {
|
||||
Notifications.create()
|
||||
.owner(jFXViewPanel.getScene().getWindow())
|
||||
.text(Bundle.Timeline_old_version()).showInformation();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/ViewMode.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/ViewMode.java
Normal file → Executable file
20
Core/src/org/sleuthkit/autopsy/timeline/WrappingListCell.java
Normal file → Executable file
20
Core/src/org/sleuthkit/autopsy/timeline/WrappingListCell.java
Normal file → Executable file
@ -1,9 +1,21 @@
|
||||
/*
|
||||
* To change this license header, choose License Headers in Project Properties.
|
||||
* To change this template file, choose Tools | Templates
|
||||
* and open the template in the editor.
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2016 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.sleuthkit.autopsy.timeline;
|
||||
|
||||
import javafx.scene.control.ListCell;
|
||||
|
319
Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java
Executable file
319
Core/src/org/sleuthkit/autopsy/timeline/actions/AddManualEvent.java
Executable file
@ -0,0 +1,319 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2019 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.awt.Dialog;
|
||||
import java.time.Instant;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneId;
|
||||
import static java.util.Arrays.asList;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.logging.Level;
|
||||
import javafx.application.Platform;
|
||||
import javafx.embed.swing.JFXPanel;
|
||||
import javafx.fxml.FXML;
|
||||
import javafx.scene.Scene;
|
||||
import javafx.scene.control.Alert;
|
||||
import javafx.scene.control.ButtonBase;
|
||||
import javafx.scene.control.ButtonType;
|
||||
import javafx.scene.control.ChoiceBox;
|
||||
import javafx.scene.control.ComboBox;
|
||||
import javafx.scene.control.DialogPane;
|
||||
import javafx.scene.control.TextField;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.image.ImageView;
|
||||
import javafx.util.StringConverter;
|
||||
import javax.swing.JDialog;
|
||||
import javax.swing.SwingUtilities;
|
||||
import jfxtras.scene.control.LocalDateTimeTextField;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.controlsfx.control.textfield.TextFields;
|
||||
import org.controlsfx.tools.ValueExtractor;
|
||||
import org.controlsfx.validation.ValidationSupport;
|
||||
import org.controlsfx.validation.Validator;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
|
||||
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
import org.sleuthkit.datamodel.Blackboard;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
|
||||
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
|
||||
import org.sleuthkit.datamodel.DataSource;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TimelineEventType;
|
||||
|
||||
/**
|
||||
* Action that allows the user the manually create timeline events. It prompts
|
||||
* the user for event data and then adds it to the case via an artifact.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"AddManualEvent.text=Add Event",
|
||||
"AddManualEvent.longText=Manually add an event to the timeline."})
|
||||
public class AddManualEvent extends Action {
|
||||
|
||||
private final static Logger logger = Logger.getLogger(AddManualEvent.class.getName());
|
||||
private static final String MANUAL_CREATION = "Manual Creation"; //NON-NLS
|
||||
private static final Image ADD_EVENT_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/add.png", 16, 16, true, true, true); // NON-NLS
|
||||
|
||||
/**
|
||||
* Initialize the custom value extractor used by the ValidationSupport for
|
||||
* the LocalDateTimeTextField in the EventCreationDialogPane.
|
||||
*/
|
||||
static {
|
||||
ValueExtractor.addObservableValueExtractor(LocalDateTimeTextField.class::isInstance,
|
||||
control -> ((LocalDateTimeTextField) control).localDateTimeProperty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Action that allows the user the manually create timeline
|
||||
* events. It prompts the user for event data with a dialog and then adds it
|
||||
* to the case via an artifact. The datetiem in the dialog will be set to
|
||||
* "now" when the action is invoked.
|
||||
*
|
||||
* @param controller The controller for this action to use.
|
||||
*
|
||||
*/
|
||||
public AddManualEvent(TimeLineController controller) {
|
||||
this(controller, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Action that allows the user the manually create timeline
|
||||
* events. It prompts the user for event data with a dialog and then adds it
|
||||
* to the case via an artifact.
|
||||
*
|
||||
* @param controller The controller for this action to use.
|
||||
* @param epochMillis The initial datetime to populate the dialog with. The
|
||||
* user can ove ride this.
|
||||
*/
|
||||
public AddManualEvent(TimeLineController controller, Long epochMillis) {
|
||||
super(Bundle.AddManualEvent_text());
|
||||
setGraphic(new ImageView(ADD_EVENT_IMAGE));
|
||||
setLongText(Bundle.AddManualEvent_longText());
|
||||
|
||||
setEventHandler(actionEvent -> SwingUtilities.invokeLater(() -> {
|
||||
JEventCreationDialog dialog = new JEventCreationDialog(controller, epochMillis, SwingUtilities.windowForComponent(controller.getTopComponent()));
|
||||
dialog.setVisible(true);
|
||||
//actual event creation happens in the ok button listener.
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Use the supplied ManualEventInfo to make an TSK_TL_EVENT artifact which
|
||||
* will trigger adding a TimelineEvent.
|
||||
*
|
||||
* @param eventInfo The ManualEventInfo with the info needed to create an
|
||||
* event.
|
||||
*
|
||||
* @throws IllegalArgumentException
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"AddManualEvent.createArtifactFailed=Failed to create artifact for event.",
|
||||
"AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard."})
|
||||
private void addEvent(TimeLineController controller, ManualEventInfo eventInfo) throws IllegalArgumentException {
|
||||
SleuthkitCase sleuthkitCase = controller.getEventsModel().getSleuthkitCase();
|
||||
|
||||
try {
|
||||
//Use the current examiners name plus a fixed string as the source / module name.
|
||||
String source = MANUAL_CREATION + ": " + sleuthkitCase.getCurrentExaminer().getLoginName();
|
||||
|
||||
BlackboardArtifact artifact = sleuthkitCase.newBlackboardArtifact(TSK_TL_EVENT, eventInfo.datasource.getId());
|
||||
artifact.addAttributes(asList(
|
||||
new BlackboardAttribute(
|
||||
TSK_TL_EVENT_TYPE, source,
|
||||
TimelineEventType.USER_CREATED.getTypeID()),
|
||||
new BlackboardAttribute(
|
||||
TSK_DESCRIPTION, source,
|
||||
eventInfo.description),
|
||||
new BlackboardAttribute(
|
||||
TSK_DATETIME, source,
|
||||
eventInfo.time)
|
||||
));
|
||||
try {
|
||||
sleuthkitCase.getBlackboard().postArtifact(artifact, source);
|
||||
} catch (Blackboard.BlackboardException ex) {
|
||||
logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error creatig new artifact.", ex); //NON-NLS
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_createArtifactFailed(), ButtonType.OK).showAndWait();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Subclass of JDialog used to dislpay the JFXPanel with the event creation
|
||||
* widgets.
|
||||
*/
|
||||
private final class JEventCreationDialog extends JDialog {
|
||||
|
||||
private final JFXPanel jfxPanel = new JFXPanel();
|
||||
|
||||
private JEventCreationDialog(TimeLineController controller, Long epochMillis, java.awt.Window owner) {
|
||||
super(owner, Bundle.AddManualEvent_text(), Dialog.ModalityType.DOCUMENT_MODAL);
|
||||
setIconImages(owner.getIconImages());
|
||||
setResizable(false);
|
||||
add(jfxPanel);
|
||||
|
||||
// make and configure the JavaFX components.
|
||||
Platform.runLater(() -> {
|
||||
// Custom DialogPane defined below.
|
||||
EventCreationDialogPane customPane = new EventCreationDialogPane(controller, epochMillis);
|
||||
//cancel button just closes the dialog
|
||||
((ButtonBase) customPane.lookupButton(ButtonType.CANCEL)).setOnAction(event -> dispose());
|
||||
//configure ok button to pull ManualEventInfo object and add it to case.
|
||||
((ButtonBase) customPane.lookupButton(ButtonType.OK)).setOnAction(event -> {
|
||||
ManualEventInfo manualEventInfo = customPane.getManualEventInfo();
|
||||
if (manualEventInfo != null) {
|
||||
addEvent(controller, manualEventInfo);
|
||||
}
|
||||
dispose(); //close and dispose the dialog.
|
||||
});
|
||||
|
||||
jfxPanel.setScene(new Scene(customPane));
|
||||
customPane.installValidation();
|
||||
SwingUtilities.invokeLater(() -> {
|
||||
//size and position dialog on EDT
|
||||
pack();
|
||||
setLocationRelativeTo(owner);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* The DialogPane that hosts the controls/widgets that allows the user
|
||||
* to enter the event information.
|
||||
*/
|
||||
private class EventCreationDialogPane extends DialogPane {
|
||||
|
||||
@FXML
|
||||
private ChoiceBox<DataSource> dataSourceChooser;
|
||||
@FXML
|
||||
private TextField descriptionTextField;
|
||||
@FXML
|
||||
private ComboBox<String> timeZoneChooser;
|
||||
@FXML
|
||||
private LocalDateTimeTextField timePicker;
|
||||
|
||||
private final List<String> timeZoneList = TimeZoneUtils.createTimeZoneList();
|
||||
private final ValidationSupport validationSupport = new ValidationSupport();
|
||||
private final TimeLineController controller;
|
||||
|
||||
private EventCreationDialogPane(TimeLineController controller, Long epochMillis) {
|
||||
this.controller = controller;
|
||||
FXMLConstructor.construct(this, "EventCreationDialog.fxml"); //NON-NLS
|
||||
if (epochMillis == null) {
|
||||
timePicker.setLocalDateTime(LocalDateTime.now());
|
||||
} else {
|
||||
timePicker.setLocalDateTime(LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), TimeLineController.getTimeZoneID()));
|
||||
}
|
||||
}
|
||||
|
||||
@FXML
|
||||
@NbBundle.Messages({"# {0} - datasource name", "# {1} - datasource id",
|
||||
"AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})",
|
||||
"AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case."})
|
||||
private void initialize() {
|
||||
assert descriptionTextField != null : "fx:id=\"descriptionTextField\" was not injected: check your FXML file 'EventCreationDialog.fxml'.";//NON-NLS
|
||||
|
||||
timeZoneChooser.getItems().setAll(timeZoneList);
|
||||
timeZoneChooser.getSelectionModel().select(TimeZoneUtils.createTimeZoneString(TimeLineController.getTimeZone()));
|
||||
TextFields.bindAutoCompletion(timeZoneChooser.getEditor(), timeZoneList);
|
||||
|
||||
dataSourceChooser.setConverter(new StringConverter<DataSource>() {
|
||||
@Override
|
||||
public String toString(DataSource dataSource) {
|
||||
return Bundle.AddManualEvent_EventCreationDialogPane_dataSourceStringConverter_template(dataSource.getName(), dataSource.getId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public DataSource fromString(String string) {
|
||||
throw new UnsupportedOperationException(); // This method should never get called.
|
||||
}
|
||||
});
|
||||
try {
|
||||
dataSourceChooser.getItems().setAll(controller.getAutopsyCase().getSleuthkitCase().getDataSources());
|
||||
dataSourceChooser.getSelectionModel().select(0);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error getting datasources in case.", ex);//NON-NLS
|
||||
SwingUtilities.invokeLater(() -> MessageNotifyUtil.Message.error(Bundle.AddManualEvent_EventCreationDialogPane_initialize_dataSourcesError()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Install/Configure the ValidationSupport.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"AddManualEvent.validation.description=Description is required.",
|
||||
"AddManualEvent.validation.datetime=Invalid datetime",
|
||||
"AddManualEvent.validation.timezone=Invalid time zone",})
|
||||
private void installValidation() {
|
||||
validationSupport.registerValidator(descriptionTextField, false,
|
||||
Validator.createEmptyValidator(Bundle.AddManualEvent_validation_description()));
|
||||
validationSupport.registerValidator(timePicker, false,
|
||||
Validator.createPredicateValidator(Objects::nonNull, Bundle.AddManualEvent_validation_description()));
|
||||
validationSupport.registerValidator(timeZoneChooser, false,
|
||||
Validator.createPredicateValidator((String zone) -> timeZoneList.contains(zone.trim()), Bundle.AddManualEvent_validation_timezone()));
|
||||
|
||||
validationSupport.initInitialDecoration();
|
||||
|
||||
//The ok button is only enabled if all fields are validated.
|
||||
lookupButton(ButtonType.OK).disableProperty().bind(validationSupport.invalidProperty());
|
||||
}
|
||||
|
||||
/**
|
||||
* Combine the user entered data into a ManulEventInfo object.
|
||||
*
|
||||
* @return The ManualEventInfo containing the user entered event
|
||||
* info.
|
||||
*/
|
||||
private ManualEventInfo getManualEventInfo() {
|
||||
//Trim off the offset part of the string from the chooser, to get something that ZoneId can parse.
|
||||
String zone = StringUtils.substringAfter(timeZoneChooser.getValue(), ")").trim(); //NON-NLS
|
||||
long toEpochSecond = timePicker.getLocalDateTime().atZone(ZoneId.of(zone)).toEpochSecond();
|
||||
return new ManualEventInfo(dataSourceChooser.getValue(), descriptionTextField.getText(), toEpochSecond);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Info required from user to manually create a timeline event.
|
||||
*/
|
||||
private static class ManualEventInfo {
|
||||
|
||||
private final DataSource datasource;
|
||||
private final String description;
|
||||
private final long time;
|
||||
|
||||
private ManualEventInfo(DataSource datasource, String description, long time) {
|
||||
this.datasource = datasource;
|
||||
this.description = description;
|
||||
this.time = time;
|
||||
}
|
||||
}
|
||||
}
|
2
Core/src/org/sleuthkit/autopsy/timeline/actions/Back.java
Normal file → Executable file
2
Core/src/org/sleuthkit/autopsy/timeline/actions/Back.java
Normal file → Executable file
@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
//TODO: This and the corresponding imageanalyzer action are identical except for the type of the controller... abstract something! -jm
|
||||
public class Back extends Action {
|
||||
|
||||
private static final Image BACK_IMAGE = new Image("/org/sleuthkit/autopsy/images/resultset_previous.png", 16, 16, true, true, true); // NON-NLS
|
||||
private static final Image BACK_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/arrow-180.png", 16, 16, true, true, true); // NON-NLS
|
||||
|
||||
private final TimeLineController controller;
|
||||
|
||||
|
@ -1,3 +1,14 @@
|
||||
AddManualEvent.createArtifactFailed=Failed to create artifact for event.
|
||||
# {0} - datasource name
|
||||
# {1} - datasource id
|
||||
AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})
|
||||
AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case.
|
||||
AddManualEvent.longText=Manually add an event to the timeline.
|
||||
AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard.
|
||||
AddManualEvent.text=Add Event
|
||||
AddManualEvent.validation.datetime=Invalid datetime
|
||||
AddManualEvent.validation.description=Description is required.
|
||||
AddManualEvent.validation.timezone=Invalid time zone
|
||||
# {0} - action accelerator keys
|
||||
Back.longText=Back: {0}\nGo back to the last view settings.
|
||||
Back.text=Back
|
||||
@ -10,8 +21,6 @@ OpenReportAction.MissingReportFileMessage=The report file no longer exists.
|
||||
OpenReportAction.NoAssociatedEditorMessage=There is no associated editor for reports of this type or the associated application failed to launch.
|
||||
OpenReportAction.NoOpenInEditorSupportMessage=This platform (operating system) does not support opening a file in an editor this way.
|
||||
OpenReportAction.ReportFileOpenPermissionDeniedMessage=Permission to open the report file was denied.
|
||||
RebuildDataBase.longText=Update the DB to include new events.
|
||||
RebuildDataBase.text=Update DB
|
||||
ResetFilters.text=Reset all filters
|
||||
RestFilters.longText=Reset all filters to their default state.
|
||||
SaveSnapShotAsReport.action.dialogs.title=Timeline
|
||||
@ -32,8 +41,12 @@ ViewArtifactInTimelineAction.displayName=View Result in Timeline...
|
||||
ViewFileInTimelineAction.viewFile.displayName=View File in Timeline...
|
||||
ViewFileInTimelineAction.viewSourceFile.displayName=View Source File in Timeline...
|
||||
ZoomIn.action.text=Zoom in
|
||||
ZoomIn.errorMessage=Error zooming in.
|
||||
ZoomIn.longText=Zoom in to view about half as much time.
|
||||
ZoomOut.action.text=Zoom out
|
||||
ZoomOut.disabledProperty.errorMessage=Error getting spanning interval.
|
||||
ZoomOut.errorMessage=Error zooming out.
|
||||
ZoomOut.longText=Zoom out to view about 50% more time.
|
||||
ZoomToEvents.action.text=Zoom to events
|
||||
ZoomToEvents.disabledProperty.errorMessage=Error getting spanning interval.
|
||||
ZoomToEvents.longText=Zoom out to show the nearest events.
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/actions/Bundle_ja.properties
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/actions/Bundle_ja.properties
Normal file → Executable file
52
Core/src/org/sleuthkit/autopsy/timeline/actions/EventCreationDialog.fxml
Executable file
52
Core/src/org/sleuthkit/autopsy/timeline/actions/EventCreationDialog.fxml
Executable file
@ -0,0 +1,52 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
||||
<?import javafx.geometry.Insets?>
|
||||
<?import javafx.scene.control.ButtonType?>
|
||||
<?import javafx.scene.control.ChoiceBox?>
|
||||
<?import javafx.scene.control.ComboBox?>
|
||||
<?import javafx.scene.control.DialogPane?>
|
||||
<?import javafx.scene.control.Label?>
|
||||
<?import javafx.scene.control.TextField?>
|
||||
<?import javafx.scene.layout.ColumnConstraints?>
|
||||
<?import javafx.scene.layout.GridPane?>
|
||||
<?import javafx.scene.layout.RowConstraints?>
|
||||
<?import jfxtras.scene.control.LocalDateTimeTextField?>
|
||||
|
||||
<fx:root expanded="true" maxHeight="159.0" maxWidth="555.0" minHeight="159.0" minWidth="555.0" prefHeight="159.0" prefWidth="555.0" type="DialogPane" xmlns="http://javafx.com/javafx/8.0.141" xmlns:fx="http://javafx.com/fxml/1">
|
||||
<buttonTypes>
|
||||
<ButtonType fx:constant="OK" />
|
||||
<ButtonType fx:constant="CANCEL" />
|
||||
</buttonTypes>
|
||||
<content>
|
||||
<GridPane fx:id="gridPane" hgap="5.0" vgap="5.0">
|
||||
<columnConstraints>
|
||||
<ColumnConstraints hgrow="NEVER" maxWidth="93.0" minWidth="10.0" />
|
||||
<ColumnConstraints hgrow="SOMETIMES" maxWidth="193.0" minWidth="10.0" />
|
||||
<ColumnConstraints hgrow="NEVER" />
|
||||
<ColumnConstraints hgrow="SOMETIMES" minWidth="10.0" />
|
||||
</columnConstraints>
|
||||
<rowConstraints>
|
||||
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
|
||||
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
|
||||
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
|
||||
</rowConstraints>
|
||||
<children>
|
||||
<Label text="Description:" GridPane.rowIndex="1" />
|
||||
<TextField fx:id="descriptionTextField" prefHeight="26.0" prefWidth="278.0" GridPane.columnIndex="1" GridPane.columnSpan="3" GridPane.rowIndex="1" />
|
||||
<Label text="DateTime" GridPane.rowIndex="2" />
|
||||
<Label text="Time Zone" GridPane.columnIndex="2" GridPane.rowIndex="2">
|
||||
<padding>
|
||||
<Insets left="15.0" />
|
||||
</padding>
|
||||
</Label>
|
||||
<ComboBox fx:id="timeZoneChooser" editable="true" prefHeight="28.0" prefWidth="214.0" GridPane.columnIndex="3" GridPane.rowIndex="2" />
|
||||
<LocalDateTimeTextField fx:id="timePicker" prefHeight="26.0" prefWidth="166.0" GridPane.columnIndex="1" GridPane.rowIndex="2" />
|
||||
<Label text="DataSource:" />
|
||||
<ChoiceBox fx:id="dataSourceChooser" prefHeight="25.0" prefWidth="168.0" GridPane.columnIndex="1" GridPane.columnSpan="3" />
|
||||
</children>
|
||||
<padding>
|
||||
<Insets bottom="5.0" left="5.0" right="5.0" top="5.0" />
|
||||
</padding>
|
||||
</GridPane>
|
||||
</content>
|
||||
</fx:root>
|
2
Core/src/org/sleuthkit/autopsy/timeline/actions/Forward.java
Normal file → Executable file
2
Core/src/org/sleuthkit/autopsy/timeline/actions/Forward.java
Normal file → Executable file
@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
//TODO: This and the corresponding imageanalyzer action are identical except for the type of the controller... abstract something! -jm
|
||||
public class Forward extends Action {
|
||||
|
||||
private static final Image FORWARD_IMAGE = new Image("/org/sleuthkit/autopsy/images/resultset_next.png", 16, 16, true, true, true); // NON-NLS
|
||||
private static final Image FORWARD_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/arrow.png", 16, 16, true, true, true); // NON-NLS
|
||||
|
||||
private final TimeLineController controller;
|
||||
|
||||
|
6
Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java
Normal file → Executable file
6
Core/src/org/sleuthkit/autopsy/timeline/actions/ResetFilters.java
Normal file → Executable file
@ -22,8 +22,8 @@ import javafx.beans.binding.BooleanBinding;
|
||||
import javafx.event.ActionEvent;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
|
||||
|
||||
/**
|
||||
* Action that resets the filters to their initial/default state.
|
||||
@ -44,12 +44,12 @@ public class ResetFilters extends Action {
|
||||
eventsModel = controller.getEventsModel();
|
||||
disabledProperty().bind(new BooleanBinding() {
|
||||
{
|
||||
bind(eventsModel.zoomParametersProperty());
|
||||
bind(eventsModel.zoomStateProperty());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean computeValue() {
|
||||
return eventsModel.zoomParametersProperty().getValue().getFilter().equals(eventsModel.getDefaultFilter());
|
||||
return eventsModel.zoomStateProperty().getValue().getFilterState().equals(eventsModel.getDefaultFilter());
|
||||
}
|
||||
});
|
||||
setEventHandler((ActionEvent t) -> {
|
||||
|
2
Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java
Normal file → Executable file
2
Core/src/org/sleuthkit/autopsy/timeline/actions/SaveSnapshotAsReport.java
Normal file → Executable file
@ -141,7 +141,7 @@ public class SaveSnapshotAsReport extends Action {
|
||||
reportMainFilePath = new SnapShotReportWriter(currentCase,
|
||||
reportFolderPath,
|
||||
reportName,
|
||||
controller.getEventsModel().getZoomParamaters(),
|
||||
controller.getEventsModel().getZoomState(),
|
||||
generationDate, snapshot).writeReport();
|
||||
} catch (IOException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS
|
||||
|
@ -1,50 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2016 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.image.ImageView;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
|
||||
/**
|
||||
* An action that rebuilds the timeline database to include any new results from
|
||||
* ingest.
|
||||
*/
|
||||
public class UpdateDB extends Action {
|
||||
|
||||
private static final Image DB_REFRESH = new Image("org/sleuthkit/autopsy/timeline/images/database_refresh.png");
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param controller The TimeLineController for this action.
|
||||
*/
|
||||
@NbBundle.Messages({
|
||||
"RebuildDataBase.text=Update DB",
|
||||
"RebuildDataBase.longText=Update the DB to include new events."})
|
||||
public UpdateDB(TimeLineController controller) {
|
||||
super(Bundle.RebuildDataBase_text());
|
||||
setLongText(Bundle.RebuildDataBase_longText());
|
||||
setGraphic(new ImageView(DB_REFRESH));
|
||||
setEventHandler(actionEvent -> controller.rebuildRepo());
|
||||
disabledProperty().bind(controller.eventsDBStaleProperty().not());
|
||||
}
|
||||
}
|
37
Core/src/org/sleuthkit/autopsy/timeline/actions/ViewArtifactInTimelineAction.java
Normal file → Executable file
37
Core/src/org/sleuthkit/autopsy/timeline/actions/ViewArtifactInTimelineAction.java
Normal file → Executable file
@ -19,14 +19,13 @@
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.awt.event.ActionEvent;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.logging.Level;
|
||||
import javax.swing.AbstractAction;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.actions.SystemAction;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
@ -36,13 +35,7 @@ import org.sleuthkit.datamodel.TskCoreException;
|
||||
*/
|
||||
public final class ViewArtifactInTimelineAction extends AbstractAction {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private static final Set<ArtifactEventType> ARTIFACT_EVENT_TYPES =
|
||||
EventType.allTypes.stream()
|
||||
.filter((EventType t) -> t instanceof ArtifactEventType)
|
||||
.map(ArtifactEventType.class::cast)
|
||||
.collect(Collectors.toSet());
|
||||
private static final Logger logger = Logger.getLogger(ViewFileInTimelineAction.class.getName());
|
||||
|
||||
private final BlackboardArtifact artifact;
|
||||
|
||||
@ -54,26 +47,26 @@ public final class ViewArtifactInTimelineAction extends AbstractAction {
|
||||
|
||||
@Override
|
||||
public void actionPerformed(ActionEvent e) {
|
||||
SystemAction.get(OpenTimelineAction.class).showArtifactInTimeline(artifact);
|
||||
try {
|
||||
SystemAction.get(OpenTimelineAction.class).showArtifactInTimeline(artifact);
|
||||
} catch (TskCoreException ex) {
|
||||
MessageNotifyUtil.Message.error("Error opening Timeline");
|
||||
logger.log(Level.SEVERE, "Error showing timeline.", ex);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Does the given artifact have a type that Timeline supports, and does it
|
||||
* have a positive timestamp in the supported attribute?
|
||||
* Does the given artifact have a datetime attribute?
|
||||
*
|
||||
* @param artifact The artifact to test for a supported timestamp
|
||||
*
|
||||
* @return True if this artifact has a timestamp supported by Timeline.
|
||||
*/
|
||||
public static boolean hasSupportedTimeStamp(BlackboardArtifact artifact) throws TskCoreException {
|
||||
//see if the given artifact is a supported type ...
|
||||
for (ArtifactEventType artEventType : ARTIFACT_EVENT_TYPES) {
|
||||
if (artEventType.getArtifactTypeID() == artifact.getArtifactTypeID()) {
|
||||
//... and has a non-bogus timestamp in the supported attribute
|
||||
BlackboardAttribute attribute = artifact.getAttribute(artEventType.getDateTimeAttributeType());
|
||||
if (null != attribute && attribute.getValueLong() > 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (BlackboardAttribute attr : artifact.getAttributes()) {
|
||||
if (attr.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
13
Core/src/org/sleuthkit/autopsy/timeline/actions/ViewFileInTimelineAction.java
Normal file → Executable file
13
Core/src/org/sleuthkit/autopsy/timeline/actions/ViewFileInTimelineAction.java
Normal file → Executable file
@ -19,11 +19,15 @@
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.awt.event.ActionEvent;
|
||||
import java.util.logging.Level;
|
||||
import javax.swing.AbstractAction;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.util.actions.SystemAction;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
|
||||
import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
@ -34,6 +38,8 @@ public final class ViewFileInTimelineAction extends AbstractAction {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
private static final Logger logger = Logger.getLogger(ViewFileInTimelineAction.class.getName());
|
||||
|
||||
private final AbstractFile file;
|
||||
|
||||
private ViewFileInTimelineAction(AbstractFile file, String displayName) {
|
||||
@ -62,6 +68,11 @@ public final class ViewFileInTimelineAction extends AbstractAction {
|
||||
|
||||
@Override
|
||||
public void actionPerformed(ActionEvent e) {
|
||||
SystemAction.get(OpenTimelineAction.class).showFileInTimeline(file);
|
||||
try {
|
||||
SystemAction.get(OpenTimelineAction.class).showFileInTimeline(file);
|
||||
} catch (TskCoreException ex) {
|
||||
MessageNotifyUtil.Message.error("Error opening Timeline");
|
||||
logger.log(Level.SEVERE, "Error showing timeline.", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
19
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomIn.java
Normal file → Executable file
19
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomIn.java
Normal file → Executable file
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2015 Basis Technology Corp.
|
||||
* Copyright 2015-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,27 +18,40 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.util.logging.Level;
|
||||
import javafx.scene.control.Alert;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.image.ImageView;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class ZoomIn extends Action {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(ZoomIn.class.getName());
|
||||
|
||||
private static final Image MAGNIFIER_IN = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-in-green.png"); //NOI18N NON-NLS
|
||||
|
||||
@NbBundle.Messages({"ZoomIn.longText=Zoom in to view about half as much time.",
|
||||
"ZoomIn.action.text=Zoom in"})
|
||||
"ZoomIn.action.text=Zoom in",
|
||||
"ZoomIn.errorMessage=Error zooming in."
|
||||
})
|
||||
public ZoomIn(TimeLineController controller) {
|
||||
super(Bundle.ZoomIn_action_text());
|
||||
setLongText(Bundle.ZoomIn_longText());
|
||||
setGraphic(new ImageView(MAGNIFIER_IN));
|
||||
setEventHandler(actionEvent -> {
|
||||
controller.pushZoomInTime();
|
||||
try {
|
||||
controller.pushZoomInTime();
|
||||
} catch (TskCoreException ex) {
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.ZoomIn_errorMessage()).showAndWait();
|
||||
logger.log(Level.SEVERE, "Error zooming in.", ex);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
33
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java
Normal file → Executable file
33
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomOut.java
Normal file → Executable file
@ -1,7 +1,7 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2015 Basis Technology Corp.
|
||||
* Copyright 2015-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
@ -18,40 +18,61 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.util.logging.Level;
|
||||
import javafx.beans.binding.BooleanBinding;
|
||||
import javafx.scene.control.Alert;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.image.ImageView;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class ZoomOut extends Action {
|
||||
|
||||
final private static Logger logger = Logger.getLogger(ZoomOut.class.getName());
|
||||
|
||||
private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png"); //NOI18N NON-NLS
|
||||
|
||||
@NbBundle.Messages({"ZoomOut.longText=Zoom out to view about 50% more time.",
|
||||
"ZoomOut.action.text=Zoom out"})
|
||||
"ZoomOut.action.text=Zoom out",
|
||||
"ZoomOut.errorMessage=Error zooming out.",
|
||||
"ZoomOut.disabledProperty.errorMessage=Error getting spanning interval."})
|
||||
public ZoomOut(TimeLineController controller) {
|
||||
super(Bundle.ZoomOut_action_text());
|
||||
setLongText(Bundle.ZoomOut_longText());
|
||||
setGraphic(new ImageView(MAGNIFIER_OUT));
|
||||
setEventHandler(actionEvent -> controller.pushZoomOutTime());
|
||||
setEventHandler(actionEvent -> {
|
||||
try {
|
||||
controller.pushZoomOutTime();
|
||||
} catch (TskCoreException ex) {
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.ZoomOut_errorMessage()).showAndWait();
|
||||
logger.log(Level.SEVERE, "Error zooming out.", ex);
|
||||
}
|
||||
});
|
||||
|
||||
//disable action when the current time range already encompases the entire case.
|
||||
disabledProperty().bind(new BooleanBinding() {
|
||||
private final FilteredEventsModel eventsModel = controller.getEventsModel();
|
||||
|
||||
{
|
||||
bind(eventsModel.zoomParametersProperty(), eventsModel.timeRangeProperty());
|
||||
bind(eventsModel.zoomStateProperty(), eventsModel.timeRangeProperty());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean computeValue() {
|
||||
return eventsModel.timeRangeProperty().get().contains(eventsModel.getSpanningInterval());
|
||||
try {
|
||||
return eventsModel.getTimeRange().contains(eventsModel.getSpanningInterval());
|
||||
} catch (TskCoreException ex) {
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.ZoomOut_disabledProperty_errorMessage()).showAndWait();
|
||||
logger.log(Level.SEVERE, "Error getting spanning interval.", ex);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
29
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java
Normal file → Executable file
29
Core/src/org/sleuthkit/autopsy/timeline/actions/ZoomToEvents.java
Normal file → Executable file
@ -18,29 +18,40 @@
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.actions;
|
||||
|
||||
import java.util.logging.Level;
|
||||
import javafx.beans.binding.BooleanBinding;
|
||||
import javafx.scene.control.Alert;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.image.ImageView;
|
||||
import org.controlsfx.control.action.Action;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.timeline.TimeLineController;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
|
||||
import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class ZoomToEvents extends Action {
|
||||
|
||||
private static final Logger logger = Logger.getLogger(ZoomToEvents.class.getName());
|
||||
private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png", 16, 16, true, true); //NOI18N NON-NLS
|
||||
|
||||
@NbBundle.Messages({"ZoomToEvents.action.text=Zoom to events",
|
||||
"ZoomToEvents.longText=Zoom out to show the nearest events."})
|
||||
"ZoomToEvents.longText=Zoom out to show the nearest events.",
|
||||
"ZoomToEvents.disabledProperty.errorMessage=Error getting spanning interval."})
|
||||
public ZoomToEvents(final TimeLineController controller) {
|
||||
super(Bundle.ZoomToEvents_action_text());
|
||||
setLongText(Bundle.ZoomToEvents_longText());
|
||||
setGraphic(new ImageView(MAGNIFIER_OUT));
|
||||
setEventHandler(actionEvent -> {
|
||||
controller.zoomOutToActivity();
|
||||
try {
|
||||
controller.zoomOutToActivity();
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error invoking ZoomToEvents action", ex);
|
||||
new Alert(Alert.AlertType.ERROR, "Error zomming").showAndWait();
|
||||
}
|
||||
});
|
||||
|
||||
//disable action when the current time range already encompases the entire case.
|
||||
@ -48,13 +59,19 @@ public class ZoomToEvents extends Action {
|
||||
private final FilteredEventsModel eventsModel = controller.getEventsModel();
|
||||
|
||||
{
|
||||
bind(eventsModel.zoomParametersProperty());
|
||||
bind(eventsModel.zoomStateProperty());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean computeValue() {
|
||||
//TODO: do a db query to see if using this action will actually result in viewable events
|
||||
return eventsModel.zoomParametersProperty().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
|
||||
try {
|
||||
//TODO: do a db query to see if using this action will actually result in viewable events
|
||||
return eventsModel.getTimeRange().contains(eventsModel.getSpanningInterval());
|
||||
} catch (TskCoreException ex) {
|
||||
new Alert(Alert.AlertType.ERROR, Bundle.ZoomToEvents_disabledProperty_errorMessage()).showAndWait();
|
||||
logger.log(Level.SEVERE, "Error getting spanning interval.", ex);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -1,537 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2016 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel;
|
||||
|
||||
import com.google.common.eventbus.EventBus;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.logging.Level;
|
||||
import javafx.beans.Observable;
|
||||
import javafx.beans.property.ReadOnlyObjectProperty;
|
||||
import javafx.beans.property.ReadOnlyObjectWrapper;
|
||||
import javafx.collections.ListChangeListener;
|
||||
import javafx.collections.MapChangeListener;
|
||||
import javax.annotation.concurrent.GuardedBy;
|
||||
import org.joda.time.Interval;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
|
||||
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEvent;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
|
||||
import org.sleuthkit.autopsy.timeline.db.EventsRepository;
|
||||
import org.sleuthkit.autopsy.timeline.events.DBUpdatedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.Filter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
* This class acts as the model for a TimelineView
|
||||
*
|
||||
* Views can register listeners on properties returned by methods.
|
||||
*
|
||||
* This class is implemented as a filtered view into an underlying
|
||||
* EventsRepository.
|
||||
*
|
||||
* TODO: as many methods as possible should cache their results so as to avoid
|
||||
* unnecessary db calls through the EventsRepository -jm
|
||||
*
|
||||
* Concurrency Policy: repo is internally synchronized, so methods that only
|
||||
* access the repo atomically do not need further synchronization
|
||||
*
|
||||
* all other member state variables should only be accessed with intrinsic lock
|
||||
* of containing FilteredEventsModel held. Many methods delegate to a task
|
||||
* submitted to the dbQueryThread executor. These methods should synchronize on
|
||||
* this object, and the tasks should too. Since the tasks execute asynchronously
|
||||
* from the invoking methods, the methods will return and release the lock for
|
||||
* the tasks to obtain.
|
||||
*
|
||||
*/
|
||||
public final class FilteredEventsModel {
|
||||
|
||||
private static final Logger LOGGER = Logger.getLogger(FilteredEventsModel.class.getName());
|
||||
|
||||
/**
|
||||
* time range that spans the filtered events
|
||||
*/
|
||||
@GuardedBy("this")
|
||||
private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
|
||||
|
||||
@GuardedBy("this")
|
||||
private final ReadOnlyObjectWrapper<RootFilter> requestedFilter = new ReadOnlyObjectWrapper<>();
|
||||
|
||||
@GuardedBy("this")
|
||||
private final ReadOnlyObjectWrapper< EventTypeZoomLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(EventTypeZoomLevel.BASE_TYPE);
|
||||
|
||||
@GuardedBy("this")
|
||||
private final ReadOnlyObjectWrapper< DescriptionLoD> requestedLOD = new ReadOnlyObjectWrapper<>(DescriptionLoD.SHORT);
|
||||
|
||||
@GuardedBy("this")
|
||||
private final ReadOnlyObjectWrapper<ZoomParams> requestedZoomParamters = new ReadOnlyObjectWrapper<>();
|
||||
|
||||
private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
|
||||
|
||||
/**
|
||||
* The underlying repo for events. Atomic access to repo is synchronized
|
||||
* internally, but compound access should be done with the intrinsic lock of
|
||||
* this FilteredEventsModel object
|
||||
*/
|
||||
@GuardedBy("this")
|
||||
private final EventsRepository repo;
|
||||
private final Case autoCase;
|
||||
|
||||
public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
|
||||
this.repo = repo;
|
||||
this.autoCase = repo.getAutoCase();
|
||||
repo.getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
|
||||
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
|
||||
RootFilter rootFilter = filterProperty().get();
|
||||
rootFilter.getDataSourcesFilter().addSubFilter(dataSourceFilter);
|
||||
requestedFilter.set(rootFilter.copyOf());
|
||||
});
|
||||
repo.getHashSetMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
|
||||
HashSetFilter hashSetFilter = new HashSetFilter(change.getValueAdded(), change.getKey());
|
||||
RootFilter rootFilter = filterProperty().get();
|
||||
rootFilter.getHashHitsFilter().addSubFilter(hashSetFilter);
|
||||
requestedFilter.set(rootFilter.copyOf());
|
||||
});
|
||||
repo.getTagNames().addListener((ListChangeListener.Change<? extends TagName> c) -> {
|
||||
RootFilter rootFilter = filterProperty().get();
|
||||
TagsFilter tagsFilter = rootFilter.getTagsFilter();
|
||||
repo.syncTagsFilter(tagsFilter);
|
||||
requestedFilter.set(rootFilter.copyOf());
|
||||
});
|
||||
requestedFilter.set(getDefaultFilter());
|
||||
|
||||
//TODO: use bindings to keep these in sync? -jm
|
||||
requestedZoomParamters.addListener((Observable observable) -> {
|
||||
final ZoomParams zoomParams = requestedZoomParamters.get();
|
||||
|
||||
if (zoomParams != null) {
|
||||
synchronized (FilteredEventsModel.this) {
|
||||
requestedTypeZoom.set(zoomParams.getTypeZoomLevel());
|
||||
requestedFilter.set(zoomParams.getFilter());
|
||||
requestedTimeRange.set(zoomParams.getTimeRange());
|
||||
requestedLOD.set(zoomParams.getDescriptionLOD());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
requestedZoomParamters.bind(currentStateProperty);
|
||||
}
|
||||
|
||||
/**
|
||||
* Readonly observable property for the current ZoomParams
|
||||
*
|
||||
* @return A readonly observable property for the current ZoomParams.
|
||||
*/
|
||||
synchronized public ReadOnlyObjectProperty<ZoomParams> zoomParametersProperty() {
|
||||
return requestedZoomParamters.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current ZoomParams
|
||||
*
|
||||
* @return The current ZoomParams
|
||||
*/
|
||||
synchronized public ZoomParams getZoomParamaters() {
|
||||
return requestedZoomParamters.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a read only view of the time range currently in view.
|
||||
*
|
||||
* @return A read only view of the time range currently in view.
|
||||
*/
|
||||
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
|
||||
if (requestedTimeRange.get() == null) {
|
||||
requestedTimeRange.set(getSpanningInterval());
|
||||
}
|
||||
return requestedTimeRange.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<DescriptionLoD> descriptionLODProperty() {
|
||||
return requestedLOD.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
|
||||
return requestedFilter.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
|
||||
return requestedTypeZoom.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
/**
|
||||
* The time range currently in view.
|
||||
*
|
||||
* @return The time range currently in view.
|
||||
*/
|
||||
synchronized public Interval getTimeRange() {
|
||||
return timeRangeProperty().get();
|
||||
}
|
||||
|
||||
synchronized public DescriptionLoD getDescriptionLOD() {
|
||||
return requestedLOD.get();
|
||||
}
|
||||
|
||||
synchronized public RootFilter getFilter() {
|
||||
return requestedFilter.get();
|
||||
}
|
||||
|
||||
synchronized public EventTypeZoomLevel getEventTypeZoom() {
|
||||
return requestedTypeZoom.get();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the default filter used at startup
|
||||
*/
|
||||
public RootFilter getDefaultFilter() {
|
||||
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
|
||||
|
||||
repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
|
||||
DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
|
||||
dataSourceFilter.setSelected(Boolean.TRUE);
|
||||
dataSourcesFilter.addSubFilter(dataSourceFilter);
|
||||
});
|
||||
|
||||
HashHitsFilter hashHitsFilter = new HashHitsFilter();
|
||||
repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
|
||||
HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
|
||||
hashSetFilter.setSelected(Boolean.TRUE);
|
||||
hashHitsFilter.addSubFilter(hashSetFilter);
|
||||
});
|
||||
|
||||
TagsFilter tagsFilter = new TagsFilter();
|
||||
repo.getTagNames().stream().forEach(t -> {
|
||||
TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
|
||||
tagNameFilter.setSelected(Boolean.TRUE);
|
||||
tagsFilter.addSubFilter(tagNameFilter);
|
||||
});
|
||||
return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter, Collections.emptySet());
|
||||
}
|
||||
|
||||
public Interval getBoundingEventsInterval() {
|
||||
return repo.getBoundingEventsInterval(zoomParametersProperty().get().getTimeRange(), zoomParametersProperty().get().getFilter());
|
||||
}
|
||||
|
||||
public SingleEvent getEventById(Long eventID) {
|
||||
return repo.getEventById(eventID);
|
||||
}
|
||||
|
||||
public Set<SingleEvent> getEventsById(Collection<Long> eventIDs) {
|
||||
return repo.getEventsById(eventIDs);
|
||||
}
|
||||
|
||||
/**
|
||||
* get a count of tagnames applied to the given event ids as a map from
|
||||
* tagname displayname to count of tag applications
|
||||
*
|
||||
* @param eventIDsWithTags the event ids to get the tag counts map for
|
||||
*
|
||||
* @return a map from tagname displayname to count of applications
|
||||
*/
|
||||
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
|
||||
return repo.getTagCountsByTagName(eventIDsWithTags);
|
||||
}
|
||||
|
||||
public List<Long> getEventIDs(Interval timeRange, Filter filter) {
|
||||
final Interval overlap;
|
||||
final RootFilter intersect;
|
||||
synchronized (this) {
|
||||
overlap = getSpanningInterval().overlap(timeRange);
|
||||
intersect = requestedFilter.get().copyOf();
|
||||
}
|
||||
intersect.getSubFilters().add(filter);
|
||||
return repo.getEventIDs(overlap, intersect);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a representation of all the events, within the given time range, that
|
||||
* pass the given filter, grouped by time and description such that file
|
||||
* system events for the same file, with the same timestamp, are combined
|
||||
* together.
|
||||
*
|
||||
* @return A List of combined events, sorted by timestamp.
|
||||
*/
|
||||
public List<CombinedEvent> getCombinedEvents() {
|
||||
return repo.getCombinedEvents(requestedTimeRange.get(), requestedFilter.get());
|
||||
}
|
||||
|
||||
/**
|
||||
* return the number of events that pass the requested filter and are within
|
||||
* the given time range.
|
||||
*
|
||||
* NOTE: this method does not change the requested time range
|
||||
*
|
||||
* @param timeRange
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public Map<EventType, Long> getEventCounts(Interval timeRange) {
|
||||
|
||||
final RootFilter filter;
|
||||
final EventTypeZoomLevel typeZoom;
|
||||
synchronized (this) {
|
||||
filter = requestedFilter.get();
|
||||
typeZoom = requestedTypeZoom.get();
|
||||
}
|
||||
return repo.countEvents(new ZoomParams(timeRange, typeZoom, filter, null));
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the smallest interval spanning all the events from the
|
||||
* repository, ignoring any filters or requested ranges
|
||||
*/
|
||||
public Interval getSpanningInterval() {
|
||||
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the smallest interval spanning all the given events
|
||||
*/
|
||||
public Interval getSpanningInterval(Collection<Long> eventIDs) {
|
||||
return repo.getSpanningInterval(eventIDs);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time (in seconds from unix epoch) of the absolutely first
|
||||
* event available from the repository, ignoring any filters or
|
||||
* requested ranges
|
||||
*/
|
||||
public Long getMinTime() {
|
||||
return repo.getMinTime();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the time (in seconds from unix epoch) of the absolutely last
|
||||
* event available from the repository, ignoring any filters or
|
||||
* requested ranges
|
||||
*/
|
||||
public Long getMaxTime() {
|
||||
return repo.getMaxTime();
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return a list of event clusters at the requested zoom levels that are
|
||||
* within the requested time range and pass the requested filter
|
||||
*/
|
||||
public List<EventStripe> getEventStripes() {
|
||||
final Interval range;
|
||||
final RootFilter filter;
|
||||
final EventTypeZoomLevel zoom;
|
||||
final DescriptionLoD lod;
|
||||
synchronized (this) {
|
||||
range = requestedTimeRange.get();
|
||||
filter = requestedFilter.get();
|
||||
zoom = requestedTypeZoom.get();
|
||||
lod = requestedLOD.get();
|
||||
}
|
||||
return repo.getEventStripes(new ZoomParams(range, zoom, filter, lod));
|
||||
}
|
||||
|
||||
/**
|
||||
* @param params
|
||||
*
|
||||
* @return a list of aggregated events that are within the requested time
|
||||
* range and pass the requested filter, using the given aggregation
|
||||
* to control the grouping of events
|
||||
*/
|
||||
public List<EventStripe> getEventStripes(ZoomParams params) {
|
||||
return repo.getEventStripes(params);
|
||||
}
|
||||
|
||||
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) {
|
||||
ContentTag contentTag = evt.getAddedTag();
|
||||
Content content = contentTag.getContent();
|
||||
Set<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag, null);
|
||||
return postTagsAdded(updatedEventIDs);
|
||||
}
|
||||
|
||||
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) {
|
||||
BlackboardArtifactTag artifactTag = evt.getAddedTag();
|
||||
BlackboardArtifact artifact = artifactTag.getArtifact();
|
||||
Set<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, null);
|
||||
return postTagsAdded(updatedEventIDs);
|
||||
}
|
||||
|
||||
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
|
||||
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
|
||||
try {
|
||||
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
|
||||
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
|
||||
Set<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
|
||||
return postTagsDeleted(updatedEventIDs);
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex); //NON-NLS
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
|
||||
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
|
||||
try {
|
||||
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
|
||||
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
|
||||
Set<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
|
||||
return postTagsDeleted(updatedEventIDs);
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex); //NON-NLS
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a List of event IDs for the events that are derived from the given
|
||||
* file.
|
||||
*
|
||||
* @param file The AbstractFile to get derived event IDs
|
||||
* for.
|
||||
* @param includeDerivedArtifacts If true, also get event IDs for events
|
||||
* derived from artifacts derived form this
|
||||
* file. If false, only gets events derived
|
||||
* directly from this file (file system
|
||||
* timestamps).
|
||||
*
|
||||
* @return A List of event IDs for the events that are derived from the
|
||||
* given file.
|
||||
*/
|
||||
public List<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) {
|
||||
return repo.getEventIDsForFile(file, includeDerivedArtifacts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a List of event IDs for the events that are derived from the given
|
||||
* artifact.
|
||||
*
|
||||
* @param artifact The BlackboardArtifact to get derived event IDs for.
|
||||
*
|
||||
* @return A List of event IDs for the events that are derived from the
|
||||
* given artifact.
|
||||
*/
|
||||
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) {
|
||||
return repo.getEventIDsForArtifact(artifact);
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a TagsAddedEvent to all registered subscribers, if the given set of
|
||||
* updated event IDs is not empty.
|
||||
*
|
||||
* @param updatedEventIDs The set of event ids to be included in the
|
||||
* TagsAddedEvent.
|
||||
*
|
||||
* @return True if an event was posted.
|
||||
*/
|
||||
private boolean postTagsAdded(Set<Long> updatedEventIDs) {
|
||||
boolean tagsUpdated = !updatedEventIDs.isEmpty();
|
||||
if (tagsUpdated) {
|
||||
eventbus.post(new TagsAddedEvent(updatedEventIDs));
|
||||
}
|
||||
return tagsUpdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a TagsDeletedEvent to all registered subscribers, if the given set
|
||||
* of updated event IDs is not empty.
|
||||
*
|
||||
* @param updatedEventIDs The set of event ids to be included in the
|
||||
* TagsDeletedEvent.
|
||||
*
|
||||
* @return True if an event was posted.
|
||||
*/
|
||||
private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
|
||||
boolean tagsUpdated = !updatedEventIDs.isEmpty();
|
||||
if (tagsUpdated) {
|
||||
eventbus.post(new TagsDeletedEvent(updatedEventIDs));
|
||||
}
|
||||
return tagsUpdated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register the given object to receive events.
|
||||
*
|
||||
* @param o The object to register. Must implement public methods annotated
|
||||
* with Subscribe.
|
||||
*/
|
||||
synchronized public void registerForEvents(Object o) {
|
||||
eventbus.register(o);
|
||||
}
|
||||
|
||||
/**
|
||||
* Un-register the given object, so it no longer receives events.
|
||||
*
|
||||
* @param o The object to un-register.
|
||||
*/
|
||||
synchronized public void unRegisterForEvents(Object o) {
|
||||
eventbus.unregister(o);
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a DBUpdatedEvent to all registered subscribers.
|
||||
*/
|
||||
public void postDBUpdated() {
|
||||
eventbus.post(new DBUpdatedEvent());
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a RefreshRequestedEvent to all registered subscribers.
|
||||
*/
|
||||
public void postRefreshRequest() {
|
||||
eventbus.post(new RefreshRequestedEvent());
|
||||
}
|
||||
|
||||
/**
|
||||
* (Re)Post an AutopsyEvent received from another event distribution system
|
||||
* locally to all registered subscribers.
|
||||
*/
|
||||
public void postAutopsyEventLocally(AutopsyEvent event) {
|
||||
eventbus.post(event);
|
||||
}
|
||||
|
||||
}
|
@ -1,207 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014-16 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.text.MessageFormat;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public interface ArtifactEventType extends EventType {
|
||||
|
||||
public static final Logger LOGGER = Logger.getLogger(ArtifactEventType.class.getName());
|
||||
|
||||
/**
|
||||
* Get the artifact type this event type is derived from.
|
||||
*
|
||||
* @return The artifact type this event type is derived from.
|
||||
*/
|
||||
public BlackboardArtifact.Type getArtifactType();
|
||||
|
||||
/**
|
||||
* The attribute type this event type is derived from.
|
||||
*
|
||||
* @return The attribute type this event type is derived from.
|
||||
*/
|
||||
public BlackboardAttribute.Type getDateTimeAttributeType();
|
||||
|
||||
/**
|
||||
* Get the ID of the the artifact type that this EventType is derived from.
|
||||
*
|
||||
* @return the ID of the the artifact type that this EventType is derived
|
||||
* from.
|
||||
*/
|
||||
public default int getArtifactTypeID() {
|
||||
return getArtifactType().getTypeID();
|
||||
}
|
||||
|
||||
/**
|
||||
* given an artifact, pull out the time stamp, and compose the descriptions.
|
||||
* Each implementation of ArtifactEventType needs to implement
|
||||
* parseAttributesHelper() as hook for buildEventDescription(org.sleuthkit.datamodel.BlackboardArtifact)
|
||||
* to invoke. Most subtypes can use this default implementation.
|
||||
*
|
||||
* @param artf
|
||||
*
|
||||
* @return an AttributeEventDescription containing the timestamp
|
||||
* and description information
|
||||
*
|
||||
* @throws TskCoreException
|
||||
*/
|
||||
default AttributeEventDescription parseAttributesHelper(BlackboardArtifact artf) throws TskCoreException {
|
||||
final BlackboardAttribute dateTimeAttr = artf.getAttribute(getDateTimeAttributeType());
|
||||
|
||||
long time = dateTimeAttr.getValueLong();
|
||||
String shortDescription = getShortExtractor().apply(artf);
|
||||
String medDescription = shortDescription + " : " + getMedExtractor().apply(artf);
|
||||
String fullDescription = medDescription + " : " + getFullExtractor().apply(artf);
|
||||
return new AttributeEventDescription(time, shortDescription, medDescription, fullDescription);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a function from an artifact to a String to use as part of the
|
||||
* full event description
|
||||
*/
|
||||
Function<BlackboardArtifact, String> getFullExtractor();
|
||||
|
||||
/**
|
||||
* @return a function from an artifact to a String to use as part of the
|
||||
* medium event description
|
||||
*/
|
||||
Function<BlackboardArtifact, String> getMedExtractor();
|
||||
|
||||
/**
|
||||
* @return a function from an artifact to a String to use as part of the
|
||||
* short event description
|
||||
*/
|
||||
Function<BlackboardArtifact, String> getShortExtractor();
|
||||
|
||||
/**
|
||||
* bundles the per event information derived from a BlackBoard Artifact into
|
||||
* one object. Primarily used to have a single return value for
|
||||
* ArtifactEventType#buildEventDescription(ArtifactEventType, BlackboardArtifact).
|
||||
*/
|
||||
static class AttributeEventDescription {
|
||||
|
||||
final private long time;
|
||||
|
||||
public long getTime() {
|
||||
return time;
|
||||
}
|
||||
|
||||
public String getShortDescription() {
|
||||
return shortDescription;
|
||||
}
|
||||
|
||||
public String getMedDescription() {
|
||||
return medDescription;
|
||||
}
|
||||
|
||||
public String getFullDescription() {
|
||||
return fullDescription;
|
||||
}
|
||||
|
||||
final private String shortDescription;
|
||||
|
||||
final private String medDescription;
|
||||
|
||||
final private String fullDescription;
|
||||
|
||||
public AttributeEventDescription(long time, String shortDescription,
|
||||
String medDescription,
|
||||
String fullDescription) {
|
||||
this.time = time;
|
||||
this.shortDescription = shortDescription;
|
||||
this.medDescription = medDescription;
|
||||
this.fullDescription = fullDescription;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build a AttributeEventDescription derived from a BlackboardArtifact. This
|
||||
* is a template method that relies on each ArtifactEventType's
|
||||
* implementation of ArtifactEventType#parseAttributesHelper() to know how
|
||||
* to go from BlackboardAttributes to the event description.
|
||||
*
|
||||
* @param type
|
||||
* @param artf the BlackboardArtifact to derive the event description from
|
||||
*
|
||||
* @return an AttributeEventDescription derived from the given artifact, if
|
||||
* the given artifact has no timestamp
|
||||
*
|
||||
* @throws TskCoreException is there is a problem accessing the blackboard
|
||||
* data
|
||||
*/
|
||||
static public AttributeEventDescription buildEventDescription(ArtifactEventType type, BlackboardArtifact artf) throws TskCoreException {
|
||||
//if we got passed an artifact that doesn't correspond to the type of the event,
|
||||
//something went very wrong. throw an exception.
|
||||
if (type.getArtifactTypeID() != artf.getArtifactTypeID()) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
if (artf.getAttribute(type.getDateTimeAttributeType()) == null) {
|
||||
LOGGER.log(Level.WARNING, "Artifact {0} has no date/time attribute, skipping it.", artf.getArtifactID()); // NON-NLS
|
||||
return null;
|
||||
}
|
||||
//use the hook provided by this subtype implementation
|
||||
return type.parseAttributesHelper(artf);
|
||||
}
|
||||
|
||||
static class AttributeExtractor implements Function<BlackboardArtifact, String> {
|
||||
|
||||
public String apply(BlackboardArtifact artf) {
|
||||
return Optional.ofNullable(getAttributeSafe(artf, attributeType))
|
||||
.map(BlackboardAttribute::getDisplayString)
|
||||
.map(StringUtils::defaultString)
|
||||
.orElse("");
|
||||
}
|
||||
|
||||
private final BlackboardAttribute.Type attributeType;
|
||||
|
||||
public AttributeExtractor(BlackboardAttribute.Type attribute) {
|
||||
this.attributeType = attribute;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class EmptyExtractor implements Function<BlackboardArtifact, String> {
|
||||
|
||||
@Override
|
||||
public String apply(BlackboardArtifact t) {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
static BlackboardAttribute getAttributeSafe(BlackboardArtifact artf, BlackboardAttribute.Type attrType) {
|
||||
try {
|
||||
return artf.getAttribute(attrType);
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting attribute from artifact {0}.", artf.getArtifactID()), ex); // NON-NLS
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -1,110 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import javafx.scene.image.Image;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
|
||||
/**
|
||||
* RootTypes are event types that have no super type.
|
||||
*/
|
||||
public enum BaseTypes implements EventType {
|
||||
|
||||
FILE_SYSTEM(NbBundle.getMessage(BaseTypes.class, "BaseTypes.fileSystem.name"), "blue-document.png") { // NON-NLS
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Arrays.asList(FileSystemTypes.values());
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return FileSystemTypes.valueOf(string);
|
||||
}
|
||||
},
|
||||
WEB_ACTIVITY(NbBundle.getMessage(BaseTypes.class, "BaseTypes.webActivity.name"), "web-file.png") { // NON-NLS
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Arrays.asList(WebTypes.values());
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return WebTypes.valueOf(string);
|
||||
}
|
||||
},
|
||||
MISC_TYPES(NbBundle.getMessage(BaseTypes.class, "BaseTypes.miscTypes.name"), "block.png") { // NON-NLS
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Arrays.asList(MiscTypes.values());
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return MiscTypes.valueOf(string);
|
||||
}
|
||||
};
|
||||
|
||||
private final String displayName;
|
||||
|
||||
private final String iconBase;
|
||||
|
||||
private final Image image;
|
||||
|
||||
@Override
|
||||
public Image getFXImage() {
|
||||
return image;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIconBase() {
|
||||
return iconBase;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventTypeZoomLevel getZoomLevel() {
|
||||
return EventTypeZoomLevel.BASE_TYPE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
private BaseTypes(String displayName, String iconBase) {
|
||||
this.displayName = displayName;
|
||||
this.iconBase = iconBase;
|
||||
this.image = new Image("org/sleuthkit/autopsy/timeline/images/" + iconBase, true); // NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSuperType() {
|
||||
return RootEventType.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return BaseTypes.valueOf(string);
|
||||
}
|
||||
}
|
@ -1,22 +0,0 @@
|
||||
BaseTypes.fileSystem.name=File System
|
||||
BaseTypes.webActivity.name=Web Activity
|
||||
BaseTypes.miscTypes.name=Misc Types
|
||||
FileSystemTypes.fileModified.name=File Modified
|
||||
FileSystemTypes.fileAccessed.name=File Accessed
|
||||
FileSystemTypes.fileCreated.name=File Created
|
||||
FileSystemTypes.fileChanged.name=File Changed
|
||||
MiscTypes.message.name=Messages
|
||||
MiscTypes.GPSRoutes.name=GPS Routes
|
||||
MiscTypes.GPSTrackpoint.name=Location History
|
||||
MiscTypes.Calls.name=Calls
|
||||
MiscTypes.Email.name=Email
|
||||
MiscTypes.recentDocuments.name=Recent Documents
|
||||
MiscTypes.installedPrograms.name=Installed Programs
|
||||
MiscTypes.exif.name=Exif
|
||||
MiscTypes.devicesAttached.name=Devices Attached
|
||||
RootEventType.eventTypes.name=Event Types
|
||||
WebTypes.webDownloads.name=Web Downloads
|
||||
WebTypes.webCookies.name=Web Cookies
|
||||
WebTypes.webBookmarks.name=Web Bookmarks
|
||||
WebTypes.webHistory.name=Web History
|
||||
WebTypes.webSearch.name=Web Searches
|
@ -1,22 +0,0 @@
|
||||
BaseTypes.fileSystem.name=\u30D5\u30A1\u30A4\u30EB\u30B7\u30B9\u30C6\u30E0
|
||||
BaseTypes.miscTypes.name=\u305D\u306E\u4ED6\u30BF\u30A4\u30D7
|
||||
BaseTypes.webActivity.name=\u30A6\u30A7\u30D6\u30A2\u30AF\u30C6\u30A3\u30D3\u30C6\u30A3
|
||||
FileSystemTypes.fileAccessed.name=\u30A2\u30AF\u30BB\u30B9\u3055\u308C\u305F\u30D5\u30A1\u30A4\u30EB
|
||||
FileSystemTypes.fileChanged.name=\u5909\u66F4\u3055\u308C\u305F\u30D5\u30A1\u30A4\u30EB
|
||||
FileSystemTypes.fileCreated.name=\u4F5C\u6210\u3055\u308C\u305F\u30D5\u30A1\u30A4\u30EB
|
||||
FileSystemTypes.fileModified.name=\u4FEE\u6B63\u3055\u308C\u305F\u30D5\u30A1\u30A4\u30EB
|
||||
MiscTypes.Calls.name=\u30B3\u30FC\u30EB
|
||||
MiscTypes.devicesAttached.name=\u63A5\u7D9A\u3055\u308C\u3066\u3044\u308B\u6A5F\u5668
|
||||
MiscTypes.Email.name=Email
|
||||
MiscTypes.exif.name=Exif
|
||||
MiscTypes.GPSRoutes.name=GPS\u30EB\u30FC\u30C8
|
||||
MiscTypes.GPSTrackpoint.name=\u4F4D\u7F6E\u60C5\u5831\u5C65\u6B74
|
||||
MiscTypes.installedPrograms.name=\u30A4\u30F3\u30B9\u30C8\u30FC\u30EB\u3055\u308C\u3066\u3044\u308B\u30D7\u30ED\u30B0\u30E9\u30E0
|
||||
MiscTypes.message.name=\u30E1\u30C3\u30BB\u30FC\u30B8
|
||||
MiscTypes.recentDocuments.name=\u6700\u8FD1\u306E\u30C9\u30AD\u30E5\u30E1\u30F3\u30C8
|
||||
RootEventType.eventTypes.name=\u30A4\u30D9\u30F3\u30C8\u30BF\u30A4\u30D7
|
||||
WebTypes.webBookmarks.name=\u30A6\u30A7\u30D6\u30D6\u30C3\u30AF\u30DE\u30FC\u30AF
|
||||
WebTypes.webCookies.name=\u30A6\u30A7\u30D6\u30AF\u30C3\u30AD\u30FC
|
||||
WebTypes.webDownloads.name=\u30A6\u30A7\u30D6\u30C0\u30A6\u30F3\u30ED\u30FC\u30C9
|
||||
WebTypes.webHistory.name=\u30A6\u30A7\u30D6\u5C65\u6B74
|
||||
WebTypes.webSearch.name=\u30A6\u30A7\u30D6\u691C\u7D22
|
@ -1,110 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.paint.Color;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
|
||||
/**
|
||||
* An Event Type represents a distinct kind of event ie file system or web
|
||||
* activity. An EventType may have an optional super-type and 0 or more
|
||||
* subtypes, allowing events to be organized in a type hierarchy.
|
||||
*/
|
||||
public interface EventType {
|
||||
|
||||
final static List<? extends EventType> allTypes = RootEventType.getInstance().getSubTypesRecusive();
|
||||
|
||||
static Comparator<EventType> getComparator() {
|
||||
return Comparator.comparing(EventType.allTypes::indexOf);
|
||||
|
||||
}
|
||||
|
||||
default BaseTypes getBaseType() {
|
||||
if (this instanceof BaseTypes) {
|
||||
return (BaseTypes) this;
|
||||
} else {
|
||||
return getSuperType().getBaseType();
|
||||
}
|
||||
}
|
||||
|
||||
default List<? extends EventType> getSubTypesRecusive() {
|
||||
ArrayList<EventType> flatList = new ArrayList<>();
|
||||
|
||||
for (EventType et : getSubTypes()) {
|
||||
flatList.add(et);
|
||||
flatList.addAll(et.getSubTypesRecusive());
|
||||
}
|
||||
return flatList;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the color used to represent this event type visually
|
||||
*/
|
||||
default Color getColor() {
|
||||
|
||||
Color baseColor = this.getSuperType().getColor();
|
||||
int siblings = getSuperType().getSiblingTypes().stream().max((
|
||||
EventType t, EventType t1)
|
||||
-> Integer.compare(t.getSubTypes().size(), t1.getSubTypes().size()))
|
||||
.get().getSubTypes().size() + 1;
|
||||
int superSiblings = this.getSuperType().getSiblingTypes().size();
|
||||
|
||||
double offset = (360.0 / superSiblings) / siblings;
|
||||
final Color deriveColor = baseColor.deriveColor(ordinal() * offset, 1, 1, 1);
|
||||
|
||||
return Color.hsb(deriveColor.getHue(), deriveColor.getSaturation(), deriveColor.getBrightness());
|
||||
|
||||
}
|
||||
|
||||
default List<? extends EventType> getSiblingTypes() {
|
||||
return this.getSuperType().getSubTypes();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the super type of this event
|
||||
*/
|
||||
public EventType getSuperType();
|
||||
|
||||
public EventTypeZoomLevel getZoomLevel();
|
||||
|
||||
/**
|
||||
* @return a list of event types, one for each subtype of this eventype, or
|
||||
* an empty list if this event type has no subtypes
|
||||
*/
|
||||
public List<? extends EventType> getSubTypes();
|
||||
|
||||
/*
|
||||
* return the name of the icon file for this type, it will be resolved in
|
||||
* the org/sleuthkit/autopsy/timeline/images
|
||||
*/
|
||||
public String getIconBase();
|
||||
|
||||
public String getDisplayName();
|
||||
|
||||
public EventType getSubType(String string);
|
||||
|
||||
public Image getFXImage();
|
||||
|
||||
public int ordinal();
|
||||
|
||||
}
|
@ -1,83 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import javafx.scene.image.Image;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public enum FileSystemTypes implements EventType {
|
||||
|
||||
FILE_MODIFIED(NbBundle.getMessage(FileSystemTypes.class, "FileSystemTypes.fileModified.name"), "blue-document-attribute-m.png"), // NON-NLS
|
||||
FILE_ACCESSED(NbBundle.getMessage(FileSystemTypes.class, "FileSystemTypes.fileAccessed.name"), "blue-document-attribute-a.png"), // NON-NLS
|
||||
FILE_CREATED(NbBundle.getMessage(FileSystemTypes.class, "FileSystemTypes.fileCreated.name"), "blue-document-attribute-b.png"), // NON-NLS
|
||||
FILE_CHANGED(NbBundle.getMessage(FileSystemTypes.class, "FileSystemTypes.fileChanged.name"), "blue-document-attribute-c.png"); // NON-NLS
|
||||
|
||||
private final String iconBase;
|
||||
|
||||
private final Image image;
|
||||
|
||||
@Override
|
||||
public Image getFXImage() {
|
||||
return image;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIconBase() {
|
||||
return iconBase;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventTypeZoomLevel getZoomLevel() {
|
||||
return EventTypeZoomLevel.SUB_TYPE;
|
||||
}
|
||||
|
||||
private final String displayName;
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return FileSystemTypes.valueOf(string);
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSuperType() {
|
||||
return BaseTypes.FILE_SYSTEM;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private FileSystemTypes(String displayName, String iconBase) {
|
||||
this.displayName = displayName;
|
||||
this.iconBase = iconBase;
|
||||
this.image = new Image("org/sleuthkit/autopsy/timeline/images/" + iconBase, true); // NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
}
|
@ -1,260 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014-16 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.function.Function;
|
||||
import java.util.logging.Level;
|
||||
import javafx.scene.image.Image;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import static org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType.getAttributeSafe;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public enum MiscTypes implements EventType, ArtifactEventType {
|
||||
|
||||
MESSAGE(NbBundle.getMessage(MiscTypes.class, "MiscTypes.message.name"), "message.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_MESSAGE),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE)),
|
||||
artf -> {
|
||||
final BlackboardAttribute dir = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DIRECTION));
|
||||
final BlackboardAttribute readStatus = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_READ_STATUS));
|
||||
final BlackboardAttribute name = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_NAME));
|
||||
final BlackboardAttribute phoneNumber = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER));
|
||||
final BlackboardAttribute subject = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SUBJECT));
|
||||
List<String> asList = Arrays.asList(stringValueOf(dir), stringValueOf(readStatus), name != null || phoneNumber != null ? toFrom(dir) : "", stringValueOf(name != null ? name : phoneNumber), (subject == null ? "" : stringValueOf(subject)));
|
||||
return StringUtils.join(asList, " ");
|
||||
},
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_TEXT))),
|
||||
GPS_ROUTE(NbBundle.getMessage(MiscTypes.class, "MiscTypes.GPSRoutes.name"), "gps-search.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_GPS_ROUTE),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PROG_NAME)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_LOCATION)),
|
||||
artf -> {
|
||||
final BlackboardAttribute latStart = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE_START));
|
||||
final BlackboardAttribute longStart = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE_START));
|
||||
final BlackboardAttribute latEnd = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE_END));
|
||||
final BlackboardAttribute longEnd = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE_END));
|
||||
return String.format("from %1$s %2$s to %3$s %4$s", stringValueOf(latStart), stringValueOf(longStart), stringValueOf(latEnd), stringValueOf(longEnd)); // NON-NLS
|
||||
}),
|
||||
GPS_TRACKPOINT(NbBundle.getMessage(MiscTypes.class, "MiscTypes.GPSTrackpoint.name"), "gps-trackpoint.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_GPS_TRACKPOINT),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PROG_NAME)),
|
||||
artf -> {
|
||||
final BlackboardAttribute longitude = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE));
|
||||
final BlackboardAttribute latitude = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE));
|
||||
return stringValueOf(latitude) + " " + stringValueOf(longitude); // NON-NLS
|
||||
},
|
||||
new EmptyExtractor()),
|
||||
CALL_LOG(NbBundle.getMessage(MiscTypes.class, "MiscTypes.Calls.name"), "calllog.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_CALLLOG),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME_START),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_NAME)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DIRECTION))),
|
||||
EMAIL(NbBundle.getMessage(MiscTypes.class, "MiscTypes.Email.name"), "mail-icon-16.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_EMAIL_MSG),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME_SENT),
|
||||
artf -> {
|
||||
final BlackboardAttribute emailFrom = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_EMAIL_FROM));
|
||||
final BlackboardAttribute emailTo = getAttributeSafe(artf, new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_EMAIL_TO));
|
||||
return stringValueOf(emailFrom) + " to " + stringValueOf(emailTo); // NON-NLS
|
||||
},
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_SUBJECT)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_EMAIL_CONTENT_PLAIN))),
|
||||
RECENT_DOCUMENTS(NbBundle.getMessage(MiscTypes.class, "MiscTypes.recentDocuments.name"), "recent_docs.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_RECENT_OBJECT),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PATH)).andThen(
|
||||
(String t) -> (StringUtils.substringBeforeLast(StringUtils.substringBeforeLast(t, "\\"), "\\"))),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PATH)).andThen(
|
||||
(String t) -> StringUtils.substringBeforeLast(t, "\\")),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PATH))) {
|
||||
|
||||
@Override
|
||||
public AttributeEventDescription parseAttributesHelper(BlackboardArtifact artf) throws TskCoreException {
|
||||
final BlackboardAttribute dateTimeAttr = artf.getAttribute(getDateTimeAttributeType());
|
||||
|
||||
long time = dateTimeAttr.getValueLong();
|
||||
|
||||
//Non-default description construction
|
||||
String shortDescription = getShortExtractor().apply(artf);
|
||||
String medDescription = getMedExtractor().apply(artf);
|
||||
String fullDescription = getFullExtractor().apply(artf);
|
||||
|
||||
return new AttributeEventDescription(time, shortDescription, medDescription, fullDescription);
|
||||
}
|
||||
},
|
||||
INSTALLED_PROGRAM(NbBundle.getMessage(MiscTypes.class, "MiscTypes.installedPrograms.name"), "programs.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_INSTALLED_PROG),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_PROG_NAME)),
|
||||
new EmptyExtractor(),
|
||||
new EmptyExtractor()),
|
||||
EXIF(NbBundle.getMessage(MiscTypes.class, "MiscTypes.exif.name"), "camera-icon-16.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_METADATA_EXIF),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL)),
|
||||
artf -> {
|
||||
try {
|
||||
AbstractFile file = artf.getSleuthkitCase().getAbstractFileById(artf.getObjectID());
|
||||
if (file != null) {
|
||||
return file.getName();
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
LOGGER.log(Level.SEVERE, "Exif event type failed to look up backing file name", ex); //NON-NLS
|
||||
}
|
||||
return "error loading file name";
|
||||
}),
|
||||
DEVICES_ATTACHED(NbBundle.getMessage(MiscTypes.class, "MiscTypes.devicesAttached.name"), "usb_devices.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED),
|
||||
new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(ATTRIBUTE_TYPE.TSK_DEVICE_ID)));
|
||||
|
||||
static public String stringValueOf(BlackboardAttribute attr) {
|
||||
return Optional.ofNullable(attr)
|
||||
.map(BlackboardAttribute::getDisplayString)
|
||||
.orElse("");
|
||||
}
|
||||
|
||||
public static String toFrom(BlackboardAttribute dir) {
|
||||
if (dir == null) {
|
||||
return "";
|
||||
} else {
|
||||
switch (dir.getDisplayString()) {
|
||||
case "Incoming": // NON-NLS
|
||||
return "from"; // NON-NLS
|
||||
case "Outgoing": // NON-NLS
|
||||
return "to"; // NON-NLS
|
||||
default:
|
||||
return ""; // NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private final BlackboardAttribute.Type dateTimeAttributeType;
|
||||
|
||||
private final String iconBase;
|
||||
|
||||
private final Image image;
|
||||
|
||||
@Override
|
||||
public Image getFXImage() {
|
||||
return image;
|
||||
}
|
||||
|
||||
private final Function<BlackboardArtifact, String> longExtractor;
|
||||
|
||||
private final Function<BlackboardArtifact, String> medExtractor;
|
||||
|
||||
private final Function<BlackboardArtifact, String> shortExtractor;
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getFullExtractor() {
|
||||
return longExtractor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getMedExtractor() {
|
||||
return medExtractor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getShortExtractor() {
|
||||
return shortExtractor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlackboardAttribute.Type getDateTimeAttributeType() {
|
||||
return dateTimeAttributeType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventTypeZoomLevel getZoomLevel() {
|
||||
return EventTypeZoomLevel.SUB_TYPE;
|
||||
}
|
||||
|
||||
private final String displayName;
|
||||
|
||||
private final BlackboardArtifact.Type artifactType;
|
||||
|
||||
@Override
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIconBase() {
|
||||
return iconBase;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return MiscTypes.valueOf(string);
|
||||
}
|
||||
|
||||
private MiscTypes(String displayName, String iconBase, BlackboardArtifact.Type artifactType,
|
||||
BlackboardAttribute.Type dateTimeAttributeType,
|
||||
Function<BlackboardArtifact, String> shortExtractor,
|
||||
Function<BlackboardArtifact, String> medExtractor,
|
||||
Function<BlackboardArtifact, String> longExtractor) {
|
||||
this.displayName = displayName;
|
||||
this.iconBase = iconBase;
|
||||
this.artifactType = artifactType;
|
||||
this.dateTimeAttributeType = dateTimeAttributeType;
|
||||
this.shortExtractor = shortExtractor;
|
||||
this.medExtractor = medExtractor;
|
||||
this.longExtractor = longExtractor;
|
||||
this.image = new Image("org/sleuthkit/autopsy/timeline/images/" + iconBase, true); // NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSuperType() {
|
||||
return BaseTypes.MISC_TYPES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlackboardArtifact.Type getArtifactType() {
|
||||
return artifactType;
|
||||
}
|
||||
|
||||
}
|
@ -1,98 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2013 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import javafx.scene.image.Image;
|
||||
import javafx.scene.paint.Color;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
|
||||
/**
|
||||
* A singleton EventType to represent the root type of all event types.
|
||||
*/
|
||||
public class RootEventType implements EventType {
|
||||
|
||||
@Override
|
||||
public List<RootEventType> getSiblingTypes() {
|
||||
return Collections.singletonList(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventTypeZoomLevel getZoomLevel() {
|
||||
return EventTypeZoomLevel.ROOT_TYPE;
|
||||
}
|
||||
|
||||
private RootEventType() {
|
||||
}
|
||||
|
||||
public static RootEventType getInstance() {
|
||||
return RootEventTypeHolder.INSTANCE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return BaseTypes.valueOf(string);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int ordinal() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
private static class RootEventTypeHolder {
|
||||
|
||||
private static final RootEventType INSTANCE = new RootEventType();
|
||||
|
||||
private RootEventTypeHolder() {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Color getColor() {
|
||||
return Color.hsb(359, .9, .9, 0);
|
||||
}
|
||||
|
||||
@Override
|
||||
public RootEventType getSuperType() {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<BaseTypes> getSubTypes() {
|
||||
return Arrays.asList(BaseTypes.values());
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getIconBase() {
|
||||
throw new UnsupportedOperationException("Not supported yet."); // NON-NLS //To change body of generated methods, choose Tools | Templates.
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDisplayName() {
|
||||
return NbBundle.getMessage(this.getClass(), "RootEventType.eventTypes.name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Image getFXImage() {
|
||||
return null;
|
||||
}
|
||||
}
|
@ -1,210 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2014-16 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
|
||||
|
||||
import com.google.common.net.InternetDomainName;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import javafx.scene.image.Image;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardAttribute;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public enum WebTypes implements EventType, ArtifactEventType {
|
||||
|
||||
WEB_DOWNLOADS(NbBundle.getMessage(WebTypes.class, "WebTypes.webDownloads.name"),
|
||||
"downloads.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD),
|
||||
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED),
|
||||
TopPrivateDomainExtractor.getInstance(),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL))) {
|
||||
|
||||
@Override
|
||||
public AttributeEventDescription parseAttributesHelper(BlackboardArtifact artf) throws TskCoreException {
|
||||
long time = artf.getAttribute(getDateTimeAttributeType()).getValueLong();
|
||||
String domain = getShortExtractor().apply(artf);
|
||||
String path = getMedExtractor().apply(artf);
|
||||
String fileName = StringUtils.substringAfterLast(path, "/");
|
||||
String url = getFullExtractor().apply(artf);
|
||||
|
||||
//TODO: review non default description construction
|
||||
String shortDescription = fileName + " from " + domain; // NON-NLS
|
||||
String medDescription = fileName + " from " + url; // NON-NLS
|
||||
String fullDescription = path + " from " + url; // NON-NLS
|
||||
return new AttributeEventDescription(time, shortDescription, medDescription, fullDescription);
|
||||
}
|
||||
},
|
||||
//TODO: review description separators
|
||||
WEB_COOKIE(NbBundle.getMessage(WebTypes.class, "WebTypes.webCookies.name"),
|
||||
"cookies.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE),
|
||||
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME),
|
||||
TopPrivateDomainExtractor.getInstance(),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE))),
|
||||
//TODO: review description separators
|
||||
WEB_BOOKMARK(NbBundle.getMessage(WebTypes.class, "WebTypes.webBookmarks.name"),
|
||||
"bookmarks.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK),
|
||||
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED),
|
||||
TopPrivateDomainExtractor.getInstance(),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE))),
|
||||
//TODO: review description separators
|
||||
WEB_HISTORY(NbBundle.getMessage(WebTypes.class, "WebTypes.webHistory.name"),
|
||||
"history.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY),
|
||||
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED),
|
||||
TopPrivateDomainExtractor.getInstance(),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL)),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE))),
|
||||
//TODO: review description separators
|
||||
WEB_SEARCH(NbBundle.getMessage(WebTypes.class, "WebTypes.webSearch.name"),
|
||||
"searchquery.png", // NON-NLS
|
||||
new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY),
|
||||
new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TEXT)),
|
||||
TopPrivateDomainExtractor.getInstance(),
|
||||
new AttributeExtractor(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME)));
|
||||
|
||||
private final BlackboardAttribute.Type dateTimeAttributeType;
|
||||
|
||||
private final String iconBase;
|
||||
|
||||
private final Image image;
|
||||
|
||||
@Override
|
||||
public Image getFXImage() {
|
||||
return image;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlackboardAttribute.Type getDateTimeAttributeType() {
|
||||
return dateTimeAttributeType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventTypeZoomLevel getZoomLevel() {
|
||||
return EventTypeZoomLevel.SUB_TYPE;
|
||||
}
|
||||
|
||||
private final Function<BlackboardArtifact, String> longExtractor;
|
||||
|
||||
private final Function<BlackboardArtifact, String> medExtractor;
|
||||
|
||||
private final Function<BlackboardArtifact, String> shortExtractor;
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getFullExtractor() {
|
||||
return longExtractor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getMedExtractor() {
|
||||
return medExtractor;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Function<BlackboardArtifact, String> getShortExtractor() {
|
||||
return shortExtractor;
|
||||
}
|
||||
|
||||
private final String displayName;
|
||||
|
||||
private final BlackboardArtifact.Type artifactType;
|
||||
|
||||
@Override
|
||||
public String getIconBase() {
|
||||
return iconBase;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BlackboardArtifact.Type getArtifactType() {
|
||||
return artifactType;
|
||||
}
|
||||
|
||||
private WebTypes(String displayName, String iconBase, BlackboardArtifact.Type artifactType,
|
||||
BlackboardAttribute.Type dateTimeAttributeType,
|
||||
Function<BlackboardArtifact, String> shortExtractor,
|
||||
Function<BlackboardArtifact, String> medExtractor,
|
||||
Function<BlackboardArtifact, String> longExtractor) {
|
||||
this.displayName = displayName;
|
||||
this.iconBase = iconBase;
|
||||
this.artifactType = artifactType;
|
||||
this.dateTimeAttributeType = dateTimeAttributeType;
|
||||
this.shortExtractor = shortExtractor;
|
||||
this.medExtractor = medExtractor;
|
||||
this.longExtractor = longExtractor;
|
||||
this.image = new Image("org/sleuthkit/autopsy/timeline/images/" + iconBase, true); // NON-NLS
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSuperType() {
|
||||
return BaseTypes.WEB_ACTIVITY;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDisplayName() {
|
||||
return displayName;
|
||||
}
|
||||
|
||||
@Override
|
||||
public EventType getSubType(String string) {
|
||||
return WebTypes.valueOf(string);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<? extends EventType> getSubTypes() {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
private static class TopPrivateDomainExtractor extends AttributeExtractor {
|
||||
|
||||
final private static TopPrivateDomainExtractor instance = new TopPrivateDomainExtractor();
|
||||
|
||||
static TopPrivateDomainExtractor getInstance() {
|
||||
return instance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String apply(BlackboardArtifact artf) {
|
||||
String domainString = StringUtils.substringBefore(super.apply(artf), "/");
|
||||
if (InternetDomainName.isValid(domainString)) {
|
||||
InternetDomainName domain = InternetDomainName.from(domainString);
|
||||
return (domain.isUnderPublicSuffix())
|
||||
? domain.topPrivateDomain().toString()
|
||||
: domain.toString();
|
||||
} else {
|
||||
return domainString;
|
||||
}
|
||||
}
|
||||
|
||||
TopPrivateDomainExtractor() {
|
||||
super(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN));
|
||||
}
|
||||
}
|
||||
}
|
@ -1,8 +0,0 @@
|
||||
msgdlg.problem.text=There was a problem populating the timeline. Not all events may be present or accurate.
|
||||
progressWindow.msg.commitingDb=Committing events database
|
||||
progressWindow.msg.gatheringData=Gathering event data
|
||||
progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files
|
||||
progressWindow.msg.refreshingFileTags=Refreshing file tags
|
||||
progressWindow.msg.refreshingResultTags=Refreshing result tags
|
||||
# {0} - event type
|
||||
progressWindow.populatingXevents=Populating {0} events
|
@ -1,7 +0,0 @@
|
||||
msgdlg.problem.text=\u30BF\u30A4\u30E0\u30E9\u30A4\u30F3\u306B\u5165\u529B\u3059\u308B\u969B\u306B\u554F\u984C\u304C\u767A\u751F\u3057\u307E\u3057\u305F\u3002\u5168\u3066\u306E\u30A4\u30D9\u30F3\u30C8\u304C\u5B58\u5728\u3057\u306A\u3044\u304B\u6B63\u78BA\u3067\u306F\u306A\u3044\u304B\u3082\u3057\u308C\u307E\u305B\u3093\u3002
|
||||
progressWindow.msg.commitingDb=\u30C7\u30FC\u30BF\u30D9\u30FC\u30B9\u306B\u30A4\u30D9\u30F3\u30C8\u3092\u30B3\u30DF\u30C3\u30C8\u3057\u3066\u3044\u307E\u3059\u3002
|
||||
progressWindow.msg.gatheringData=\u30A4\u30D9\u30F3\u30C8\u30C7\u30FC\u30BF\u3092\u53CE\u96C6\u4E2D
|
||||
progressWindow.msg.populateMacEventsFiles=\u30D5\u30A1\u30A4\u30EB\u306EMAC\u30BF\u30A4\u30E0\u3092\u5165\u529B\u4E2D
|
||||
progressWindow.msg.refreshingFileTags=\u30D5\u30A1\u30A4\u30EB\u30BF\u30B0\u3092\u30EA\u30D5\u30EC\u30C3\u30B7\u30E5\u4E2D
|
||||
progressWindow.msg.refreshingResultTags=\u7D50\u679C\u30BF\u30B0\u3092\u30EA\u30D5\u30EC\u30C3\u30B7\u30E5\u4E2D
|
||||
progressWindow.populatingXevents={0}\u30A4\u30D9\u30F3\u30C8\u3092\u5165\u529B\u4E2D
|
File diff suppressed because it is too large
Load Diff
@ -1,727 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2011-2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.db;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.util.concurrent.ThreadFactoryBuilder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import static java.util.Objects.isNull;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CancellationException;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.logging.Level;
|
||||
import java.util.stream.Collectors;
|
||||
import javafx.application.Platform;
|
||||
import javafx.beans.property.ReadOnlyBooleanProperty;
|
||||
import javafx.beans.property.ReadOnlyBooleanWrapper;
|
||||
import javafx.beans.property.ReadOnlyObjectProperty;
|
||||
import javafx.collections.FXCollections;
|
||||
import javafx.collections.ObservableList;
|
||||
import javafx.collections.ObservableMap;
|
||||
import javafx.concurrent.Worker;
|
||||
import javax.swing.JOptionPane;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.joda.time.Interval;
|
||||
import org.netbeans.api.progress.ProgressHandle;
|
||||
import org.openide.util.NbBundle;
|
||||
import org.openide.windows.WindowManager;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.services.TagsManager;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.coreutils.ThreadConfined;
|
||||
import org.sleuthkit.autopsy.timeline.CancellationProgressTask;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.CombinedEvent;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.EventStripe;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.SingleEvent;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.FileSystemTypes;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
|
||||
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
|
||||
import org.sleuthkit.datamodel.AbstractFile;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifact;
|
||||
import org.sleuthkit.datamodel.BlackboardArtifactTag;
|
||||
import org.sleuthkit.datamodel.Content;
|
||||
import org.sleuthkit.datamodel.ContentTag;
|
||||
import org.sleuthkit.datamodel.SleuthkitCase;
|
||||
import org.sleuthkit.datamodel.Tag;
|
||||
import org.sleuthkit.datamodel.TagName;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
* Provides higher-level public API (over EventsDB) to access events. In theory
|
||||
* this insulates the rest of the timeline module form the details of the db
|
||||
* implementation. Since there are no other implementations of the database or
|
||||
* clients of this class, and no Java Interface defined yet, in practice this
|
||||
* just delegates everything to the eventDB. Some results are also cached by
|
||||
* this layer.
|
||||
*
|
||||
* Concurrency Policy:
|
||||
*
|
||||
* Since almost everything just delegates to the EventDB, which is internally
|
||||
* synchronized, we only have to worry about rebuildRepository() which we
|
||||
* synchronize on our intrinsic lock.
|
||||
*
|
||||
*/
|
||||
public class EventsRepository {
|
||||
|
||||
private final static Logger logger = Logger.getLogger(EventsRepository.class.getName());
|
||||
|
||||
private final Executor workerExecutor = Executors.newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("eventrepository-worker-%d").build()); //NON-NLS
|
||||
private DBPopulationWorker dbWorker;
|
||||
private final EventDB eventDB;
|
||||
private final Case autoCase;
|
||||
private final FilteredEventsModel modelInstance;
|
||||
|
||||
private final LoadingCache<Object, Long> maxCache;
|
||||
private final LoadingCache<Object, Long> minCache;
|
||||
private final LoadingCache<Long, SingleEvent> idToEventCache;
|
||||
private final LoadingCache<ZoomParams, Map<EventType, Long>> eventCountsCache;
|
||||
private final LoadingCache<ZoomParams, List<EventStripe>> eventStripeCache;
|
||||
|
||||
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
|
||||
private final ObservableMap<Long, String> hashSetMap = FXCollections.observableHashMap();
|
||||
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
|
||||
|
||||
public Case getAutoCase() {
|
||||
return autoCase;
|
||||
}
|
||||
|
||||
public ObservableList<TagName> getTagNames() {
|
||||
return tagNames;
|
||||
}
|
||||
|
||||
synchronized public ObservableMap<Long, String> getDatasourcesMap() {
|
||||
return datasourcesMap;
|
||||
}
|
||||
|
||||
synchronized public ObservableMap<Long, String> getHashSetMap() {
|
||||
return hashSetMap;
|
||||
}
|
||||
|
||||
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter) {
|
||||
return eventDB.getBoundingEventsInterval(timeRange, filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return a FilteredEvetns object with this repository as underlying source
|
||||
* of events
|
||||
*/
|
||||
public FilteredEventsModel getEventsModel() {
|
||||
return modelInstance;
|
||||
}
|
||||
|
||||
/**
 * Construct a repository for the given case.
 *
 * Note: the caches must be built before the FilteredEventsModel, which
 * reads from this repository during its own construction.
 *
 * @param autoCase             the case this repository provides events for
 * @param currentStateProperty the zoom state the model should track
 */
public EventsRepository(Case autoCase, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
    this.autoCase = autoCase;
    //TODO: we should check that case is open, or get passed a case object/directory -jm
    this.eventDB = EventDB.getEventDB(autoCase);
    populateFilterData(autoCase.getSleuthkitCase());
    // bounded, time-expiring cache of individual events by ID
    idToEventCache = CacheBuilder.newBuilder()
            .maximumSize(5000L)
            .expireAfterAccess(10, TimeUnit.MINUTES)
            .build(CacheLoader.from(eventDB::getEventById));
    // per-zoom-state caches of counts and stripes
    eventCountsCache = CacheBuilder.newBuilder()
            .maximumSize(1000L)
            .expireAfterAccess(10, TimeUnit.MINUTES)
            .build(CacheLoader.from(eventDB::countEventsByType));
    eventStripeCache = CacheBuilder.newBuilder()
            .maximumSize(1000L)
            .expireAfterAccess(10, TimeUnit.MINUTES
            ).build(CacheLoader.from(eventDB::getEventStripes));
    // unbounded single-value caches for the overall min/max event times
    maxCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMaxTime));
    minCache = CacheBuilder.newBuilder().build(CacheLoader.from(eventDB::getMinTime));
    this.modelInstance = new FilteredEventsModel(this, currentStateProperty);
}
|
||||
|
||||
/**
 * @return max time (in seconds from unix epoch)
 */
public Long getMaxTime() {
    return maxCache.getUnchecked("max"); // NON-NLS
}
|
||||
|
||||
/**
 * @return min time (in seconds from unix epoch)
 */
public Long getMinTime() {
    return minCache.getUnchecked("min"); // NON-NLS
}
|
||||
|
||||
public SingleEvent getEventById(Long eventID) {
|
||||
return idToEventCache.getUnchecked(eventID);
|
||||
}
|
||||
|
||||
synchronized public Set<SingleEvent> getEventsById(Collection<Long> eventIDs) {
|
||||
return eventIDs.stream()
|
||||
.map(idToEventCache::getUnchecked)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
}
|
||||
|
||||
synchronized public List<EventStripe> getEventStripes(ZoomParams params) {
|
||||
try {
|
||||
return eventStripeCache.get(params);
|
||||
} catch (ExecutionException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to load Event Stripes from cache for " + params.toString(), ex); //NON-NLS
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
synchronized public Map<EventType, Long> countEvents(ZoomParams params) {
|
||||
return eventCountsCache.getUnchecked(params);
|
||||
}
|
||||
|
||||
synchronized public int countAllEvents() {
|
||||
return eventDB.countAllEvents();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a List of event IDs for the events that are derived from the given
|
||||
* file.
|
||||
*
|
||||
* @param file The AbstractFile to get derived event IDs
|
||||
* for.
|
||||
* @param includeDerivedArtifacts If true, also get event IDs for events
|
||||
* derived from artifacts derived form this
|
||||
* file. If false, only gets events derived
|
||||
* directly from this file (file system
|
||||
* timestamps).
|
||||
*
|
||||
* @return A List of event IDs for the events that are derived from the
|
||||
* given file.
|
||||
*/
|
||||
public List<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) {
|
||||
return eventDB.getEventIDsForFile(file, includeDerivedArtifacts);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a List of event IDs for the events that are derived from the given
|
||||
* artifact.
|
||||
*
|
||||
* @param artifact The BlackboardArtifact to get derived event IDs for.
|
||||
*
|
||||
* @return A List of event IDs for the events that are derived from the
|
||||
* given artifact.
|
||||
*/
|
||||
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) {
|
||||
return eventDB.getEventIDsForArtifact(artifact);
|
||||
}
|
||||
|
||||
private void invalidateCaches() {
|
||||
minCache.invalidateAll();
|
||||
maxCache.invalidateAll();
|
||||
eventCountsCache.invalidateAll();
|
||||
eventStripeCache.invalidateAll();
|
||||
idToEventCache.invalidateAll();
|
||||
}
|
||||
|
||||
public List<Long> getEventIDs(Interval timeRange, RootFilter filter) {
|
||||
return eventDB.getEventIDs(timeRange, filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a representation of all the events, within the given time range, that
|
||||
* pass the given filter, grouped by time and description such that file
|
||||
* system events for the same file, with the same timestamp, are combined
|
||||
* together.
|
||||
*
|
||||
* @param timeRange The Interval that all returned events must be within.
|
||||
* @param filter The Filter that all returned events must pass.
|
||||
*
|
||||
* @return A List of combined events, sorted by timestamp.
|
||||
*/
|
||||
public List<CombinedEvent> getCombinedEvents(Interval timeRange, RootFilter filter) {
|
||||
return eventDB.getCombinedEvents(timeRange, filter);
|
||||
}
|
||||
|
||||
public Interval getSpanningInterval(Collection<Long> eventIDs) {
|
||||
return eventDB.getSpanningInterval(eventIDs);
|
||||
}
|
||||
|
||||
public boolean hasNewColumns() {
|
||||
return eventDB.hasNewColumns();
|
||||
}
|
||||
|
||||
/**
|
||||
* get a count of tagnames applied to the given event ids as a map from
|
||||
* tagname displayname to count of tag applications
|
||||
*
|
||||
* @param eventIDsWithTags the event ids to get the tag counts map for
|
||||
*
|
||||
* @return a map from tagname displayname to count of applications
|
||||
*/
|
||||
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
|
||||
return eventDB.getTagCountsByTagName(eventIDsWithTags);
|
||||
}
|
||||
|
||||
/**
|
||||
* use the given SleuthkitCase to update the data used to determine the
|
||||
* available filters.
|
||||
*
|
||||
* @param skCase
|
||||
*/
|
||||
synchronized private void populateFilterData(SleuthkitCase skCase) {
|
||||
|
||||
for (Map.Entry<Long, String> hashSet : eventDB.getHashSetNames().entrySet()) {
|
||||
hashSetMap.putIfAbsent(hashSet.getKey(), hashSet.getValue());
|
||||
}
|
||||
//because there is no way to remove a datasource we only add to this map.
|
||||
for (Long id : eventDB.getDataSourceIDs()) {
|
||||
try {
|
||||
datasourcesMap.putIfAbsent(id, skCase.getContentById(id).getDataSource().getName());
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to get datasource by ID.", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
//should this only be tags applied to files or event bearing artifacts?
|
||||
tagNames.setAll(skCase.getTagNamesInUse());
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to get tag names in use.", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag, EventDB.EventTransaction trans) {
|
||||
Set<Long> updatedEventIDs = eventDB.addTag(objID, artifactID, tag, trans);
|
||||
if (!updatedEventIDs.isEmpty()) {
|
||||
invalidateCaches(updatedEventIDs);
|
||||
}
|
||||
return updatedEventIDs;
|
||||
}
|
||||
|
||||
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) {
|
||||
Set<Long> updatedEventIDs = eventDB.deleteTag(objID, artifactID, tagID, tagged);
|
||||
if (!updatedEventIDs.isEmpty()) {
|
||||
invalidateCaches(updatedEventIDs);
|
||||
}
|
||||
return updatedEventIDs;
|
||||
}
|
||||
|
||||
synchronized private void invalidateCaches(Set<Long> updatedEventIDs) {
|
||||
eventCountsCache.invalidateAll();
|
||||
eventStripeCache.invalidateAll();
|
||||
idToEventCache.invalidateAll(updatedEventIDs);
|
||||
try {
|
||||
tagNames.setAll(autoCase.getSleuthkitCase().getTagNamesInUse());
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Failed to get tag names in use.", ex); //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* "sync" the given tags filter with the tagnames in use: Disable filters
|
||||
* for tags that are not in use in the case, and add new filters for tags
|
||||
* that don't have them. New filters are selected by default.
|
||||
*
|
||||
* @param tagsFilter the tags filter to modify so it is consistent with the
|
||||
* tags in use in the case
|
||||
*/
|
||||
public void syncTagsFilter(TagsFilter tagsFilter) {
|
||||
for (TagName t : tagNames) {
|
||||
tagsFilter.addSubFilter(new TagNameFilter(t, autoCase));
|
||||
}
|
||||
for (TagNameFilter t : tagsFilter.getSubFilters()) {
|
||||
t.setDisabled(tagNames.contains(t.getTagName()) == false);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean areFiltersEquivalent(RootFilter f1, RootFilter f2) {
|
||||
return SQLHelper.getSQLWhere(f1).equals(SQLHelper.getSQLWhere(f2));
|
||||
}
|
||||
|
||||
/**
 *
 * rebuild the entire repo.
 *
 * @param onStateChange called when the background task changes state.
 *                      Clients can use this to handle failure, or cleanup
 *                      operations for example.
 *
 * @return the task that will rebuild the repo in a background thread. The
 *         task has already been started.
 */
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public CancellationProgressTask<Void> rebuildRepository(Consumer<Worker.State> onStateChange) {
    return rebuildRepository(DBPopulationMode.FULL, onStateChange);
}
|
||||
|
||||
/**
 *
 * drop and rebuild the tags in the repo.
 *
 * @param onStateChange called when the background task changes state.
 *                      Clients can use this to handle failure, or cleanup
 *                      operations for example.
 *
 * @return the task that will rebuild the repo in a background thread. The
 *         task has already been started.
 */
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
public CancellationProgressTask<Void> rebuildTags(Consumer<Worker.State> onStateChange) {
    return rebuildRepository(DBPopulationMode.TAGS_ONLY, onStateChange);
}
|
||||
|
||||
/**
 * rebuild the repo.
 *
 * @param mode          the rebuild mode to use.
 * @param onStateChange called when the background task changes state.
 *                      Clients can use this to handle failure, or cleanup
 *                      operations for example.
 *
 * @return the task that will rebuild the repo in a background thread. The
 *         task has already been started.
 */
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
private CancellationProgressTask<Void> rebuildRepository(final DBPopulationMode mode, Consumer<Worker.State> onStateChange) {
    logger.log(Level.INFO, "(re)starting {0} db population task", mode); //NON-NLS
    // cancel any task already running; the single-threaded executor ensures
    // the new task does not start until the old one has finished.
    if (dbWorker != null) {
        dbWorker.cancel();
    }
    dbWorker = new DBPopulationWorker(mode, onStateChange);
    workerExecutor.execute(dbWorker);
    return dbWorker;
}
|
||||
|
||||
/**
 * The modes the population worker can run in: a FULL rebuild of the events
 * database, or a TAGS_ONLY refresh of the tag data.
 */
private enum DBPopulationMode {

    FULL,
    TAGS_ONLY;
}
|
||||
|
||||
/**
|
||||
* //TODO: I don't like the coupling to ProgressHandle in this task, but
|
||||
* the alternatives I can think of seem even worse. -jm
|
||||
*/
|
||||
private class DBPopulationWorker extends CancellationProgressTask<Void> {
|
||||
|
||||
private final ReadOnlyBooleanWrapper cancellable = new ReadOnlyBooleanWrapper(true);
|
||||
|
||||
private final DBPopulationMode dbPopulationMode;
|
||||
private final SleuthkitCase skCase;
|
||||
private final TagsManager tagsManager;
|
||||
|
||||
private ProgressHandle progressHandle;
|
||||
|
||||
@Override
|
||||
public ReadOnlyBooleanProperty cancellableProperty() {
|
||||
return cancellable.getReadOnlyProperty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean requestCancel() {
|
||||
Platform.runLater(() -> cancellable.set(false));
|
||||
return super.requestCancel();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateTitle(String title) {
|
||||
super.updateTitle(title);
|
||||
progressHandle.setDisplayName(title);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateMessage(String message) {
|
||||
super.updateMessage(message);
|
||||
progressHandle.progress(message);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateProgress(double workDone, double max) {
|
||||
super.updateProgress(workDone, max);
|
||||
if (workDone >= 0) {
|
||||
progressHandle.progress((int) workDone);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void updateProgress(long workDone, long max) {
|
||||
super.updateProgress(workDone, max);
|
||||
super.updateProgress(workDone, max);
|
||||
if (workDone >= 0) {
|
||||
progressHandle.progress((int) workDone);
|
||||
}
|
||||
}
|
||||
|
||||
DBPopulationWorker(DBPopulationMode mode, Consumer<Worker.State> onStateChange) {
|
||||
skCase = autoCase.getSleuthkitCase();
|
||||
tagsManager = autoCase.getServices().getTagsManager();
|
||||
this.dbPopulationMode = mode;
|
||||
this.stateProperty().addListener(stateObservable -> onStateChange.accept(getState()));
|
||||
}
|
||||
|
||||
void restartProgressHandle(String title, String message, Double workDone, double total, Boolean cancellable) {
|
||||
if (progressHandle != null) {
|
||||
progressHandle.finish();
|
||||
}
|
||||
progressHandle = cancellable
|
||||
? ProgressHandle.createHandle(title, this::requestCancel)
|
||||
: ProgressHandle.createHandle(title);
|
||||
|
||||
if (workDone < 0) {
|
||||
progressHandle.start();
|
||||
} else {
|
||||
progressHandle.start((int) total);
|
||||
}
|
||||
updateTitle(title);
|
||||
updateMessage(message);
|
||||
updateProgress(workDone, total);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation") // TODO (EUR-733): Do not use SleuthkitCase.getLastObjectId
|
||||
@Override
|
||||
@NbBundle.Messages({"progressWindow.msg.refreshingFileTags=Refreshing file tags",
|
||||
"progressWindow.msg.refreshingResultTags=Refreshing result tags",
|
||||
"progressWindow.msg.gatheringData=Gathering event data",
|
||||
"progressWindow.msg.commitingDb=Committing events database"})
|
||||
protected Void call() throws Exception {
|
||||
EventDB.EventTransaction trans = null;
|
||||
|
||||
if (dbPopulationMode == DBPopulationMode.FULL) {
|
||||
//drop old db, and add back MAC and artifact events
|
||||
logger.log(Level.INFO, "Beginning population of timeline db."); // NON-NLS
|
||||
restartProgressHandle(Bundle.progressWindow_msg_gatheringData(), "", -1D, 1, true);
|
||||
//reset database //TODO: can we do more incremental updates? -jm
|
||||
eventDB.reInitializeDB();
|
||||
//grab ids of all files
|
||||
List<Long> fileIDs = skCase.findAllFileIdsWhere("name != '.' AND name != '..'" +
|
||||
" AND type != " + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.ordinal()); //NON-NLS
|
||||
final int numFiles = fileIDs.size();
|
||||
|
||||
trans = eventDB.beginTransaction();
|
||||
insertMACTimeEvents(numFiles, fileIDs, trans);
|
||||
insertArtifactDerivedEvents(trans);
|
||||
}
|
||||
|
||||
//tags
|
||||
if (dbPopulationMode == DBPopulationMode.TAGS_ONLY) {
|
||||
trans = eventDB.beginTransaction();
|
||||
logger.log(Level.INFO, "dropping old tags"); // NON-NLS
|
||||
eventDB.reInitializeTags();
|
||||
}
|
||||
|
||||
logger.log(Level.INFO, "updating content tags"); // NON-NLS
|
||||
List<ContentTag> contentTags = tagsManager.getAllContentTags();
|
||||
int currentWorkTotal = contentTags.size();
|
||||
restartProgressHandle(Bundle.progressWindow_msg_refreshingFileTags(), "", 0D, currentWorkTotal, true);
|
||||
insertContentTags(currentWorkTotal, contentTags, trans);
|
||||
|
||||
logger.log(Level.INFO, "updating artifact tags"); // NON-NLS
|
||||
List<BlackboardArtifactTag> artifactTags = tagsManager.getAllBlackboardArtifactTags();
|
||||
currentWorkTotal = artifactTags.size();
|
||||
restartProgressHandle(Bundle.progressWindow_msg_refreshingResultTags(), "", 0D, currentWorkTotal, true);
|
||||
insertArtifactTags(currentWorkTotal, artifactTags, trans);
|
||||
|
||||
logger.log(Level.INFO, "committing db"); // NON-NLS
|
||||
Platform.runLater(() -> cancellable.set(false));
|
||||
restartProgressHandle(Bundle.progressWindow_msg_commitingDb(), "", -1D, 1, false);
|
||||
eventDB.commitTransaction(trans);
|
||||
|
||||
eventDB.analyze();
|
||||
populateFilterData(skCase);
|
||||
invalidateCaches();
|
||||
|
||||
progressHandle.finish();
|
||||
if (isCancelRequested()) {
|
||||
cancel();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private void insertArtifactTags(int currentWorkTotal, List<BlackboardArtifactTag> artifactTags, EventDB.EventTransaction trans) {
|
||||
for (int i = 0; i < currentWorkTotal; i++) {
|
||||
if (isCancelRequested()) {
|
||||
break;
|
||||
}
|
||||
updateProgress(i, currentWorkTotal);
|
||||
BlackboardArtifactTag artifactTag = artifactTags.get(i);
|
||||
eventDB.addTag(artifactTag.getContent().getId(), artifactTag.getArtifact().getArtifactID(), artifactTag, trans);
|
||||
}
|
||||
}
|
||||
|
||||
private void insertContentTags(int currentWorkTotal, List<ContentTag> contentTags, EventDB.EventTransaction trans) {
|
||||
for (int i = 0; i < currentWorkTotal; i++) {
|
||||
if (isCancelRequested()) {
|
||||
break;
|
||||
}
|
||||
updateProgress(i, currentWorkTotal);
|
||||
ContentTag contentTag = contentTags.get(i);
|
||||
eventDB.addTag(contentTag.getContent().getId(), null, contentTag, trans);
|
||||
}
|
||||
}
|
||||
|
||||
private void insertArtifactDerivedEvents(EventDB.EventTransaction trans) {
|
||||
//insert artifact based events
|
||||
//TODO: use (not-yet existing api) to grab all artifacts with timestamps, rather than the hardcoded lists in EventType -jm
|
||||
for (EventType type : RootEventType.allTypes) {
|
||||
if (isCancelRequested()) {
|
||||
break;
|
||||
}
|
||||
//skip file_system events, they are already handled above.
|
||||
if (type instanceof ArtifactEventType) {
|
||||
populateEventType((ArtifactEventType) type, trans);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@NbBundle.Messages("progressWindow.msg.populateMacEventsFiles=Populating MAC time events for files")
|
||||
private void insertMACTimeEvents(final int numFiles, List<Long> fileIDs, EventDB.EventTransaction trans) {
|
||||
restartProgressHandle(Bundle.progressWindow_msg_populateMacEventsFiles(), "", 0D, numFiles, true);
|
||||
for (int i = 0; i < numFiles; i++) {
|
||||
if (isCancelRequested()) {
|
||||
break;
|
||||
}
|
||||
long fID = fileIDs.get(i);
|
||||
try {
|
||||
AbstractFile f = skCase.getAbstractFileById(fID);
|
||||
|
||||
if (isNull(f)) {
|
||||
logger.log(Level.WARNING, "Failed to get data for file : {0}", fID); // NON-NLS
|
||||
} else {
|
||||
insertEventsForFile(f, trans);
|
||||
updateProgress(i, numFiles);
|
||||
updateMessage(f.getName());
|
||||
}
|
||||
} catch (TskCoreException tskCoreException) {
|
||||
logger.log(Level.SEVERE, "Failed to insert MAC time events for file : " + fID, tskCoreException); // NON-NLS
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void insertEventsForFile(AbstractFile f, EventDB.EventTransaction trans) throws TskCoreException {
|
||||
//gather time stamps into map
|
||||
EnumMap<FileSystemTypes, Long> timeMap = new EnumMap<>(FileSystemTypes.class);
|
||||
timeMap.put(FileSystemTypes.FILE_CREATED, f.getCrtime());
|
||||
timeMap.put(FileSystemTypes.FILE_ACCESSED, f.getAtime());
|
||||
timeMap.put(FileSystemTypes.FILE_CHANGED, f.getCtime());
|
||||
timeMap.put(FileSystemTypes.FILE_MODIFIED, f.getMtime());
|
||||
|
||||
/*
|
||||
* if there are no legitimate ( greater than zero ) time stamps (
|
||||
* eg, logical/local files) skip the rest of the event generation:
|
||||
* this should result in dropping logical files, since they do not
|
||||
* have legitimate time stamps.
|
||||
*/
|
||||
if (Collections.max(timeMap.values()) > 0) {
|
||||
final String uniquePath = f.getUniquePath();
|
||||
final String parentPath = f.getParentPath();
|
||||
long datasourceID = f.getDataSource().getId();
|
||||
String datasourceName = StringUtils.substringBeforeLast(uniquePath, parentPath);
|
||||
|
||||
String rootFolder = StringUtils.substringBefore(StringUtils.substringAfter(parentPath, "/"), "/");
|
||||
String shortDesc = datasourceName + "/" + StringUtils.defaultString(rootFolder);
|
||||
shortDesc = shortDesc.endsWith("/") ? shortDesc : shortDesc + "/";
|
||||
String medDesc = datasourceName + parentPath;
|
||||
|
||||
final TskData.FileKnown known = f.getKnown();
|
||||
Set<String> hashSets = f.getHashSetNames();
|
||||
List<ContentTag> tags = tagsManager.getContentTagsByContent(f);
|
||||
|
||||
for (Map.Entry<FileSystemTypes, Long> timeEntry : timeMap.entrySet()) {
|
||||
if (timeEntry.getValue() > 0) {
|
||||
// if the time is legitimate ( greater than zero ) insert it
|
||||
eventDB.insertEvent(timeEntry.getValue(), timeEntry.getKey(),
|
||||
datasourceID, f.getId(), null, uniquePath, medDesc,
|
||||
shortDesc, known, hashSets, tags, trans);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
@NbBundle.Messages("msgdlg.problem.text=There was a problem populating the timeline."
|
||||
+ " Not all events may be present or accurate.")
|
||||
protected void done() {
|
||||
super.done();
|
||||
try {
|
||||
get();
|
||||
} catch (CancellationException ex) {
|
||||
logger.log(Level.WARNING, "Timeline database population was cancelled by the user. " //NON-NLS
|
||||
+ " Not all events may be present or accurate."); // NON-NLS
|
||||
} catch (Exception ex) {
|
||||
logger.log(Level.WARNING, "Unexpected exception while populating database.", ex); // NON-NLS
|
||||
JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), Bundle.msgdlg_problem_text());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* populate all the events of one type
|
||||
*
|
||||
* @param type the type to populate
|
||||
* @param trans the db transaction to use
|
||||
*/
|
||||
@NbBundle.Messages({"# {0} - event type ", "progressWindow.populatingXevents=Populating {0} events"})
|
||||
private void populateEventType(final ArtifactEventType type, EventDB.EventTransaction trans) {
|
||||
try {
|
||||
//get all the blackboard artifacts corresponding to the given event sub_type
|
||||
final ArrayList<BlackboardArtifact> blackboardArtifacts = skCase.getBlackboardArtifacts(type.getArtifactTypeID());
|
||||
final int numArtifacts = blackboardArtifacts.size();
|
||||
restartProgressHandle(Bundle.progressWindow_populatingXevents(type.getDisplayName()), "", 0D, numArtifacts, true);
|
||||
for (int i = 0; i < numArtifacts; i++) {
|
||||
try {
|
||||
//for each artifact, extract the relevant information for the descriptions
|
||||
insertEventForArtifact(type, blackboardArtifacts.get(i), trans);
|
||||
updateProgress(i, numArtifacts);
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "There was a problem inserting event for artifact: " + blackboardArtifacts.get(i).getArtifactID(), ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
} catch (TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "There was a problem getting events with sub type " + type.toString() + ".", ex); // NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
private void insertEventForArtifact(final ArtifactEventType type, BlackboardArtifact bbart, EventDB.EventTransaction trans) throws TskCoreException {
|
||||
ArtifactEventType.AttributeEventDescription eventDescription = ArtifactEventType.buildEventDescription(type, bbart);
|
||||
|
||||
// if the time is legitimate ( greater than zero ) insert it into the db
|
||||
if (eventDescription != null && eventDescription.getTime() > 0) {
|
||||
long objectID = bbart.getObjectID();
|
||||
Content content = skCase.getContentById(objectID);
|
||||
long datasourceID = content.getDataSource().getId();
|
||||
long artifactID = bbart.getArtifactID();
|
||||
Set<String> hashSets = content.getHashSetNames();
|
||||
List<BlackboardArtifactTag> tags = tagsManager.getBlackboardArtifactTagsByArtifact(bbart);
|
||||
String fullDescription = eventDescription.getFullDescription();
|
||||
String medDescription = eventDescription.getMedDescription();
|
||||
String shortDescription = eventDescription.getShortDescription();
|
||||
eventDB.insertEvent(eventDescription.getTime(), type, datasourceID, objectID, artifactID, fullDescription, medDescription, shortDescription, null, hashSets, tags, trans);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -1,326 +0,0 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2013-16 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.db;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
import javax.annotation.Nonnull;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
|
||||
import org.sleuthkit.autopsy.timeline.filters.AbstractFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.DescriptionFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.Filter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.IntersectionFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
|
||||
import org.sleuthkit.autopsy.timeline.filters.UnionFilter;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD.FULL;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD.MEDIUM;
|
||||
import org.sleuthkit.autopsy.timeline.zooming.TimeUnits;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.DAYS;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.HOURS;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MINUTES;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.MONTHS;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.SECONDS;
|
||||
import static org.sleuthkit.autopsy.timeline.zooming.TimeUnits.YEARS;
|
||||
import org.sleuthkit.datamodel.TskData;
|
||||
|
||||
/**
|
||||
* Static helper methods for converting between java "data model" objects and
|
||||
* sqlite queries.
|
||||
*/
|
||||
class SQLHelper {
|
||||
|
||||
static String useHashHitTablesHelper(RootFilter filter) {
|
||||
HashHitsFilter hashHitFilter = filter.getHashHitsFilter();
|
||||
return hashHitFilter.isActive() ? " LEFT JOIN hash_set_hits " : " "; //NON-NLS
|
||||
}
|
||||
|
||||
static String useTagTablesHelper(RootFilter filter) {
|
||||
TagsFilter tagsFilter = filter.getTagsFilter();
|
||||
return tagsFilter.isActive() ? " LEFT JOIN tags " : " "; //NON-NLS
|
||||
}
|
||||
|
||||
/**
|
||||
* take the result of a group_concat SQLite operation and split it into a
|
||||
* set of X using the mapper to to convert from string to X
|
||||
*
|
||||
* @param <X> the type of elements to return
|
||||
* @param groupConcat a string containing the group_concat result ( a comma
|
||||
* separated list)
|
||||
* @param mapper a function from String to X
|
||||
*
|
||||
* @return a Set of X, each element mapped from one element of the original
|
||||
* comma delimited string
|
||||
*/
|
||||
static <X> List<X> unGroupConcat(String groupConcat, Function<String, X> mapper) {
|
||||
return StringUtils.isBlank(groupConcat) ? Collections.emptyList()
|
||||
: Stream.of(groupConcat.split(","))
|
||||
.map(mapper::apply)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* get the SQL where clause corresponding to an intersection filter ie
|
||||
* (sub-clause1 and sub-clause2 and ... and sub-clauseN)
|
||||
*
|
||||
* @param filter the filter get the where clause for
|
||||
*
|
||||
* @return an SQL where clause (without the "where") corresponding to the
|
||||
* filter
|
||||
*/
|
||||
private static String getSQLWhere(IntersectionFilter<?> filter) {
|
||||
String join = String.join(" and ", filter.getSubFilters().stream()
|
||||
.filter(Filter::isActive)
|
||||
.map(SQLHelper::getSQLWhere)
|
||||
.collect(Collectors.toList()));
|
||||
return "(" + StringUtils.defaultIfBlank(join, "1") + ")";
|
||||
}
|
||||
|
||||
/**
|
||||
* get the SQL where clause corresponding to a union filter ie (sub-clause1
|
||||
* or sub-clause2 or ... or sub-clauseN)
|
||||
*
|
||||
* @param filter the filter get the where clause for
|
||||
*
|
||||
* @return an SQL where clause (without the "where") corresponding to the
|
||||
* filter
|
||||
*/
|
||||
private static String getSQLWhere(UnionFilter<?> filter) {
|
||||
String join = String.join(" or ", filter.getSubFilters().stream()
|
||||
.filter(Filter::isActive)
|
||||
.map(SQLHelper::getSQLWhere)
|
||||
.collect(Collectors.toList()));
|
||||
return "(" + StringUtils.defaultIfBlank(join, "1") + ")";
|
||||
}
|
||||
|
||||
static String getSQLWhere(RootFilter filter) {
|
||||
return getSQLWhere((Filter) filter);
|
||||
}
|
||||
|
||||
/**
|
||||
* get the SQL where clause corresponding to the given filter
|
||||
*
|
||||
* uses instance of to dispatch to the correct method for each filter type.
|
||||
* NOTE: I don't like this if-else instance of chain, but I can't decide
|
||||
* what to do instead -jm
|
||||
*
|
||||
* @param filter a filter to generate the SQL where clause for
|
||||
*
|
||||
* @return an SQL where clause (without the "where") corresponding to the
|
||||
* filter
|
||||
*/
|
||||
private static String getSQLWhere(Filter filter) {
|
||||
String result = "";
|
||||
if (filter == null) {
|
||||
return "1";
|
||||
} else if (filter instanceof DescriptionFilter) {
|
||||
result = getSQLWhere((DescriptionFilter) filter);
|
||||
} else if (filter instanceof TagsFilter) {
|
||||
result = getSQLWhere((TagsFilter) filter);
|
||||
} else if (filter instanceof HashHitsFilter) {
|
||||
result = getSQLWhere((HashHitsFilter) filter);
|
||||
} else if (filter instanceof DataSourceFilter) {
|
||||
result = getSQLWhere((DataSourceFilter) filter);
|
||||
} else if (filter instanceof DataSourcesFilter) {
|
||||
result = getSQLWhere((DataSourcesFilter) filter);
|
||||
} else if (filter instanceof HideKnownFilter) {
|
||||
result = getSQLWhere((HideKnownFilter) filter);
|
||||
} else if (filter instanceof HashHitsFilter) {
|
||||
result = getSQLWhere((HashHitsFilter) filter);
|
||||
} else if (filter instanceof TextFilter) {
|
||||
result = getSQLWhere((TextFilter) filter);
|
||||
} else if (filter instanceof TypeFilter) {
|
||||
result = getSQLWhere((TypeFilter) filter);
|
||||
} else if (filter instanceof IntersectionFilter) {
|
||||
result = getSQLWhere((IntersectionFilter) filter);
|
||||
} else if (filter instanceof UnionFilter) {
|
||||
result = getSQLWhere((UnionFilter) filter);
|
||||
} else {
|
||||
throw new IllegalArgumentException("getSQLWhere not defined for " + filter.getClass().getCanonicalName());
|
||||
}
|
||||
result = StringUtils.deleteWhitespace(result).equals("(1and1and1)") ? "1" : result; //NON-NLS
|
||||
result = StringUtils.deleteWhitespace(result).equals("()") ? "1" : result;
|
||||
return result;
|
||||
}
|
||||
|
||||
private static String getSQLWhere(HideKnownFilter filter) {
|
||||
if (filter.isActive()) {
|
||||
return "(known_state IS NOT '" + TskData.FileKnown.KNOWN.getFileKnownValue() + "')"; // NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
}
|
||||
|
||||
private static String getSQLWhere(DescriptionFilter filter) {
|
||||
if (filter.isActive()) {
|
||||
String likeOrNotLike = (filter.getFilterMode() == DescriptionFilter.FilterMode.INCLUDE ? "" : " NOT") + " LIKE '"; //NON-NLS
|
||||
return "(" + getDescriptionColumn(filter.getDescriptionLoD()) + likeOrNotLike + filter.getDescription() + "' )"; // NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
}
|
||||
|
||||
private static String getSQLWhere(TagsFilter filter) {
|
||||
if (filter.isActive()
|
||||
&& (filter.getSubFilters().isEmpty() == false)) {
|
||||
String tagNameIDs = filter.getSubFilters().stream()
|
||||
.filter((TagNameFilter t) -> t.isSelected() && !t.isDisabled())
|
||||
.map((TagNameFilter t) -> String.valueOf(t.getTagName().getId()))
|
||||
.collect(Collectors.joining(", ", "(", ")"));
|
||||
return "(events.event_id == tags.event_id AND " //NON-NLS
|
||||
+ "tags.tag_name_id IN " + tagNameIDs + ") "; //NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static String getSQLWhere(HashHitsFilter filter) {
|
||||
if (filter.isActive()
|
||||
&& (filter.getSubFilters().isEmpty() == false)) {
|
||||
String hashSetIDs = filter.getSubFilters().stream()
|
||||
.filter((HashSetFilter t) -> t.isSelected() && !t.isDisabled())
|
||||
.map((HashSetFilter t) -> String.valueOf(t.getHashSetID()))
|
||||
.collect(Collectors.joining(", ", "(", ")"));
|
||||
return "(hash_set_hits.hash_set_id IN " + hashSetIDs + " AND hash_set_hits.event_id == events.event_id)"; //NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
}
|
||||
|
||||
private static String getSQLWhere(DataSourceFilter filter) {
|
||||
if (filter.isActive()) {
|
||||
return "(datasource_id = '" + filter.getDataSourceID() + "')"; //NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
}
|
||||
|
||||
private static String getSQLWhere(DataSourcesFilter filter) {
|
||||
return (filter.isActive()) ? "(datasource_id in (" //NON-NLS
|
||||
+ filter.getSubFilters().stream()
|
||||
.filter(AbstractFilter::isActive)
|
||||
.map((dataSourceFilter) -> String.valueOf(dataSourceFilter.getDataSourceID()))
|
||||
.collect(Collectors.joining(", ")) + "))" : "1";
|
||||
}
|
||||
|
||||
private static String getSQLWhere(TextFilter filter) {
|
||||
if (filter.isActive()) {
|
||||
if (StringUtils.isBlank(filter.getText())) {
|
||||
return "1";
|
||||
}
|
||||
String strippedFilterText = StringUtils.strip(filter.getText());
|
||||
return "((med_description like '%" + strippedFilterText + "%')" //NON-NLS
|
||||
+ " or (full_description like '%" + strippedFilterText + "%')" //NON-NLS
|
||||
+ " or (short_description like '%" + strippedFilterText + "%'))"; //NON-NLS
|
||||
} else {
|
||||
return "1";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* generate a sql where clause for the given type filter, while trying to be
|
||||
* as simple as possible to improve performance.
|
||||
*
|
||||
* @param typeFilter
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static String getSQLWhere(TypeFilter typeFilter) {
|
||||
if (typeFilter.isSelected() == false) {
|
||||
return "0";
|
||||
} else if (typeFilter.getEventType() instanceof RootEventType) {
|
||||
if (typeFilter.getSubFilters().stream()
|
||||
.allMatch(subFilter -> subFilter.isActive() && subFilter.getSubFilters().stream().allMatch(Filter::isActive))) {
|
||||
return "1"; //then collapse clause to true
|
||||
}
|
||||
}
|
||||
return "(sub_type IN (" + StringUtils.join(getActiveSubTypes(typeFilter), ",") + "))"; //NON-NLS
|
||||
}
|
||||
|
||||
private static List<Integer> getActiveSubTypes(TypeFilter filter) {
|
||||
if (filter.isActive()) {
|
||||
if (filter.getSubFilters().isEmpty()) {
|
||||
return Collections.singletonList(RootEventType.allTypes.indexOf(filter.getEventType()));
|
||||
} else {
|
||||
return filter.getSubFilters().stream().flatMap((Filter t) -> getActiveSubTypes((TypeFilter) t).stream()).collect(Collectors.toList());
|
||||
}
|
||||
} else {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* get a sqlite strftime format string that will allow us to group by the
|
||||
* requested period size. That is, with all info more granular than that
|
||||
* requested dropped (replaced with zeros).
|
||||
*
|
||||
* @param timeUnit the {@link TimeUnits} instance describing what
|
||||
* granularity to build a strftime string for
|
||||
*
|
||||
* @return a String formatted according to the sqlite strftime spec
|
||||
*
|
||||
* @see https://www.sqlite.org/lang_datefunc.html
|
||||
*/
|
||||
static String getStrfTimeFormat(@Nonnull TimeUnits timeUnit) {
|
||||
switch (timeUnit) {
|
||||
case YEARS:
|
||||
return "%Y-01-01T00:00:00"; // NON-NLS
|
||||
case MONTHS:
|
||||
return "%Y-%m-01T00:00:00"; // NON-NLS
|
||||
case DAYS:
|
||||
return "%Y-%m-%dT00:00:00"; // NON-NLS
|
||||
case HOURS:
|
||||
return "%Y-%m-%dT%H:00:00"; // NON-NLS
|
||||
case MINUTES:
|
||||
return "%Y-%m-%dT%H:%M:00"; // NON-NLS
|
||||
case SECONDS:
|
||||
default: //seconds - should never happen
|
||||
return "%Y-%m-%dT%H:%M:%S"; // NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
static String getDescriptionColumn(DescriptionLoD lod) {
|
||||
switch (lod) {
|
||||
case FULL:
|
||||
return "full_description"; //NON-NLS
|
||||
case MEDIUM:
|
||||
return "med_description"; //NON-NLS
|
||||
case SHORT:
|
||||
default:
|
||||
return "short_description"; //NON-NLS
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Private constructor: this is a static utility class and must not be
 * instantiated.
 */
private SQLHelper() {
}
}
|
0
Core/src/org/sleuthkit/autopsy/timeline/events/RefreshRequestedEvent.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/events/RefreshRequestedEvent.java
Normal file → Executable file
1
Core/src/org/sleuthkit/autopsy/timeline/events/TagsAddedEvent.java
Normal file → Executable file
1
Core/src/org/sleuthkit/autopsy/timeline/events/TagsAddedEvent.java
Normal file → Executable file
@ -22,6 +22,7 @@ import java.util.Set;
|
||||
|
||||
/**
|
||||
* A TagsUpdatedEvent for tags that have been added to events.
|
||||
* NOTE: This event is internal to timeline components
|
||||
*/
|
||||
public class TagsAddedEvent extends TagsUpdatedEvent {
|
||||
|
||||
|
1
Core/src/org/sleuthkit/autopsy/timeline/events/TagsDeletedEvent.java
Normal file → Executable file
1
Core/src/org/sleuthkit/autopsy/timeline/events/TagsDeletedEvent.java
Normal file → Executable file
@ -22,6 +22,7 @@ import java.util.Set;
|
||||
|
||||
/**
|
||||
* A TagsUpdatedEvent for tags that have been removed from events.
|
||||
* NOTE: This event is internal to timeline components
|
||||
*/
|
||||
public class TagsDeletedEvent extends TagsUpdatedEvent {
|
||||
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/events/TagsUpdatedEvent.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/events/TagsUpdatedEvent.java
Normal file → Executable file
92
Core/src/org/sleuthkit/autopsy/timeline/events/TimelineEventAddedEvent.java
Executable file
92
Core/src/org/sleuthkit/autopsy/timeline/events/TimelineEventAddedEvent.java
Executable file
@ -0,0 +1,92 @@
|
||||
/*
|
||||
* Autopsy Forensic Browser
|
||||
*
|
||||
* Copyright 2018 Basis Technology Corp.
|
||||
* Contact: carrier <at> sleuthkit <dot> org
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.sleuthkit.autopsy.timeline.events;
|
||||
|
||||
import java.util.logging.Level;
|
||||
import org.sleuthkit.autopsy.casemodule.Case;
|
||||
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
|
||||
import org.sleuthkit.autopsy.coreutils.Logger;
|
||||
import org.sleuthkit.autopsy.events.AutopsyEvent;
|
||||
import org.sleuthkit.datamodel.TskCoreException;
|
||||
import org.sleuthkit.datamodel.TimelineEvent;
|
||||
|
||||
/**
|
||||
* An AutopsyEvent broadcast when a TimelineEvent is added to the case.
|
||||
*/
|
||||
public class TimelineEventAddedEvent extends AutopsyEvent {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private static final Logger logger = Logger.getLogger(TimelineEventAddedEvent.class.getName());
|
||||
|
||||
private transient TimelineEvent addedEvent;
|
||||
|
||||
public TimelineEventAddedEvent(org.sleuthkit.datamodel.TimelineManager.TimelineEventAddedEvent event) {
|
||||
super(Case.Events.TIMELINE_EVENT_ADDED.name(), null, event.getAddedEvent().getEventID());
|
||||
addedEvent = event.getAddedEvent();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the TimelineEvent that was added.
|
||||
*
|
||||
* @return The TimelineEvent or null if there is an error retrieving the
|
||||
* TimelineEvent.
|
||||
*/
|
||||
@Override
|
||||
public TimelineEvent getNewValue() {
|
||||
/**
|
||||
* The addedEvent field is set in the constructor, but it is transient
|
||||
* so it will become null when the event is serialized for publication
|
||||
* over a network. Doing a lazy load of the TimelineEvent object
|
||||
* bypasses the issues related to the serialization and de-serialization
|
||||
* of TimelineEvent objects and may also save database round trips from
|
||||
* other nodes since subscribers to this event are often not interested
|
||||
* in the event data.
|
||||
*/
|
||||
if (null != addedEvent) {
|
||||
return addedEvent;
|
||||
}
|
||||
try {
|
||||
Long addedEventID = (Long) super.getNewValue();
|
||||
addedEvent = Case.getCurrentCaseThrows().getSleuthkitCase().getTimelineManager().getEventById(addedEventID);
|
||||
return addedEvent;
|
||||
} catch (NoCurrentCaseException | TskCoreException ex) {
|
||||
logger.log(Level.SEVERE, "Error doing lazy load for remote event", ex); //NON-NLS
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the TimelineEvent that was added.
|
||||
*
|
||||
* @return The TimelineEvent or null if there is an error retrieving the
|
||||
* TimelineEvent.
|
||||
*/
|
||||
public TimelineEvent getAddedEvent() {
|
||||
return getNewValue();
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the Id of the event that was added.
|
||||
*
|
||||
* @return The Id of the event that was added.
|
||||
*/
|
||||
public long getAddedEventID() {
|
||||
return (long) super.getNewValue();
|
||||
}
|
||||
}
|
0
Core/src/org/sleuthkit/autopsy/timeline/events/ViewInTimelineRequestedEvent.java
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/events/ViewInTimelineRequestedEvent.java
Normal file → Executable file
@ -3,9 +3,8 @@ EventNode.getAction.linkedFileMessage=There was a problem getting actions for th
|
||||
# {0} - maximum number of events to display
|
||||
# {1} - the number of events that is too many
|
||||
EventRoodNode.tooManyNode.displayName=Too many events to display. Maximum = {0}. But there are {1} to display.
|
||||
NodeProperty.displayName.baseType=Base Type
|
||||
NodeProperty.displayName.dateTime=Date/Time
|
||||
NodeProperty.displayName.description=Description
|
||||
NodeProperty.displayName.eventType=Event Type
|
||||
NodeProperty.displayName.icon=Icon
|
||||
NodeProperty.displayName.known=Known
|
||||
NodeProperty.displayName.subType=Sub Type
|
||||
|
0
Core/src/org/sleuthkit/autopsy/timeline/explorernodes/Bundle_ja.properties
Normal file → Executable file
0
Core/src/org/sleuthkit/autopsy/timeline/explorernodes/Bundle_ja.properties
Normal file → Executable file
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user