Revert "merge develop into Timeline event mgr"

Richard Cordovano 2019-04-19 13:16:26 -04:00 committed by GitHub
parent f8e359f0ba
commit 2afc59ee39
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2730 changed files with 9784 additions and 173933 deletions

View File

@@ -40,10 +40,6 @@
         <fileset dir="${thirdparty.dir}/Tesseract-OCR"/>
     </copy>
-    <!--Copy Plaso to release-->
-    <copy todir="${basedir}/release/plaso" >
-        <fileset dir="${thirdparty.dir}/plaso"/>
-    </copy>
     <!--Copy GStreamer to release-->
     <copy todir="${basedir}/release/gstreamer" >
         <fileset dir="${thirdparty.dir}/gstreamer"/>

View File

@@ -230,10 +230,15 @@ AddImageWizardIngestConfigPanel.dsProcDone.errs.text=*Errors encountered in addi
 AddImageWizardIngestConfigVisual.getName.text=Configure Ingest Modules
 AddImageWizardIterator.stepXofN=Step {0} of {1}
 AddLocalFilesTask.localFileAdd.progress.text=Adding: {0}/{1}
-Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open!
+Case.getCurCase.exception.noneOpen=Cannot get the current case; there is no case open\!
 Case.open.msgDlg.updated.msg=Updated case database schema.\nA backup copy of the database with the following path has been made:\n {0}
 Case.open.msgDlg.updated.title=Case Database Schema Update
-Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \nthis case are missing. Would you like to search for them now?\nPreviously, the image was located at:\n{0}\nPlease note that you will still be able to browse directories and generate reports\nif you choose No, but you will not be able to view file content or run the ingest process.
+Case.checkImgExist.confDlg.doesntExist.msg=One of the images associated with \n\
+this case are missing. Would you like to search for them now?\n\
+Previously, the image was located at:\n\
+{0}\n\
+Please note that you will still be able to browse directories and generate reports\n\
+if you choose No, but you will not be able to view file content or run the ingest process.
 Case.checkImgExist.confDlg.doesntExist.title=Missing Image
 Case.addImg.exception.msg=Error adding image to the case
 Case.updateCaseName.exception.msg=Error while trying to update the case name.
@@ -252,9 +257,12 @@ Case.GetCaseTypeGivenPath.Failure=Unable to get case type
 Case.metaDataFileCorrupt.exception.msg=The case metadata file (.aut) is corrupted.
 Case.deleteReports.deleteFromDiskException.log.msg=Unable to delete the report from the disk.
 Case.deleteReports.deleteFromDiskException.msg=Unable to delete the report {0} from the disk.\nYou may manually delete it from {1}
-CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \nCase Name: {0}\nCase Directory: {1}
+CaseDeleteAction.closeConfMsg.text=Are you sure want to close and delete this case? \n\
+Case Name: {0}\n\
+Case Directory: {1}
 CaseDeleteAction.closeConfMsg.title=Warning: Closing the Current Case
-CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\nClose the folder and file and try again or you can delete the case manually.
+CaseDeleteAction.msgDlg.fileInUse.msg=The delete action cannot be fully completed because the folder or file in it is open by another program.\n\n\
+Close the folder and file and try again or you can delete the case manually.
 CaseDeleteAction.msgDlg.fileInUse.title=Error: Folder In Use
 CaseDeleteAction.msgDlg.caseDelete.msg=Case {0} has been deleted.
 CaseOpenAction.autFilter.title={0} Case File ( {1})
@@ -286,7 +294,8 @@ NewCaseWizardAction.databaseProblem1.text=Cannot open database. Cancelling case
 NewCaseWizardAction.databaseProblem2.text=Error
 NewCaseWizardPanel1.validate.errMsg.invalidSymbols=The Case Name cannot contain any of the following symbols: \\ / : * ? " < > |
 NewCaseWizardPanel1.validate.errMsg.dirExists=Case directory ''{0}'' already exists.
-NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\nDo you want to create that directory?
+NewCaseWizardPanel1.validate.confMsg.createDir.msg=The base directory "{0}" does not exist. \n\n\
+Do you want to create that directory?
 NewCaseWizardPanel1.validate.confMsg.createDir.title=Create directory
 NewCaseWizardPanel1.validate.errMsg.cantCreateParDir.msg=Error: Could not create case parent directory {0}
 NewCaseWizardPanel1.validate.errMsg.prevCreateBaseDir.msg=Prevented from creating base directory {0}
@@ -322,15 +331,15 @@ StartupWindow.title.text=Welcome
 UpdateRecentCases.menuItem.clearRecentCases.text=Clear Recent Cases
 UpdateRecentCases.menuItem.empty=-Empty-
 AddImageWizardIngestConfigPanel.CANCEL_BUTTON.text=Cancel
-NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on "C:" drive
-NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on "C:" drive. Case folder is created on the target system
+NewCaseVisualPanel1.CaseFolderOnCDriveError.text=Warning: Path to multi-user case folder is on \"C:\" drive
+NewCaseVisualPanel1.CaseFolderOnInternalDriveWindowsError.text=Warning: Path to case folder is on \"C:\" drive. Case folder is created on the target system
 NewCaseVisualPanel1.CaseFolderOnInternalDriveLinuxError.text=Warning: Path to case folder is on the target system. Create case folder in mounted drive.
 CollaborationMonitor.addingDataSourceStatus.msg={0} adding data source
 CollaborationMonitor.analyzingDataSourceStatus.msg={0} analyzing {1}
 MissingImageDialog.lbWarning.text=
 MissingImageDialog.lbWarning.toolTipText=
 NewCaseVisualPanel1.caseParentDirWarningLabel.text=
-NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-user\t\t
+NewCaseVisualPanel1.multiUserCaseRadioButton.text=Multi-user
 NewCaseVisualPanel1.singleUserCaseRadioButton.text=Single-user
 NewCaseVisualPanel1.caseTypeLabel.text=Case Type:
 SingleUserCaseConverter.BadDatabaseFileName=Database file does not exist!

View File

@@ -19,7 +19,6 @@
 package org.sleuthkit.autopsy.casemodule;
 import com.google.common.annotations.Beta;
-import com.google.common.eventbus.Subscribe;
 import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData;
 import java.awt.Frame;
 import java.awt.event.ActionEvent;
@@ -69,6 +68,7 @@ import org.openide.windows.WindowManager;
 import org.sleuthkit.autopsy.actions.OpenOutputFolderAction;
 import org.sleuthkit.autopsy.appservices.AutopsyService;
 import org.sleuthkit.autopsy.appservices.AutopsyService.CaseContext;
+import static org.sleuthkit.autopsy.casemodule.Bundle.*;
 import org.sleuthkit.autopsy.casemodule.CaseMetadata.CaseMetadataException;
 import org.sleuthkit.autopsy.casemodule.datasourcesummary.DataSourceSummaryAction;
 import org.sleuthkit.autopsy.casemodule.events.AddingDataSourceEvent;
@@ -108,17 +108,12 @@ import org.sleuthkit.autopsy.events.AutopsyEventException;
 import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
 import org.sleuthkit.autopsy.ingest.IngestJob;
 import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.ingest.IngestServices;
-import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
 import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
 import org.sleuthkit.autopsy.progress.LoggingProgressIndicator;
 import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator;
 import org.sleuthkit.autopsy.progress.ProgressIndicator;
 import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
-import org.sleuthkit.autopsy.timeline.events.TimelineEventAddedEvent;
-import org.sleuthkit.datamodel.Blackboard;
-import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardArtifactTag;
 import org.sleuthkit.datamodel.CaseDbConnectionInfo;
 import org.sleuthkit.datamodel.Content;
@@ -126,7 +121,6 @@ import org.sleuthkit.datamodel.ContentTag;
 import org.sleuthkit.datamodel.Image;
 import org.sleuthkit.datamodel.Report;
 import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TimelineManager;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
@@ -161,7 +155,6 @@ public class Case {
     private CollaborationMonitor collaborationMonitor;
     private Services caseServices;
     private boolean hasDataSources;
-    private final TSKCaseRepublisher tskEventForwarder = new TSKCaseRepublisher();
     /*
      * Get a reference to the main window of the desktop application to use to
@@ -395,43 +388,13 @@ public class Case {
          */
         TAG_DEFINITION_CHANGED,
         /**
-         * An timeline event, such mac time or web activity was added to the
-         * current case. The old value is null and the new value is the
-         * TimelineEvent that was added.
-         */
-        TIMELINE_EVENT_ADDED,
-        /* An item in the central repository has had its comment
-         * modified. The old value is null, the new value is string for current
-         * comment.
+         * An item in the central repository has had its comment modified. The
+         * old value is null, the new value is string for current comment.
          */
         CR_COMMENT_CHANGED;
     };
-    private final class TSKCaseRepublisher {
-        @Subscribe
-        public void rebroadcastTimelineEventCreated(TimelineManager.TimelineEventAddedEvent event) {
-            eventPublisher.publish(new TimelineEventAddedEvent(event));
-        }
-        @Subscribe
-        public void rebroadcastArtifactsPosted(Blackboard.ArtifactsPostedEvent event) {
-            for (BlackboardArtifact.Type artifactType : event.getArtifactTypes()) {
-                /*
-                 * fireModuleDataEvent is deprecated so module writers don't use
-                 * it (they should use Blackboard.postArtifact(s) instead), but
-                 * we still need a way to rebroadcast the ArtifactsPostedEvent
-                 * as a ModuleDataEvent.
-                 */
-                IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
-                        event.getModuleName(),
-                        artifactType,
-                        event.getArtifacts(artifactType)));
-            }
-        }
-    }
     /**
      * Adds a subscriber to all case events. To subscribe to only specific
      * events, use one of the overloads of addEventSubscriber.
@@ -536,8 +499,8 @@ public class Case {
      */
     public static boolean isValidName(String caseName) {
         return !(caseName.contains("\\") || caseName.contains("/") || caseName.contains(":")
                 || caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
                 || caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
     }
     /**
@@ -2165,7 +2128,7 @@
         } else if (UserPreferences.getIsMultiUserModeEnabled()) {
             caseDb = SleuthkitCase.openCase(databaseName, UserPreferences.getDatabaseConnectionInfo(), metadata.getCaseDirectory());
         } else {
-            throw new CaseActionException(Bundle.Case_open_exception_multiUserCaseNotEnabled());
+            throw new CaseActionException(Case_open_exception_multiUserCaseNotEnabled());
         }
     } catch (TskUnsupportedSchemaVersionException ex) {
         throw new CaseActionException(Bundle.Case_exceptionMessage_unsupportedSchemaVersionMessage(ex.getLocalizedMessage()), ex);
@@ -2187,8 +2150,6 @@
     private void openCaseLevelServices(ProgressIndicator progressIndicator) {
         progressIndicator.progress(Bundle.Case_progressMessage_openingCaseLevelServices());
         this.caseServices = new Services(caseDb);
-        caseDb.registerForEvents(tskEventForwarder);
     }
     /**
@@ -2454,7 +2415,6 @@
          */
         if (null != caseDb) {
             progressIndicator.progress(Bundle.Case_progressMessage_closingCaseDatabase());
-            caseDb.unregisterForEvents(tskEventForwarder);
            caseDb.close();
         }
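The TSKCaseRepublisher removed above is a Guava EventBus subscriber that forwards low-level SleuthkitCase events to the application-level event publisher. A minimal, self-contained sketch of that forwarding idiom, using plain Guava EventBus with hypothetical stand-in names (not the Autopsy API):

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;

public class RepublisherSketch {

    // Stands in for the case-level publisher (the real code uses AutopsyEventPublisher).
    static final EventBus APP_BUS = new EventBus("app");

    // Hypothetical event type standing in for TimelineManager.TimelineEventAddedEvent.
    static final class DbEvent {
        final String description;
        DbEvent(String description) { this.description = description; }
    }

    // Forwards events from a lower-level bus to the application bus, the way
    // TSKCaseRepublisher republished database events as case events.
    static final class Republisher {
        @Subscribe
        public void forward(DbEvent event) {
            APP_BUS.post(event);
        }
    }

    public static void main(String[] args) {
        EventBus dbBus = new EventBus("db");   // stands in for the SleuthkitCase event bus
        dbBus.register(new Republisher());     // analogous to caseDb.registerForEvents(tskEventForwarder)
        APP_BUS.register(new Object() {
            @Subscribe
            public void onEvent(DbEvent e) {
                System.out.println("republished: " + e.description);
            }
        });
        dbBus.post(new DbEvent("timeline event added"));
    }
}

The revert also drops the matching caseDb.unregisterForEvents(...) call in the close path; with plain Guava the analogue is EventBus.unregister(subscriber).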

View File

@@ -19,22 +19,24 @@
 package org.sleuthkit.autopsy.casemodule.services;
 import java.io.Closeable;
+import java.io.IOException;
+import org.openide.util.Lookup;
+import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskDataException;
 /**
  * A representation of the blackboard, a place where artifacts and their
  * attributes are posted.
  *
  * NOTE: This API of this class is under development.
- *
- * @deprecated Use org.sleuthkit.datamodel.Blackboard instead.
  */
-@Deprecated
 public final class Blackboard implements Closeable {
-    private org.sleuthkit.datamodel.Blackboard delegate;
+    private SleuthkitCase caseDb;
     /**
      * Constructs a representation of the blackboard, a place where artifacts
@@ -43,24 +45,27 @@ public final class Blackboard implements Closeable {
      * @param casedb The case database.
      */
     Blackboard(SleuthkitCase casedb) {
-        this.delegate = casedb.getBlackboard();
+        this.caseDb = casedb;
     }
     /**
-     * Indexes the text associated with an artifact.
+     * Indexes the text associated with the an artifact.
      *
      * @param artifact The artifact to be indexed.
      *
      * @throws BlackboardException If there is a problem indexing the artifact.
      */
     public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
-        if (null == delegate) {
+        if (null == caseDb) {
             throw new BlackboardException("Blackboard has been closed");
         }
+        KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
+        if (null == searchService) {
+            throw new BlackboardException("Keyword search service not found");
+        }
         try {
-            delegate.postArtifact(artifact, "");
-        } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
+            searchService.index(artifact);
+        } catch (TskCoreException ex) {
             throw new BlackboardException("Error indexing artifact", ex);
         }
     }
@@ -78,14 +83,19 @@
      *          artifact type.
      */
     public synchronized BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
-        if (null == delegate) {
+        if (null == caseDb) {
             throw new BlackboardException("Blackboard has been closed");
         }
         try {
-            return delegate.getOrAddArtifactType(typeName, displayName);
-        } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
-            throw new BlackboardException("Delegate org.sleuthkit.datamodel.Blackboard threw exception.", ex);
+            return caseDb.addBlackboardArtifactType(typeName, displayName);
+        } catch (TskDataException typeExistsEx) {
+            try {
+                return caseDb.getArtifactType(typeName);
+            } catch (TskCoreException ex) {
+                throw new BlackboardException("Failed to get or add artifact type", ex);
+            }
+        } catch (TskCoreException ex) {
+            throw new BlackboardException("Failed to get or add artifact type", ex);
         }
     }
@@ -103,23 +113,30 @@
      *          attribute type.
      */
     public synchronized BlackboardAttribute.Type getOrAddAttributeType(String typeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws BlackboardException {
-        if (null == delegate) {
+        if (null == caseDb) {
             throw new BlackboardException("Blackboard has been closed");
         }
         try {
-            return delegate.getOrAddAttributeType(typeName, valueType, displayName);
-        } catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
-            throw new BlackboardException("Delegate org.sleuthkit.datamodel.Blackboard threw exception.", ex);
+            return caseDb.addArtifactAttributeType(typeName, valueType, displayName);
+        } catch (TskDataException typeExistsEx) {
+            try {
+                return caseDb.getAttributeType(typeName);
+            } catch (TskCoreException ex) {
+                throw new BlackboardException("Failed to get or add attribute type", ex);
+            }
+        } catch (TskCoreException ex) {
+            throw new BlackboardException("Failed to get or add attribute type", ex);
        }
     }
     /**
      * Closes the blackboard.
      *
+     * @throws IOException If there is a problem closing the blackboard.
      */
     @Override
-    public synchronized void close() {
-        delegate = null;
+    public synchronized void close() throws IOException {
+        caseDb = null;
     }
     /**
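The getOrAddArtifactType/getOrAddAttributeType bodies restored above follow an add-then-fall-back idiom: optimistically add the type and, if the database reports that it already exists (TskDataException), look up the existing definition instead. A compact illustration of the same idiom with hypothetical stand-in types (not the Autopsy or Sleuth Kit API):

final class GetOrAddSketch {

    static final class AlreadyExistsException extends Exception {}

    // Stand-in for the subset of SleuthkitCase used by the wrapper above.
    interface TypeStore {
        String add(String name) throws AlreadyExistsException;
        String get(String name);
    }

    static String getOrAdd(TypeStore store, String name) {
        try {
            return store.add(name);           // like caseDb.addBlackboardArtifactType(...)
        } catch (AlreadyExistsException ex) { // like catching TskDataException
            return store.get(name);           // like caseDb.getArtifactType(typeName)
        }
    }
}

Note the race this tolerates: two callers may both fail the add, but both then resolve to the same stored definition; the restored methods are additionally synchronized, which serializes callers within a single Blackboard instance.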

View File

@@ -2,7 +2,7 @@
  *
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2018 Basis Technology Corp.
+ * Copyright 2011-2017 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  * Copyright 2012 42six Solutions.
  * Contact: aebadirad <at> 42six <dot> com
@@ -35,7 +35,7 @@ import org.sleuthkit.datamodel.SleuthkitCase;
  */
 public class Services implements Closeable {
-    private final List<Closeable> servicesList = new ArrayList<>();
+    private final List<Closeable> services = new ArrayList<>();
     private final FileManager fileManager;
     private final TagsManager tagsManager;
     private final KeywordSearchService keywordSearchService;
@@ -49,19 +49,19 @@
      */
     public Services(SleuthkitCase caseDb) {
         fileManager = new FileManager(caseDb);
-        servicesList.add(fileManager);
+        services.add(fileManager);
         tagsManager = new TagsManager(caseDb);
-        servicesList.add(tagsManager);
+        services.add(tagsManager);
         //This lookup fails in the functional test code. See JIRA-4571 for details.
         //For the time being, the closing of this service at line 108 will be made
         //null safe so that the functional tests run with no issues.
         keywordSearchService = Lookup.getDefault().lookup(KeywordSearchService.class);
-        servicesList.add(keywordSearchService);
+        services.add(keywordSearchService);
         blackboard = new Blackboard(caseDb);
-        servicesList.add(blackboard);
+        services.add(blackboard);
     }
     /**
@@ -95,10 +95,7 @@
      * Gets the blackboard service for the current case.
      *
      * @return The blackboard service for the current case.
-     *
-     * @deprecated Use SleuthkitCase.getBlackboard() instead.
      */
-    @Deprecated
     public Blackboard getBlackboard() {
         return blackboard;
     }
@@ -110,7 +107,7 @@
      */
     @Override
     public void close() throws IOException {
-        for (Closeable service : servicesList) {
+        for (Closeable service : services) {
             if(service != null) {
                 service.close();
             }
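The comment restored above (JIRA-4571) explains the null check in close(): the KeywordSearchService lookup can fail in functional tests, so a null entry may sit in the services list. A self-contained sketch of that null-tolerant aggregate close (hypothetical class, not the Autopsy API):

import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

final class CloseAllSketch implements Closeable {

    private final List<Closeable> services = new ArrayList<>();

    // A service obtained from a Lookup may legitimately be null; store it anyway
    // so construction never fails, and tolerate it on close.
    void add(Closeable service) {
        services.add(service);
    }

    @Override
    public void close() throws IOException {
        for (Closeable service : services) {
            if (service != null) {
                service.close();
            }
        }
    }
}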

View File

@@ -5,7 +5,10 @@ CentralRepoCommentDialog.title.addEditCentralRepoComment=Add/Edit Central Reposi
 OpenIDE-Module-Name=Central Repository
 OpenIDE-Module-Display-Category=Ingest Module
 OpenIDE-Module-Short-Description=Correlation Engine Ingest Module
-OpenIDE-Module-Long-Description=Correlation Engine ingest module and central database. \n\nThe Correlation Engine ingest module stores attributes of artifacts matching selected correlation types into a central database.\nStored attributes are used in future cases to correlate and analyzes files and artifacts during ingest.
+OpenIDE-Module-Long-Description=\
+    Correlation Engine ingest module and central database. \n\n\
+    The Correlation Engine ingest module stores attributes of artifacts matching selected correlation types into a central database.\n\
+    Stored attributes are used in future cases to correlate and analyzes files and artifacts during ingest.
 CentralRepoCommentDialog.commentLabel.text=Comment:
 CentralRepoCommentDialog.okButton.text=&OK
 CentralRepoCommentDialog.cancelButton.text=C&ancel

View File

@@ -23,7 +23,6 @@ import java.beans.PropertyChangeEvent;
 import java.beans.PropertyChangeListener;
 import static java.lang.Boolean.FALSE;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -35,40 +34,35 @@ import org.apache.commons.lang3.StringUtils;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.coreutils.ThreadUtils;
 import org.sleuthkit.autopsy.ingest.IngestManager;
+import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
+import org.sleuthkit.autopsy.coreutils.ThreadUtils;
 import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.Image;
 import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
 /**
  * Listen for ingest events and update entries in the Central Repository
  * database accordingly
  */
-@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Correlation Engine"})
 public class IngestEventsListener {
     private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
-    private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
     final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
     private static int correlationModuleInstanceCount;
@@ -195,20 +189,44 @@
     }
     @NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
-        "IngestEventsListener.prevCaseComment.text=Previous Case: "})
+        "IngestEventsListener.prevCaseComment.text=Previous Case: ",
+        "IngestEventsListener.ingestmodule.name=Correlation Engine"})
     static private void postCorrelatedBadArtifactToBlackboard(BlackboardArtifact bbArtifact, List<String> caseDisplayNames) {
-        Collection<BlackboardAttribute> attributes = Arrays.asList(
-                new BlackboardAttribute(
-                        TSK_SET_NAME, MODULE_NAME,
-                        Bundle.IngestEventsListener_prevTaggedSet_text()),
-                new BlackboardAttribute(
-                        TSK_COMMENT, MODULE_NAME,
-                        Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
-                new BlackboardAttribute(
-                        TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
-                        bbArtifact.getArtifactID()));
-        postArtifactToBlackboard(bbArtifact, attributes);
+        try {
+            String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
+                    Bundle.IngestEventsListener_prevTaggedSet_text()));
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
+                    Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
+            SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
+            AbstractFile abstractFile = tskCase.getAbstractFileById(bbArtifact.getObjectID());
+            org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
+            // Create artifact if it doesn't already exist.
+            if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
+                BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
+                tifArtifact.addAttributes(attributes);
+                try {
+                    // index the artifact for keyword search
+                    Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+                    blackboard.indexArtifact(tifArtifact);
+                } catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
+                    LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
+                }
+                // fire event to notify UI of this new artifact
+                IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
+            }
+        } catch (TskCoreException ex) {
+            LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
+        } catch (IllegalStateException ex) {
+            LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
+        }
     }
     /**
@@ -222,32 +240,34 @@
         "# {1} - count",
         "IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
     static private void postCorrelatedPreviousArtifactToBlackboard(BlackboardArtifact bbArtifact) {
-        Collection<BlackboardAttribute> attributes = Arrays.asList(
-                new BlackboardAttribute(
-                        TSK_SET_NAME, MODULE_NAME,
-                        Bundle.IngestEventsListener_prevExists_text()),
-                new BlackboardAttribute(
-                        TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
-                        bbArtifact.getArtifactID()));
-        postArtifactToBlackboard(bbArtifact, attributes);
-    }
-    private static void postArtifactToBlackboard(BlackboardArtifact bbArtifact, Collection<BlackboardAttribute> attributes) {
         try {
+            String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+            BlackboardAttribute att = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
+                    Bundle.IngestEventsListener_prevExists_text());
+            attributes.add(att);
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
             SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
-            AbstractFile abstractFile = tskCase.getAbstractFileById(bbArtifact.getObjectID());
-            Blackboard blackboard = tskCase.getBlackboard();
+            AbstractFile abstractFile = bbArtifact.getSleuthkitCase().getAbstractFileById(bbArtifact.getObjectID());
+            org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
             // Create artifact if it doesn't already exist.
-            if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
-                BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
+            if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
+                BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
                 tifArtifact.addAttributes(attributes);
                 try {
                     // index the artifact for keyword search
-                    blackboard.postArtifact(tifArtifact, MODULE_NAME);
-                } catch (Blackboard.BlackboardException ex) {
+                    Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
+                    blackboard.indexArtifact(tifArtifact);
+                } catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
                     LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
                 }
+                // fire event to notify UI of this new artifact
+                IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
             }
         } catch (TskCoreException ex) {
             LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
@@ -420,8 +440,8 @@
         private final boolean createCorrelationAttributes;
         private DataAddedTask(EamDb db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes) {
-            this.dbManager = db;
-            this.event = evt;
+            dbManager = db;
+            event = evt;
             this.flagNotableItemsEnabled = flagNotableItemsEnabled;
             this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
             this.createCorrelationAttributes = createCorrelationAttributes;
@@ -463,7 +483,7 @@
                     }
                 }
                 if (flagPreviousItemsEnabled
                         && (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
                         || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
                         || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
                        || eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID

View File

@@ -19,44 +19,41 @@
 package org.sleuthkit.autopsy.centralrepository.ingestmodule;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
 import org.openide.util.NbBundle.Messages;
+import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
-import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
 import org.sleuthkit.autopsy.core.RuntimeProperties;
-import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
-import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
-import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
+import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
 import org.sleuthkit.datamodel.HashUtility;
-import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
+import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
+import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
+import org.sleuthkit.datamodel.SleuthkitCase;
 /**
  * Ingest module for inserting entries into the Central Repository database on
@@ -66,8 +63,6 @@ import org.sleuthkit.datamodel.TskData;
     "CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
 final class CentralRepoIngestModule implements FileIngestModule {
-    private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
     static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
     static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = true;
     static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
@@ -79,10 +74,10 @@
     private long jobId;
     private CorrelationCase eamCase;
     private CorrelationDataSource eamDataSource;
-    private Blackboard blackboard;
     private CorrelationAttributeInstance.Type filesType;
     private final boolean flagTaggedNotableItems;
     private final boolean flagPreviouslySeenDevices;
+    private Blackboard blackboard;
     private final boolean createCorrelationProperties;
     /**
@@ -109,7 +104,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
         }
         try {
-            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+            blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
         } catch (NoCurrentCaseException ex) {
             logger.log(Level.SEVERE, "Exception while getting open case.", ex);
             return ProcessResult.ERROR;
@@ -163,7 +158,7 @@
             }
         }
         // insert this file into the central repository
         if (createCorrelationProperties) {
             try {
                 CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
@@ -276,7 +271,7 @@
         // Don't allow sqlite central repo databases to be used for multi user cases
         if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
                 && (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
             logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
             throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
         }
@@ -313,7 +308,7 @@
         // if we are the first thread / module for this job, then make sure the case
         // and image exist in the DB before we associate artifacts with it.
         if (refCounter.incrementAndGet(jobId)
                 == 1) {
             // ensure we have this data source in the EAM DB
             try {
                 if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
@@ -335,32 +330,41 @@
      */
     private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames) {
-        Collection<BlackboardAttribute> attributes = Arrays.asList(
-                new BlackboardAttribute(
-                        TSK_SET_NAME, MODULE_NAME,
-                        Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
-                new BlackboardAttribute(
-                        TSK_COMMENT, MODULE_NAME,
-                        Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))));
         try {
+            String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
+                    Bundle.CentralRepoIngestModule_prevTaggedSet_text()));
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
+                    Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
+            SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+            org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
             // Create artifact if it doesn't already exist.
-            if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
-                BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
+            if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+                BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
                 tifArtifact.addAttributes(attributes);
                 try {
                     // index the artifact for keyword search
-                    blackboard.postArtifact(tifArtifact, MODULE_NAME);
+                    blackboard.indexArtifact(tifArtifact);
                 } catch (Blackboard.BlackboardException ex) {
                     logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
                 }
                 // send inbox message
                 sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash());
+                // fire event to notify UI of this new artifact
+                services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
             }
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
         } catch (IllegalStateException ex) {
             logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
+        } catch (NoCurrentCaseException ex) {
+            logger.log(Level.SEVERE, "Exception while getting open case.", ex); // NON-NLS
         }
     }
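Both postCorrelated... methods restored above (here and in IngestEventsListener) share the same posting flow: check whether an identical interesting-item artifact already exists, and only then create it, index it for keyword search, and notify the UI. A condensed sketch of that flow with hypothetical stand-in types (not the Autopsy API):

import java.util.List;

final class PostIfAbsentSketch {

    // Minimal stand-in for the blackboard operations used above.
    interface Board {
        boolean artifactExists(long fileId, String type, List<String> attributes);
        long newArtifact(long fileId, String type, List<String> attributes);
        void index(long artifactId);      // keyword-search indexing step
        void notifyUi(String type);       // fireModuleDataEvent analogue
    }

    static void postIfAbsent(Board board, long fileId, List<String> attributes) {
        final String type = "TSK_INTERESTING_FILE_HIT";
        // Guard against posting duplicate artifacts for the same file and attributes.
        if (!board.artifactExists(fileId, type, attributes)) {
            long artifactId = board.newArtifact(fileId, type, attributes);
            board.index(artifactId);
            board.notifyUi(type);
        }
    }
}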

View File

@@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.commonpropertiessearch;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -31,7 +30,6 @@ import java.util.stream.Stream;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
-import org.sleuthkit.autopsy.datamodel.utils.FileTypeUtils;
 import org.sleuthkit.datamodel.TskCoreException;
@@ -149,7 +147,7 @@
         }
         return instanceCollatedCommonFiles;
     }
     /*
      * The set of the MIME types that will be checked for extension mismatches
      * when checkType is ONLY_MEDIA. ".jpg", ".jpeg", ".png", ".psd", ".nef",
@@ -213,7 +211,6 @@
             "application/vnd.oasis.opendocument.text" //NON-NLS
     ).collect(Collectors.toSet());
-
     /**
      * @return the filterByMedia
      */
@@ -241,16 +238,4 @@
     void setFilterByDoc(boolean filterByDoc) {
         this.filterByDoc = filterByDoc;
     }
-    Set<String> getMimeTypesToFilterOn() {
-        Set<String> mimeTypesToFilterOn = new HashSet<>();
-        if (isFilterByMedia()) {
-            mimeTypesToFilterOn.addAll(FileTypeUtils.FileTypeCategory.VISUAL.getMediaTypes());
-        }
-        if (isFilterByDoc()) {
-            mimeTypesToFilterOn.addAll(FileTypeUtils.FileTypeCategory.DOCUMENTS.getMediaTypes());
-        }
-        return mimeTypesToFilterOn;
-    }
 }

View File

@@ -20,14 +20,16 @@
 package org.sleuthkit.autopsy.commonpropertiessearch;
 import java.sql.SQLException;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
 import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
+import static org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeSearcher.MEDIA_PICS_VIDEO_MIME_TYPES;
 /**
  * Algorithm which finds files anywhere in the Central Repo which also occur in
@@ -54,7 +56,13 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut
     @Override
     public CommonAttributeCountSearchResults findMatchesByCount() throws TskCoreException, NoCurrentCaseException, SQLException, EamDbException {
         InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(corAttrType);
-        Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn();
+        Set<String> mimeTypesToFilterOn = new HashSet<>();
+        if (isFilterByMedia()) {
+            mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES);
+        }
+        if (isFilterByDoc()) {
+            mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES);
+        }
         Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn);
         return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType);
     }
@@ -62,7 +70,13 @@ public class AllInterCaseCommonAttributeSearcher extends InterCaseCommonAttribut
     @Override
     public CommonAttributeCaseSearchResults findMatchesByCase() throws TskCoreException, NoCurrentCaseException, SQLException, EamDbException {
         InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(corAttrType);
-        Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn();
+        Set<String> mimeTypesToFilterOn = new HashSet<>();
+        if (isFilterByMedia()) {
+            mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES);
+        }
+        if (isFilterByDoc()) {
+            mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES);
+        }
         Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn);
         return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType);
     }

View File

@ -19,10 +19,10 @@
*/ */
package org.sleuthkit.autopsy.commonpropertiessearch; package org.sleuthkit.autopsy.commonpropertiessearch;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
/** /**
* Provides logic for selecting common files from all data sources and all cases * Provides logic for selecting common files from all data sources and all cases

View File

@ -23,9 +23,9 @@ import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
@ -45,6 +45,8 @@ import org.sleuthkit.datamodel.TskCoreException;
@SuppressWarnings("PMD.AbstractNaming") @SuppressWarnings("PMD.AbstractNaming")
public abstract class IntraCaseCommonAttributeSearcher extends AbstractCommonAttributeSearcher { public abstract class IntraCaseCommonAttributeSearcher extends AbstractCommonAttributeSearcher {
private static final String FILTER_BY_MIME_TYPES_WHERE_CLAUSE = " and mime_type in (%s)"; //NON-NLS // where %s is csv list of mime_types to filter on
private final Map<Long, String> dataSourceIdToNameMap; private final Map<Long, String> dataSourceIdToNameMap;
/** /**
@ -156,14 +158,25 @@ public abstract class IntraCaseCommonAttributeSearcher extends AbstractCommonAtt
* to filter on were given. * to filter on were given.
*/ */
String determineMimeTypeFilter() { String determineMimeTypeFilter() {
Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn();
if (mimeTypesToFilterOn.isEmpty()) { Set<String> mimeTypesToFilterOn = new HashSet<>();
return ""; String mimeTypeString = "";
} else { if (isFilterByMedia()) {
String mimeTypeString = mimeTypesToFilterOn.stream() mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES);
.map(mimeType -> "'" + mimeType + "'")
.collect(Collectors.joining(","));
return String.format(" and mime_type in (%s)", new Object[]{mimeTypeString});
} }
if (isFilterByDoc()) {
mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES);
}
StringBuilder mimeTypeFilter = new StringBuilder(mimeTypesToFilterOn.size());
if (!mimeTypesToFilterOn.isEmpty()) {
for (String mimeType : mimeTypesToFilterOn) {
mimeTypeFilter.append(SINGLE_QUOTE).append(mimeType).append(SINGLE_QUOTE_COMMA);
}
mimeTypeString = mimeTypeFilter.toString().substring(0, mimeTypeFilter.length() - 1);
mimeTypeString = String.format(FILTER_BY_MIME_TYPES_WHERE_CLAUSE, new Object[]{mimeTypeString});
}
return mimeTypeString;
} }
static final String SINGLE_QUOTE_COMMA = "',";
static final String SINGLE_QUOTE = "'";
} }
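
Editor's note: both sides of this hunk build the same SQL fragment; the left uses Collectors.joining, the right a StringBuilder with a trailing-comma trim. A runnable sketch of the stream form (the MIME types are illustrative):

import java.util.Set;
import java.util.stream.Collectors;

// Runnable sketch of the stream-based IN-clause construction on the left of
// the hunk above; quoting each value and joining with commas replaces the
// manual StringBuilder loop and substring trim.
public class MimeTypeWhereClauseDemo {
    static String determineMimeTypeFilter(Set<String> mimeTypesToFilterOn) {
        if (mimeTypesToFilterOn.isEmpty()) {
            return "";
        }
        String csv = mimeTypesToFilterOn.stream()
                .map(mimeType -> "'" + mimeType + "'")
                .collect(Collectors.joining(","));
        return String.format(" and mime_type in (%s)", csv);
    }

    public static void main(String[] args) {
        // prints e.g.:  and mime_type in ('image/png','application/pdf')
        System.out.println(determineMimeTypeFilter(Set.of("image/png", "application/pdf")));
    }
}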

View File

@ -20,15 +20,17 @@
package org.sleuthkit.autopsy.commonpropertiessearch; package org.sleuthkit.autopsy.commonpropertiessearch;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase; import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException; import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance.Type;
import static org.sleuthkit.autopsy.commonpropertiessearch.AbstractCommonAttributeSearcher.MEDIA_PICS_VIDEO_MIME_TYPES;
/** /**
* *
@ -50,7 +52,7 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri
* @throws EamDbException * @throws EamDbException
*/ */
public SingleInterCaseCommonAttributeSearcher(int correlationCaseId, boolean filterByMediaMimeType, public SingleInterCaseCommonAttributeSearcher(int correlationCaseId, boolean filterByMediaMimeType,
boolean filterByDocMimeType, Type corAttrType, int percentageThreshold) throws EamDbException { boolean filterByDocMimeType, Type corAttrType, int percentageThreshold) throws EamDbException {
super(filterByMediaMimeType, filterByDocMimeType, corAttrType, percentageThreshold); super(filterByMediaMimeType, filterByDocMimeType, corAttrType, percentageThreshold);
this.corrleationCaseId = correlationCaseId; this.corrleationCaseId = correlationCaseId;
@ -74,7 +76,13 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri
CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId); CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId);
this.correlationCaseName = correlationCase.getDisplayName(); this.correlationCaseName = correlationCase.getDisplayName();
InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType); InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType);
Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); Set<String> mimeTypesToFilterOn = new HashSet<>();
if (isFilterByMedia()) {
mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES);
}
if (isFilterByDoc()) {
mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES);
}
Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase); Map<Integer, CommonAttributeValueList> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCount(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase);
return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); return new CommonAttributeCountSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType);
@ -97,7 +105,13 @@ public class SingleInterCaseCommonAttributeSearcher extends InterCaseCommonAttri
CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId); CorrelationCase correlationCase = this.getCorrelationCaseFromId(this.corrleationCaseId);
this.correlationCaseName = correlationCase.getDisplayName(); this.correlationCaseName = correlationCase.getDisplayName();
InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType); InterCaseSearchResultsProcessor eamDbAttrInst = new InterCaseSearchResultsProcessor(this.corAttrType);
Set<String> mimeTypesToFilterOn = getMimeTypesToFilterOn(); Set<String> mimeTypesToFilterOn = new HashSet<>();
if (isFilterByMedia()) {
mimeTypesToFilterOn.addAll(MEDIA_PICS_VIDEO_MIME_TYPES);
}
if (isFilterByDoc()) {
mimeTypesToFilterOn.addAll(TEXT_FILES_MIME_TYPES);
}
Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase); Map<String, Map<String, CommonAttributeValueList>> interCaseCommonFiles = eamDbAttrInst.findSingleInterCaseValuesByCase(Case.getCurrentCase(), mimeTypesToFilterOn, correlationCase);
return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType); return new CommonAttributeCaseSearchResults(interCaseCommonFiles, this.frequencyPercentageThreshold, this.corAttrType);

View File

@ -33,8 +33,7 @@ import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
/** /**
* Content viewer that incorporates file type-specific viewers, such as ones * Generic Application content viewer
* for pictures, video, etc.
*/ */
@ServiceProvider(service = DataContentViewer.class, position = 3) @ServiceProvider(service = DataContentViewer.class, position = 3)
@SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives @SuppressWarnings("PMD.SingularField") // UI widgets cause lots of false positives
@ -74,7 +73,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
initComponents(); initComponents();
//LOGGER.log(Level.INFO, "Created ApplicationContentViewer instance: {0}", this); //NON-NLS LOGGER.log(Level.INFO, "Created ApplicationContentViewer instance: {0}", this); //NON-NLS
} }
/** /**
@ -120,7 +119,7 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
String mimeType = file.getMIMEType(); String mimeType = file.getMIMEType();
if (Strings.isNullOrEmpty(mimeType)) { if (Strings.isNullOrEmpty(mimeType)) {
// LOGGER.log(Level.INFO, "Mimetype not known for file: {0}", file.getName()); //NON-NLS LOGGER.log(Level.INFO, "Mimetype not known for file: {0}", file.getName()); //NON-NLS
try { try {
FileTypeDetector fileTypeDetector = new FileTypeDetector(); FileTypeDetector fileTypeDetector = new FileTypeDetector();
mimeType = fileTypeDetector.getMIMEType(file); mimeType = fileTypeDetector.getMIMEType(file);
@ -133,16 +132,18 @@ public class FileViewer extends javax.swing.JPanel implements DataContentViewer
if (mimeType.equalsIgnoreCase("application/octet-stream")) { if (mimeType.equalsIgnoreCase("application/octet-stream")) {
return; return;
} }
else {
FileTypeViewer viewer = getSupportingViewer(mimeType);
if (viewer != null) {
lastViewer = viewer;
FileTypeViewer viewer = getSupportingViewer(mimeType); viewer.setFile(file);
if (viewer != null) { this.removeAll();
lastViewer = viewer; this.add(viewer.getComponent());
this.repaint();
viewer.setFile(file); }
this.removeAll(); }
this.add(viewer.getComponent());
this.repaint();
}
} }
@Override @Override
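
Editor's note: the FileViewer hunk above trades an else-block for a guard clause around the generic application/octet-stream type. A self-contained sketch of that early-return dispatch, with a hypothetical Viewer interface standing in for Autopsy's FileTypeViewer:

import java.util.List;

// Self-contained sketch of FileViewer's early-return dispatch; Viewer is a
// hypothetical stand-in for Autopsy's FileTypeViewer.
public class ViewerDispatchDemo {
    interface Viewer {
        boolean supports(String mimeType);
        void show(String fileName);
    }

    private static final List<Viewer> VIEWERS = List.of(new Viewer() {
        public boolean supports(String mimeType) { return mimeType.startsWith("image/"); }
        public void show(String fileName) { System.out.println("image viewer: " + fileName); }
    });

    static void display(String mimeType, String fileName) {
        if (mimeType.equalsIgnoreCase("application/octet-stream")) {
            return; // generic type: nothing specific to show, so bail out early
        }
        for (Viewer viewer : VIEWERS) {
            if (viewer.supports(mimeType)) {
                viewer.show(fileName);
                return;
            }
        }
    }

    public static void main(String[] args) {
        display("image/png", "photo.png");            // dispatched
        display("application/octet-stream", "a.bin"); // skipped by the guard
    }
}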

View File

@ -3,7 +3,13 @@ Installer.closing.confirmationDialog.title=Ingest is Running
# {0} - exception message # {0} - exception message
Installer.closing.messageBox.caseCloseExceptionMessage=Error closing case: {0} Installer.closing.messageBox.caseCloseExceptionMessage=Error closing case: {0}
OpenIDE-Module-Display-Category=Infrastructure OpenIDE-Module-Display-Category=Infrastructure
OpenIDE-Module-Long-Description=This is the core Autopsy module.\n\nThe module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\nThe framework included in the module contains APIs for developing modules for ingest, viewers and reporting. The modules can be deployed as Plugins using the Autopsy plugin installer.\nThis module should not be uninstalled - without it, Autopsy will not run.\n\nFor more information, see http://www.sleuthkit.org/autopsy/ OpenIDE-Module-Long-Description=\
This is the core Autopsy module.\n\n\
The module contains the core components needed for the bare application to run; the RCP platform, windowing GUI, sleuthkit bindings, datamodel / storage, explorer, result viewers, content viewers, ingest framework, reporting, and core tools, such as the file search.\n\n\
The framework included in the module contains APIs for developing modules for ingest, viewers and reporting. \
The modules can be deployed as Plugins using the Autopsy plugin installer.\n\
This module should not be uninstalled - without it, Autopsy will not run.\n\n\
For more information, see http://www.sleuthkit.org/autopsy/
OpenIDE-Module-Name=Autopsy-Core OpenIDE-Module-Name=Autopsy-Core
OpenIDE-Module-Short-Description=Autopsy Core Module OpenIDE-Module-Short-Description=Autopsy Core Module
org_sleuthkit_autopsy_core_update_center=http://sleuthkit.org/autopsy/updates.xml org_sleuthkit_autopsy_core_update_center=http://sleuthkit.org/autopsy/updates.xml
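
Editor's note: the right-hand side of this hunk wraps the long description with trailing backslashes, which java.util.Properties folds back into one logical line, and the \! escapes seen throughout these bundles load as plain exclamation marks. A small demonstration with shortened stand-in values:

import java.io.StringReader;
import java.util.Properties;

// Shows how java.util.Properties handles the escapes used in these bundles:
// a trailing backslash continues the line, \n becomes a newline, and \! loads
// as a plain '!'. The values are shortened from the real bundle entries.
public class PropertiesEscapeDemo {
    public static void main(String[] args) throws Exception {
        String src =
                "OpenIDE-Module-Long-Description=\\\n" +
                "This is the core Autopsy module.\\n\\nMore text follows.\n" +
                "Installer.tskLibErr.err=Fatal Error\\!\n";
        Properties props = new Properties();
        props.load(new StringReader(src));
        System.out.println(props.getProperty("OpenIDE-Module-Long-Description"));
        System.out.println(props.getProperty("Installer.tskLibErr.err")); // Fatal Error!
    }
}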

View File

@ -63,9 +63,9 @@ DataContentViewerHex.pageLabel2.text=Page
DataContentViewerString.pageLabel2.text=Page DataContentViewerString.pageLabel2.text=Page
# Product Information panel # Product Information panel
LBL_Description=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div> LBL_Description=<div style=\"font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif;\">\n <b>Product Version:</b> {0} ({9}) <br><b>Sleuth Kit Version:</b> {7} <br><b>Netbeans RCP Build:</b> {8} <br> <b>Java:</b> {1}; {2}<br> <b>System:</b> {3}; {4}; {5}<br><b>Userdir:</b> {6}</div>
Format_OperatingSystem_Value={0} version {1} running on {2} Format_OperatingSystem_Value={0} version {1} running on {2}
LBL_Copyright=<div style="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style="color: #1E2A60;" href="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style="color: #1E2A60;" href="http://www.basistech.com/autopsy-training">http://www.basistech.com/autopsy-training</a></li><li>Commercial Support: <a style="color: #1E2A60;" href="http://www.basistech.com/digital-forensics/autopsy/support/">http://www.basistech.com/digital-forensics/autopsy/support/</a></li></ul>Copyright &copy; 2003-2018. </div> LBL_Copyright=<div style\="font-size: 12pt; font-family: Verdana, 'Verdana CE', Arial, 'Arial CE', 'Lucida Grande CE', lucida, 'Helvetica CE', sans-serif; ">Autopsy&trade; is a digital forensics platform based on The Sleuth Kit&trade; and other tools. <br><ul><li>General Information: <a style\="color: \#1E2A60;" href\="http://www.sleuthkit.org">http://www.sleuthkit.org</a>.</li><li>Training: <a style\="color: \#1E2A60;" href\="http://www.basistech.com/autopsy-training">http://www.basistech.com/autopsy-training</a></li><li>Commercial Support: <a style\="color: \#1E2A60;" href\="http://www.basistech.com/digital-forensics/autopsy/support/">http://www.basistech.com/digital-forensics/autopsy/support/</a></li></ul>Copyright &copy; 2003-2018. </div>
SortChooser.dialogTitle=Choose Sort Criteria SortChooser.dialogTitle=Choose Sort Criteria
ThumbnailViewChildren.progress.cancelling=(Cancelling) ThumbnailViewChildren.progress.cancelling=(Cancelling)
# {0} - file name # {0} - file name
@ -102,7 +102,7 @@ DataResultViewerThumbnail.pageNextButton.text=
DataResultViewerThumbnail.imagesLabel.text=Images: DataResultViewerThumbnail.imagesLabel.text=Images:
DataResultViewerThumbnail.imagesRangeLabel.text=- DataResultViewerThumbnail.imagesRangeLabel.text=-
DataResultViewerThumbnail.pageNumLabel.text=- DataResultViewerThumbnail.pageNumLabel.text=-
DataResultViewerThumbnail.filePathLabel.text=\ DataResultViewerThumbnail.filePathLabel.text=\ \ \
DataResultViewerThumbnail.goToPageLabel.text=Go to Page: DataResultViewerThumbnail.goToPageLabel.text=Go to Page:
DataResultViewerThumbnail.goToPageField.text= DataResultViewerThumbnail.goToPageField.text=
AdvancedConfigurationDialog.cancelButton.text=Cancel AdvancedConfigurationDialog.cancelButton.text=Cancel

View File

@ -23,7 +23,9 @@ PlatformUtil.getProcVmUsed.sigarNotInit.msg=Cannot get virt mem used, sigar not
PlatformUtil.getProcVmUsed.gen.msg=Cannot get virt mem used, {0} PlatformUtil.getProcVmUsed.gen.msg=Cannot get virt mem used, {0}
PlatformUtil.getJvmMemInfo.usageText=JVM heap usage: {0}, JVM non-heap usage: {1} PlatformUtil.getJvmMemInfo.usageText=JVM heap usage: {0}, JVM non-heap usage: {1}
PlatformUtil.getPhysicalMemInfo.usageText=Physical memory usage (max, total, free): {0}, {1}, {2} PlatformUtil.getPhysicalMemInfo.usageText=Physical memory usage (max, total, free): {0}, {1}, {2}
PlatformUtil.getAllMemUsageInfo.usageText={0}\n{1}\nProcess Virtual Memory: {2} PlatformUtil.getAllMemUsageInfo.usageText={0}\n\
{1}\n\
Process Virtual Memory: {2}
# {0} - file name # {0} - file name
ReadImageTask.mesageText=Reading image: {0} ReadImageTask.mesageText=Reading image: {0}
StringExtract.illegalStateException.cannotInit.msg=Unicode table not properly initialized, cannot instantiate StringExtract StringExtract.illegalStateException.cannotInit.msg=Unicode table not properly initialized, cannot instantiate StringExtract

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2013-8 Basis Technology Corp. * Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -21,9 +21,9 @@ package org.sleuthkit.autopsy.coreutils;
import javafx.scene.paint.Color; import javafx.scene.paint.Color;
/** /**
* Utilities for dealing with colors. *
*/ */
final public class ColorUtilities { public class ColorUtilities {
private ColorUtilities() { private ColorUtilities() {
} }
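
Editor's note: ColorUtilities follows the standard non-instantiable utility-class idiom, a private constructor plus static helpers, and the left side additionally marks the class final so it cannot be subclassed. A minimal illustration with a made-up helper:

// Minimal illustration of the utility-class idiom: final class, private
// constructor, static helpers only. The clamp helper is made up for the demo.
public final class ColorMathDemo {
    private ColorMathDemo() {
        // no instances
    }

    static int clampChannel(int value) {
        return Math.max(0, Math.min(255, value));
    }

    public static void main(String[] args) {
        System.out.println(clampChannel(300)); // 255
    }
}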

View File

@ -128,7 +128,12 @@ public final class ExecUtil {
* @throws IOException if an I/O error occurs. * @throws IOException if an I/O error occurs.
*/ */
public static int execute(ProcessBuilder processBuilder) throws SecurityException, IOException { public static int execute(ProcessBuilder processBuilder) throws SecurityException, IOException {
return ExecUtil.execute(processBuilder, 30, TimeUnit.DAYS, () -> false); return ExecUtil.execute(processBuilder, 30, TimeUnit.DAYS, new ProcessTerminator() {
@Override
public boolean shouldTerminateProcess() {
return false;
}
});
} }
/** /**
@ -165,29 +170,6 @@ public final class ExecUtil {
*/ */
public static int execute(ProcessBuilder processBuilder, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException { public static int execute(ProcessBuilder processBuilder, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException {
Process process = processBuilder.start(); Process process = processBuilder.start();
return waitForTermination(processBuilder.command().get(0), process, timeOut, units, terminator);
}
/**
* Wait for the given process to finish, using the given ProcessTerminator.
*
* @param command The command that was used to start the process. Used
* only for logging purposes.
* @param process The process to wait for.
* @param terminator The ProcessTerminator used to determine if the process
* should be killed.
*
* @return the exit value of the process
*
* @throws SecurityException if a security manager exists and vetoes any
* aspect of running the process.
* @throws IOException if an I/O error occurs.
*/
public static int waitForTermination(String command, Process process, ProcessTerminator terminator) throws SecurityException, IOException {
return ExecUtil.waitForTermination(command, process, ExecUtil.DEFAULT_TIMEOUT, ExecUtil.DEFAULT_TIMEOUT_UNITS, terminator);
}
private static int waitForTermination(String command, Process process, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException {
try { try {
do { do {
process.waitFor(timeOut, units); process.waitFor(timeOut, units);
@ -196,7 +178,7 @@ public final class ExecUtil {
try { try {
process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
} catch (InterruptedException exx) { } catch (InterruptedException exx) {
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command)); Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", processBuilder.command().get(0)));
} }
} }
} while (process.isAlive()); } while (process.isAlive());
@ -207,9 +189,9 @@ public final class ExecUtil {
try { try {
process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning process.waitFor(); //waiting to help ensure process is shutdown before calling interrupt() or returning
} catch (InterruptedException exx) { } catch (InterruptedException exx) {
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command)); Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", processBuilder.command().get(0)));
} }
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, "Thread interrupted while running {0}", command); // NON-NLS Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, "Thread interrupted while running {0}", processBuilder.command().get(0)); // NON-NLS
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
} }
return process.exitValue(); return process.exitValue();
@ -238,26 +220,15 @@ public final class ExecUtil {
process.destroyForcibly(); process.destroyForcibly();
} }
} catch (Exception ex) { } catch (Exception ex) {
Logger.getLogger(ExecUtil.class.getName()).log(Level.WARNING, "Error occurred when attempting to kill process: {0}", ex.getMessage()); // NON-NLS logger.log(Level.WARNING, "Error occurred when attempting to kill process: {0}", ex.getMessage()); // NON-NLS
} }
} }
@Deprecated
private static final Logger logger = Logger.getLogger(ExecUtil.class.getName()); private static final Logger logger = Logger.getLogger(ExecUtil.class.getName());
@Deprecated
private Process proc = null; private Process proc = null;
@Deprecated
private ExecUtil.StreamToStringRedirect errorStringRedirect = null; private ExecUtil.StreamToStringRedirect errorStringRedirect = null;
@Deprecated
private ExecUtil.StreamToStringRedirect outputStringRedirect = null; private ExecUtil.StreamToStringRedirect outputStringRedirect = null;
@Deprecated
private ExecUtil.StreamToWriterRedirect outputWriterRedirect = null; private ExecUtil.StreamToWriterRedirect outputWriterRedirect = null;
@Deprecated
private int exitValue = -100; private int exitValue = -100;
/** /**
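
Editor's note: the ExecUtil hunks replace an anonymous ProcessTerminator with a lambda and factor the wait loop into waitForTermination. A standalone sketch of the timeout-poll-terminate pattern, using BooleanSupplier in place of Autopsy's ProcessTerminator:

import java.io.IOException;
import java.util.concurrent.TimeUnit;
import java.util.function.BooleanSupplier;

// Standalone sketch of ExecUtil's wait loop: poll waitFor() with a timeout and
// consult a terminator callback; BooleanSupplier stands in for ProcessTerminator.
public class TimedExecDemo {
    static int execute(ProcessBuilder pb, long timeOut, TimeUnit units,
            BooleanSupplier shouldTerminate) throws IOException {
        Process process = pb.start();
        try {
            do {
                process.waitFor(timeOut, units);
                if (process.isAlive() && shouldTerminate.getAsBoolean()) {
                    process.destroyForcibly();
                    process.waitFor(); // ensure shutdown before reading the exit value
                }
            } while (process.isAlive());
        } catch (InterruptedException ex) {
            process.destroyForcibly();
            try {
                process.waitFor();
            } catch (InterruptedException ignored) {
            }
            Thread.currentThread().interrupt();
        }
        return process.exitValue();
    }

    public static void main(String[] args) throws IOException {
        int exitValue = execute(new ProcessBuilder("java", "-version"),
                30, TimeUnit.SECONDS, () -> false);
        System.out.println("exit value: " + exitValue);
    }
}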

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2014-18 Basis Technology Corp. * Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -18,6 +18,7 @@
*/ */
package org.sleuthkit.autopsy.coreutils; package org.sleuthkit.autopsy.coreutils;
import java.util.Objects;
import javafx.beans.property.ReadOnlyBooleanProperty; import javafx.beans.property.ReadOnlyBooleanProperty;
import javafx.beans.property.ReadOnlyBooleanWrapper; import javafx.beans.property.ReadOnlyBooleanWrapper;
import javafx.beans.property.ReadOnlyObjectProperty; import javafx.beans.property.ReadOnlyObjectProperty;
@ -29,11 +30,12 @@ import javax.annotation.concurrent.ThreadSafe;
/** /**
* A basic history implementation. Keeps a history (and forward) stack of state * A basic history implementation. Keeps a history (and forward) stack of state
* objects of type T. exposes current state and availability of advance/retreat * objects of type T. exposes current state and availability of
* operations via methods and JFX Property objects. Null is not a valid state, * advance/retreat operations via methods and JFX Property objects. Null is not
* and will only be the current state before the first call to advance. * a valid state, and will only be the current state before the first call to
* advance.
* *
* @param <T> the type of objects used to represent the * @param T the type of objects used to represent the
* current/historical/future states * current/historical/future states
*/ */
@ThreadSafe @ThreadSafe
@ -117,21 +119,20 @@ public class History<T> {
} }
/** /**
* Retreat through the history states by one, and add the current state to * retreat through the history states by one, and add the current state to
* the forward states. Is a no-op if there are no history states. * the forward states. Is a no-op if there are no history states.
* *
* @return the state retreated to, or null if there were no history states. * @return the state retreated to, or null if there were no history states.
*/ */
synchronized public T retreat() { synchronized public T retreat() {
final T pop = historyStack.pop(); final T pop = historyStack.pop();
if (pop != null) {
if (pop.equals(currentState.get())) { if (pop != null && pop.equals(currentState.get()) == false) {
return retreat(); forwardStack.push(currentState.get());
} else { currentState.set(pop);
forwardStack.push(currentState.get()); return pop;
currentState.set(pop); } else if (pop != null && pop.equals(currentState.get())) {
return pop; return retreat();
}
} }
return pop; return pop;
} }
@ -146,7 +147,7 @@ public class History<T> {
* @throws IllegalArgumentException if newState == null * @throws IllegalArgumentException if newState == null
*/ */
synchronized public void advance(T newState) throws IllegalArgumentException { synchronized public void advance(T newState) throws IllegalArgumentException {
if (newState != null && newState.equals(currentState.get()) == false) { if (newState != null && Objects.equals(currentState.get(), newState) == false) {
if (currentState.get() != null) { if (currentState.get() != null) {
historyStack.push(currentState.get()); historyStack.push(currentState.get());
} }
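
Editor's note: for reference, the History<T> contract boils down to two stacks around a current state. A minimal, single-threaded sketch; the real class adds thread safety and JavaFX observable properties, and its handling of null and repeated states differs slightly between the two sides of the hunk:

import java.util.ArrayDeque;
import java.util.Deque;

// Minimal single-threaded sketch of the History<T> contract: advance() pushes
// the current state onto the back stack, retreat() moves it to the forward stack.
public class HistoryDemo<T> {
    private final Deque<T> historyStack = new ArrayDeque<>();
    private final Deque<T> forwardStack = new ArrayDeque<>();
    private T currentState;

    public void advance(T newState) {
        if (newState == null || newState.equals(currentState)) {
            return; // simplified: the real class rejects null via an exception
        }
        if (currentState != null) {
            historyStack.push(currentState);
        }
        forwardStack.clear();
        currentState = newState;
    }

    public T retreat() {
        if (historyStack.isEmpty()) {
            return null; // no-op when there are no history states
        }
        forwardStack.push(currentState);
        currentState = historyStack.pop();
        return currentState;
    }

    public static void main(String[] args) {
        HistoryDemo<String> history = new HistoryDemo<>();
        history.advance("a");
        history.advance("b");
        System.out.println(history.retreat()); // a
    }
}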

View File

@ -1,19 +1,19 @@
/* /*
* *
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2012-2018 Basis Technology Corp. * Copyright 2012 Basis Technology Corp.
* *
* Copyright 2012 42six Solutions. * Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com * Contact: aebadirad <at> 42six <dot> com
* Project Contact/Architect: carrier <at> sleuthkit <dot> org * Project Contact/Architect: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -27,12 +27,12 @@ import java.sql.DriverManager;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Statement; import java.sql.Statement;
import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger;
/** /**
* Database connection class & utilities. * Database connection class & utilities *
*/ */
public class SQLiteDBConnect implements AutoCloseable { public class SQLiteDBConnect {
public String sDriver = ""; public String sDriver = "";
public String sUrl = null; public String sUrl = null;
@ -52,7 +52,7 @@ public class SQLiteDBConnect implements AutoCloseable {
* quick and dirty constructor to test the database passing the * quick and dirty constructor to test the database passing the
* DriverManager name and the fully loaded url to handle * DriverManager name and the fully loaded url to handle
*/ */
/* /*
* NB this will typically be available if you make this class concrete and * NB this will typically be available if you make this class concrete and
* not abstract * not abstract
*/ */
@ -104,13 +104,9 @@ public class SQLiteDBConnect implements AutoCloseable {
statement.executeUpdate(instruction); statement.executeUpdate(instruction);
} }
/** processes an array of instructions e.g. a set of SQL command strings // processes an array of instructions e.g. a set of SQL command strings passed from a file
* passed from a file //NB you should ensure you either handle empty lines in files by either removing them or parsing them out
* // since they will generate spurious SQLExceptions when they are encountered during the iteration....
* NB you should ensure you either handle empty lines in files by either
* removing them or parsing them out since they will generate spurious
* SQLExceptions when they are encountered during the iteration....
*/
public void executeStmt(String[] instructionSet) throws SQLException { public void executeStmt(String[] instructionSet) throws SQLException {
for (int i = 0; i < instructionSet.length; i++) { for (int i = 0; i < instructionSet.length; i++) {
executeStmt(instructionSet[i]); executeStmt(instructionSet[i]);
@ -124,14 +120,7 @@ public class SQLiteDBConnect implements AutoCloseable {
public void closeConnection() { public void closeConnection() {
try { try {
conn.close(); conn.close();
} catch (SQLException ex) { } catch (Exception ignore) {
logger.log(Level.WARNING, "Unable to close connection to SQLite DB at " + sUrl, ex);
} }
//Implementing Autoclosable.close() allows this class to be used in try-with-resources.
}
@Override
public void close() {
closeConnection();
} }
} }
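
Editor's note: implementing AutoCloseable, as the left side does, lets callers manage the connection with try-with-resources instead of remembering closeConnection(). A sketch of the pattern; it assumes a JDBC driver such as sqlite-jdbc on the classpath so the in-memory URL resolves:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

// Sketch of the AutoCloseable pattern from the left side of the hunk; assumes
// a JDBC driver (e.g. sqlite-jdbc) is available on the classpath.
public class AutoCloseableDbDemo implements AutoCloseable {
    private final Connection conn;

    public AutoCloseableDbDemo(String url) throws SQLException {
        this.conn = DriverManager.getConnection(url);
    }

    public void executeStmt(String sql) throws SQLException {
        try (Statement stmt = conn.createStatement()) {
            stmt.executeUpdate(sql);
        }
    }

    @Override
    public void close() throws SQLException {
        conn.close(); // the original logs and swallows; propagating is also reasonable
    }

    public static void main(String[] args) throws SQLException {
        try (AutoCloseableDbDemo db = new AutoCloseableDbDemo("jdbc:sqlite::memory:")) {
            db.executeStmt("CREATE TABLE t (id INTEGER)");
        } // close() runs automatically here
    }
}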

View File

@ -258,10 +258,10 @@ ImageNode.getActions.viewInNewWin.text=View in New Window
ImageNode.createSheet.name.name=Name ImageNode.createSheet.name.name=Name
ImageNode.createSheet.name.displayName=Name ImageNode.createSheet.name.displayName=Name
ImageNode.createSheet.name.desc=no description ImageNode.createSheet.name.desc=no description
Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null! Installer.exception.tskVerStringNull.msg=Sleuth Kit JNI test call returned without error, but version string was null\!
Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""! Installer.exception.taskVerStringBang.msg=Sleuth Kit JNI test call returned without error, but version string was ""\!
Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed!\n\nDetails: {0} Installer.tskLibErr.msg=Problem with Sleuth Kit JNI. Test call failed\!\n\nDetails: {0}
Installer.tskLibErr.err=Fatal Error! Installer.tskLibErr.err=Fatal Error\!
InterestingHits.interestingItems.text=INTERESTING ITEMS InterestingHits.interestingItems.text=INTERESTING ITEMS
InterestingHits.displayName.text=Interesting Items InterestingHits.displayName.text=Interesting Items
InterestingHits.createSheet.name.name=Name InterestingHits.createSheet.name.name=Name

View File

@ -51,7 +51,7 @@ public abstract class DisplayableItemNode extends AbstractNode {
* *
* @throws TskCoreException * @throws TskCoreException
*/ */
protected static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException { static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
BlackboardAttribute pathIDAttribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID)); BlackboardAttribute pathIDAttribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
if (pathIDAttribute != null) { if (pathIDAttribute != null) {
long contentID = pathIDAttribute.getValueLong(); long contentID = pathIDAttribute.getValueLong();

View File

@ -50,7 +50,6 @@ import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHS
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE; import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_DOWNLOAD_SOURCE;
import org.sleuthkit.datamodel.SleuthkitCase; import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -147,8 +146,8 @@ public class ExtractedContent implements AutopsyVisitableItem {
return filePath + "gps-search.png"; //NON-NLS return filePath + "gps-search.png"; //NON-NLS
} else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID()) { } else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_PROG_RUN.getTypeID()) {
return filePath + "installed.png"; //NON-NLS return filePath + "installed.png"; //NON-NLS
} else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() } else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() ||
|| typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID()) { typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID()) {
return filePath + "encrypted-file.png"; //NON-NLS return filePath + "encrypted-file.png"; //NON-NLS
} else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID()) { } else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID()) {
return filePath + "mismatch-16.png"; //NON-NLS return filePath + "mismatch-16.png"; //NON-NLS
@ -238,7 +237,6 @@ public class ExtractedContent implements AutopsyVisitableItem {
doNotShow.add(new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT)); doNotShow.add(new BlackboardArtifact.Type(TSK_INTERESTING_ARTIFACT_HIT));
doNotShow.add(new BlackboardArtifact.Type(TSK_ACCOUNT)); doNotShow.add(new BlackboardArtifact.Type(TSK_ACCOUNT));
doNotShow.add(new BlackboardArtifact.Type(TSK_DATA_SOURCE_USAGE)); doNotShow.add(new BlackboardArtifact.Type(TSK_DATA_SOURCE_USAGE));
doNotShow.add(new BlackboardArtifact.Type(TSK_TL_EVENT));
doNotShow.add(new BlackboardArtifact.Type(TSK_DOWNLOAD_SOURCE) ); doNotShow.add(new BlackboardArtifact.Type(TSK_DOWNLOAD_SOURCE) );
} }
@ -267,7 +265,7 @@ public class ExtractedContent implements AutopsyVisitableItem {
*/ */
} }
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
/** /**
* This is a stop gap measure until a different way of handling * This is a stop gap measure until a different way of handling
* the closing of cases is worked out. Currently, remote events * the closing of cases is worked out. Currently, remote events
@ -463,7 +461,7 @@ public class ExtractedContent implements AutopsyVisitableItem {
*/ */
} }
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString()) } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
|| eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) { || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
/** /**
* Checking for a current case is a stop gap measure until a * Checking for a current case is a stop gap measure until a
* different way of handling the closing of cases is worked * different way of handling the closing of cases is worked

View File

@ -46,6 +46,7 @@ import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CasePreferences; import org.sleuthkit.autopsy.casemodule.CasePreferences;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import static org.sleuthkit.autopsy.datamodel.Bundle.*; import static org.sleuthkit.autopsy.datamodel.Bundle.*;
import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestManager;

View File

@ -173,10 +173,6 @@ public final class AutopsyEventPublisher {
stopRemotePublisher(); stopRemotePublisher();
++tryCount; ++tryCount;
} }
catch(RuntimeException ex) {
logger.log(Level.SEVERE, String.format("Runtime exception in attempting to publish %s using channel %s", event.getPropertyName(), currentChannelName), ex); //NON-NLS
break;
}
} }
} }
} }
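
Editor's note: the deleted catch block above sat inside AutopsyEventPublisher's bounded retry loop. A standalone sketch of that loop shape, with a stand-in Publisher interface and failure mode:

// Standalone sketch of the bounded retry loop in AutopsyEventPublisher;
// Publisher and the failure mode are stand-ins.
public class RetryPublishDemo {
    private static final int MAX_TRIES = 3;

    interface Publisher {
        void publish(String eventName) throws Exception;
    }

    static void publishWithRetry(Publisher publisher, String eventName) {
        int tryCount = 0;
        while (tryCount < MAX_TRIES) {
            try {
                publisher.publish(eventName);
                return;
            } catch (Exception ex) {
                System.err.println("publish failed, restarting channel: " + ex.getMessage());
                ++tryCount; // mirrors stopRemotePublisher() plus ++tryCount in the original
            }
        }
        System.err.println("giving up on " + eventName + " after " + MAX_TRIES + " tries");
    }

    public static void main(String[] args) {
        publishWithRetry(eventName -> {
            throw new Exception("channel down");
        }, "CASE_OPENED");
    }
}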

View File

@ -34,10 +34,13 @@ import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;

View File

@ -1,16 +1,16 @@
/* /*
* Sample module in the public domain. Feel free to use this as a template * Sample module in the public domain. Feel free to use this as a template
* for your modules. * for your modules.
* *
* Contact: Brian Carrier [carrier <at> sleuthkit [dot] org] * Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
* *
* This is free and unencumbered software released into the public domain. * This is free and unencumbered software released into the public domain.
* *
* Anyone is free to copy, modify, publish, use, compile, sell, or * Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled * distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any * binary, for any purpose, commercial or non-commercial, and by any
* means. * means.
* *
* In jurisdictions that recognize copyright laws, the author or authors * In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the * of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit * software to the public domain. We make this dedication for the benefit
@ -18,31 +18,34 @@
* successors. We intend this dedication to be an overt act of * successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this * relinquishment in perpetuity of all present and future rights to this
* software under copyright law. * software under copyright law.
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE. * OTHER DEALINGS IN THE SOFTWARE.
*/ */
package org.sleuthkit.autopsy.examples; package org.sleuthkit.autopsy.examples;
import java.util.HashMap; import java.util.HashMap;
import java.util.logging.Level; import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
/** /**
@ -53,7 +56,7 @@ import org.sleuthkit.datamodel.TskData;
class SampleFileIngestModule implements FileIngestModule { class SampleFileIngestModule implements FileIngestModule {
private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>(); private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
private static final BlackboardAttribute.ATTRIBUTE_TYPE ATTR_TYPE = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT; private static BlackboardAttribute.ATTRIBUTE_TYPE attrType = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
private final boolean skipKnownFiles; private final boolean skipKnownFiles;
private IngestJobContext context = null; private IngestJobContext context = null;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
@ -73,8 +76,8 @@ class SampleFileIngestModule implements FileIngestModule {
// Skip anything other than actual file system files. // Skip anything other than actual file system files.
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) || (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (file.isFile() == false)) { || (file.isFile() == false)) {
return IngestModule.ProcessResult.OK; return IngestModule.ProcessResult.OK;
} }
@ -98,7 +101,7 @@ class SampleFileIngestModule implements FileIngestModule {
// Make an attribute using the ID for the attribute attrType that // Make an attribute using the ID for the attribute attrType that
// was previously created. // was previously created.
BlackboardAttribute attr = new BlackboardAttribute(ATTR_TYPE, SampleIngestModuleFactory.getModuleName(), count); BlackboardAttribute attr = new BlackboardAttribute(attrType, SampleIngestModuleFactory.getModuleName(), count);
// Add the to the general info artifact for the file. In a // Add the to the general info artifact for the file. In a
// real module, you would likely have more complex data types // real module, you would likely have more complex data types
@ -110,15 +113,13 @@ class SampleFileIngestModule implements FileIngestModule {
// management of shared data. // management of shared data.
addToBlackboardPostCount(context.getJobId(), 1L); addToBlackboardPostCount(context.getJobId(), 1L);
/* // Fire an event to notify any listeners for blackboard postings.
* post the artifact which will index the artifact for keyword ModuleDataEvent event = new ModuleDataEvent(SampleIngestModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_GEN_INFO);
* search, and fire an event to notify UI of this new artifact IngestServices.getInstance().fireModuleDataEvent(event);
*/
file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
return IngestModule.ProcessResult.OK; return IngestModule.ProcessResult.OK;
} catch (TskCoreException | Blackboard.BlackboardException ex) { } catch (TskCoreException ex) {
IngestServices ingestServices = IngestServices.getInstance(); IngestServices ingestServices = IngestServices.getInstance();
Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName()); Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName());
logger.log(Level.SEVERE, "Error processing file (id = " + file.getId() + ")", ex); logger.log(Level.SEVERE, "Error processing file (id = " + file.getId() + ")", ex);
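
Editor's note: SampleFileIngestModule shares one counter map across the per-job module instances, so updates go through synchronized static methods. A runnable sketch of that shared-counter pattern:

import java.util.HashMap;

// Runnable sketch of the shared per-job counter in SampleFileIngestModule:
// all module instances for a job id funnel updates through synchronized
// static methods on one map.
public class JobCounterDemo {
    private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();

    static synchronized void addToBlackboardPostCount(long ingestJobId, long countToAdd) {
        artifactCountsForIngestJobs.merge(ingestJobId, countToAdd, Long::sum);
    }

    static synchronized long reportAndReset(long ingestJobId) {
        Long count = artifactCountsForIngestJobs.remove(ingestJobId);
        return count == null ? 0 : count;
    }

    public static void main(String[] args) {
        addToBlackboardPostCount(1L, 1);
        addToBlackboardPostCount(1L, 1);
        System.out.println("job 1 posted " + reportAndReset(1L) + " artifacts");
    }
}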

View File

@ -14,7 +14,7 @@ KnownStatusSearchPanel.knownCheckBox.text=Known Status:
KnownStatusSearchPanel.knownBadOptionCheckBox.text=Notable KnownStatusSearchPanel.knownBadOptionCheckBox.text=Notable
KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL or other) KnownStatusSearchPanel.knownOptionCheckBox.text=Known (NSRL or other)
KnownStatusSearchPanel.unknownOptionCheckBox.text=Unknown KnownStatusSearchPanel.unknownOptionCheckBox.text=Unknown
DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected! DateSearchFilter.noneSelectedMsg.text=At least one date type must be selected\!
DateSearchPanel.dateCheckBox.text=Date: DateSearchPanel.dateCheckBox.text=Date:
DateSearchPanel.jLabel4.text=Timezone: DateSearchPanel.jLabel4.text=Timezone:
DateSearchPanel.jLabel3.text=*The date format is mm/dd/yyyy DateSearchPanel.jLabel3.text=*The date format is mm/dd/yyyy
@ -56,7 +56,7 @@ FileSearchPanel.search.results.details=Large number of matches may impact perfor
FileSearchPanel.search.exception.noFilterSelected.msg=At least one filter must be selected. FileSearchPanel.search.exception.noFilterSelected.msg=At least one filter must be selected.
FileSearchPanel.search.validationErr.msg=Validation Error: {0} FileSearchPanel.search.validationErr.msg=Validation Error: {0}
FileSearchPanel.emptyWhereClause.text=Invalid options, nothing to show. FileSearchPanel.emptyWhereClause.text=Invalid options, nothing to show.
KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected! KnownStatusSearchFilter.noneSelectedMsg.text=At least one known status must be selected\!
NameSearchFilter.emptyNameMsg.text=Must enter something for name search. NameSearchFilter.emptyNameMsg.text=Must enter something for name search.
SearchNode.getName.text=Search Result SearchNode.getName.text=Search Result
SizeSearchPanel.sizeCompareComboBox.equalTo=equal to SizeSearchPanel.sizeCompareComboBox.equalTo=equal to

View File

@ -140,7 +140,7 @@ IngestJob.cancelReason.outOfDiskSpace.text=Out of disk space
IngestJob.cancelReason.servicesDown.text=Services Down IngestJob.cancelReason.servicesDown.text=Services Down
IngestJob.cancelReason.caseClosed.text=Case closed IngestJob.cancelReason.caseClosed.text=Case closed
IngestJobSettingsPanel.globalSettingsButton.text=Global Settings IngestJobSettingsPanel.globalSettingsButton.text=Global Settings
gest= gest
IngestJobSettingsPanel.globalSettingsButton.actionCommand=Advanced IngestJobSettingsPanel.globalSettingsButton.actionCommand=Advanced
IngestJobSettingsPanel.globalSettingsButton.text=Global Settings IngestJobSettingsPanel.globalSettingsButton.text=Global Settings
IngestJobSettingsPanel.pastJobsButton.text=History IngestJobSettingsPanel.pastJobsButton.text=History

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2014-2019 Basis Technology Corp. * Copyright 2014-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -32,7 +32,6 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set;
import java.util.logging.Level; import java.util.logging.Level;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.openide.util.io.NbObjectInputStream; import org.openide.util.io.NbObjectInputStream;
@ -312,10 +311,11 @@ public final class IngestJobSettings {
* Get the ingest module factories discovered by the ingest module * Get the ingest module factories discovered by the ingest module
* loader. * loader.
*/ */
List<IngestModuleFactory> moduleFactories = new ArrayList<>();
List<IngestModuleFactory> allModuleFactories = IngestModuleFactoryLoader.getIngestModuleFactories(); List<IngestModuleFactory> allModuleFactories = IngestModuleFactoryLoader.getIngestModuleFactories();
HashSet<String> loadedModuleNames = new HashSet<>();
     // Add modules that are going to be used for this ingest depending on type.
     List<IngestModuleFactory> moduleFactories = new ArrayList<>();
     for (IngestModuleFactory moduleFactory : allModuleFactories) {
         if (this.ingestType.equals(IngestType.ALL_MODULES)) {
             moduleFactories.add(moduleFactory);
@@ -326,21 +326,15 @@ public final class IngestJobSettings {
         }
     }

-    /**
-     * Make set of module names, and a set of names of modules that are
-     * enabled by default.
-     */
-    Set<String> defaultEnabledModuleNames = new HashSet<>();
     Set<String> loadedModuleNames = new HashSet<>();
     for (IngestModuleFactory moduleFactory : moduleFactories) {
         loadedModuleNames.add(moduleFactory.getModuleDisplayName());
-        if (moduleFactory.isEnabledByDefault()) {
-            defaultEnabledModuleNames.add(moduleFactory.getModuleDisplayName());
-        }
     }

-    /** Get the enabled/disabled ingest modules settings for this context. */
-    HashSet<String> enabledModuleNames = getModulesNames(executionContext, IngestJobSettings.ENABLED_MODULES_PROPERTY, makeCsvList(defaultEnabledModuleNames));
+    /**
+     * Get the enabled/disabled ingest modules settings for this context. By
+     * default, all loaded modules are enabled.
+     */
+    HashSet<String> enabledModuleNames = getModulesNames(executionContext, IngestJobSettings.ENABLED_MODULES_PROPERTY, makeCsvList(loadedModuleNames));
     HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, ""); //NON-NLS

     /**
@@ -377,17 +371,11 @@ public final class IngestJobSettings {
         } else if (disabledModuleNames.contains(moduleName)) {
             moduleTemplate.setEnabled(false);
         } else {
-            /**
-             * The module factory was loaded, but the module name does not
-             * appear in the enabled/disabled module settings. Treat the
-             * module as a new module and use its default enabled state.
-             */
-            moduleTemplate.setEnabled(moduleFactory.isEnabledByDefault());
-            if (moduleFactory.isEnabledByDefault()) {
-                enabledModuleNames.add(moduleName);
-            } else {
-                disabledModuleNames.add(moduleName);
-            }
+            // The module factory was loaded, but the module name does not
+            // appear in the enabled/disabled module settings. Treat the
+            // module as a new module and enable it by default.
+            moduleTemplate.setEnabled(true);
+            enabledModuleNames.add(moduleName);
         }
         this.moduleTemplates.add(moduleTemplate);
     }
@@ -527,7 +515,7 @@ public final class IngestJobSettings {
      * @return The file path.
      */
     private String getModuleSettingsFilePath(IngestModuleFactory factory) {
         String fileName = FactoryClassNameNormalizer.normalize(factory.getClass().getCanonicalName()) + IngestJobSettings.MODULE_SETTINGS_FILE_EXT;
         Path path = Paths.get(this.moduleSettingsFolderPath, fileName);
         return path.toAbsolutePath().toString();
     }
@@ -588,7 +576,15 @@ public final class IngestJobSettings {
         if (collection == null || collection.isEmpty()) {
             return "";
         }
-        return String.join(", ", collection);
+        ArrayList<String> list = new ArrayList<>();
+        list.addAll(collection);
+        StringBuilder csvList = new StringBuilder();
+        for (int i = 0; i < list.size() - 1; ++i) {
+            csvList.append(list.get(i)).append(", ");
+        }
+        csvList.append(list.get(list.size() - 1));
+        return csvList.toString();
     }
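Aside: the two makeCsvList bodies in the hunk above are behaviorally equivalent for any non-empty collection; the revert merely trades the single String.join call (Java 8+) for the older manual loop. A minimal sketch showing the equivalence (the module names used as input are hypothetical stand-ins):

    import java.util.Arrays;
    import java.util.List;

    public class CsvListDemo {
        public static void main(String[] args) {
            List<String> names = Arrays.asList("Hash Lookup", "Keyword Search", "File Type Identification");

            // Restored style: manual loop that avoids a trailing separator.
            StringBuilder csv = new StringBuilder();
            for (int i = 0; i < names.size() - 1; ++i) {
                csv.append(names.get(i)).append(", ");
            }
            csv.append(names.get(names.size() - 1));

            // Reverted-away style: a single equivalent call.
            System.out.println(csv.toString().equals(String.join(", ", names))); // true
        }
    }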
     /**

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2019 Basis Technology Corp. * Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -240,16 +240,4 @@ public interface IngestModuleFactory {
* @return A file ingest module instance. * @return A file ingest module instance.
*/ */
FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings); FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings);
/**
* Queries if the module this factory creates is enabled to run during
* ingest by default. A module might be disabled by default if is very
* specialized or takes a very long time to run during ingest.
*
* @return True if the module created by this factory is be enabled, by
* default, to run during ingest.
*/
default boolean isEnabledByDefault() {
return true;
}
} }
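For context, the default method deleted above was the hook a factory could override to ship its module disabled by default. A minimal sketch of how it was meant to be used against the pre-revert interface (the factory class and display name are hypothetical, and the class is left abstract because the other required factory methods are omitted here):

    // Hypothetical example of overriding the removed hook: a very specialized
    // or slow module stays loaded and listed, but starts unchecked in the UI.
    public abstract class SlowCarverModuleFactory extends IngestModuleFactoryAdapter {

        @Override
        public String getModuleDisplayName() {
            return "Slow Carver"; // hypothetical module name
        }

        @Override
        public boolean isEnabledByDefault() {
            return false;
        }
    }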

View File

@@ -104,13 +104,9 @@ public final class IngestServices {
      *
      * @param moduleDataEvent A module data event, i.e., an event that
      *                        encapsulates artifact data.
-     *
-     * @deprecated use org.sleuthkit.datamodel.Blackboard.postArtifact instead.
      */
-    @Deprecated
     public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
         IngestManager.getInstance().fireIngestModuleDataEvent(moduleDataEvent);
     }
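The hunk above un-deprecates fireModuleDataEvent: on develop it had been superseded by org.sleuthkit.datamodel.Blackboard.postArtifact, which indexes an artifact and notifies the UI in one call. A schematic sketch of the restored two-step style, assuming an artifact the module just created (the module name is a hypothetical placeholder; note that the two Blackboard classes involved are different, org.sleuthkit.autopsy.casemodule.services.Blackboard for indexArtifact versus org.sleuthkit.datamodel.Blackboard for postArtifact):

    // Post-revert publication style: index for keyword search, then fire a UI event.
    void publishPostRevertStyle(BlackboardArtifact artifact) throws Blackboard.BlackboardException {
        Case.getCurrentCase().getServices().getBlackboard().indexArtifact(artifact);
        IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
                "MyModule", // hypothetical module name
                BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
    }

The pre-revert style collapsed both steps into a single postArtifact(artifact, moduleName) call on the datamodel Blackboard.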
     /**
@@ -174,7 +170,10 @@ public final class IngestServices {
      * Sets all of the global configuration settings for an ingest module.
      *
      * @param moduleName A unique identifier for the module.
+     *
+     * @param moduleName moduleName identifier unique to that module
      * @param settings   A mapping of setting names to setting values.
+     *
      */
     public void setConfigSettings(String moduleName, Map<String, String> settings) {
         ModuleSettings.setConfigSettings(moduleName, settings);

View File

@@ -53,7 +53,7 @@ public class ModuleDataEvent extends ChangeEvent {
     private Collection<BlackboardArtifact> artifacts;

     /**
      * @param moduleName   Module name
      * @param artifactType Type of artifact that was posted to blackboard
      */
     public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType) {
@@ -63,9 +63,9 @@ public class ModuleDataEvent extends ChangeEvent {
     }

     /**
      * @param moduleName             Module Name
      * @param blackboardArtifactType Type of the blackboard artifact posted to
      *                               the blackboard
      */
     public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType) {
         super(blackboardArtifactType);
@@ -74,10 +74,10 @@ public class ModuleDataEvent extends ChangeEvent {
     }

     /**
      * @param moduleName             Module name
      * @param blackboardArtifactType Type of artifact posted to the blackboard
-     * @param artifacts List of specific artifact ID values that
-     * were added to blackboard
+     * @param artifacts List of specific artifact ID values that were added to
+     * blackboard
      */
     public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType, Collection<BlackboardArtifact> artifacts) {
         this(moduleName, blackboardArtifactType);
@@ -85,10 +85,10 @@ public class ModuleDataEvent extends ChangeEvent {
     }

     /**
      * @param moduleName   Module name
      * @param artifactType Type of artifact that was posted to blackboard
      * @param artifacts    List of specific artifact values that were added to
      *                     blackboard
      */
     public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType, Collection<BlackboardArtifact> artifacts) {
         this(moduleName, artifactType);

View File

@@ -21,14 +21,10 @@ package org.sleuthkit.autopsy.modules.dataSourceIntegrity;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.logging.Level;
 import javax.xml.bind.DatatypeConverter;
-import org.openide.util.Exceptions;
-import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import java.util.Arrays;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
 import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
@@ -37,19 +33,20 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-import org.sleuthkit.datamodel.Blackboard;
-import org.sleuthkit.datamodel.BlackboardArtifact;
-import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.Image;
 import org.sleuthkit.datamodel.TskCoreException;
+import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.TskDataException;

 /**
  * Data source ingest module that verifies the integrity of an Expert Witness
  * Format (EWF) E01 image file by generating a hash of the file and comparing it
- * to the value stored in the image. Will also generate hashes for any
- * image-type data source that has none.
+ * to the value stored in the image. Will also generate hashes for any image-type
+ * data source that has none.
  */
 public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
@@ -59,12 +56,11 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
     private final boolean computeHashes;
     private final boolean verifyHashes;
     private final List<HashData> hashDataList = new ArrayList<>();

     private IngestJobContext context;
-    private Blackboard blackboard;

     DataSourceIntegrityIngestModule(DataSourceIntegrityIngestSettings settings) {
         computeHashes = settings.shouldComputeHashes();
         verifyHashes = settings.shouldVerifyHashes();
@@ -76,13 +72,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
     @Override
     public void startUp(IngestJobContext context) throws IngestModuleException {
         this.context = context;

         // It's an error if the module is run without either option selected
         if (!(computeHashes || verifyHashes)) {
             throw new IngestModuleException(Bundle.DataSourceIntegrityIngestModule_startup_noCheckboxesSelected());
         }
     }

     @NbBundle.Messages({
         "# {0} - imageName",
         "DataSourceIntegrityIngestModule.process.skipCompute=Not computing new hashes for {0} since the option was disabled",
@@ -101,29 +97,23 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
         "# {1} - calculatedHashValue",
         "DataSourceIntegrityIngestModule.process.calcHashWithType=<li>Calculated {0} hash: {1} </li>",
         "# {0} - imageName",
         "DataSourceIntegrityIngestModule.process.calculateHashDone=<p>Data Source Hash Calculation Results for {0} </p>",
         "DataSourceIntegrityIngestModule.process.hashesCalculated= hashes calculated",
         "# {0} - imageName",
         "DataSourceIntegrityIngestModule.process.errorSavingHashes= Error saving hashes for image {0} to the database",
         "# {0} - imageName",
         "DataSourceIntegrityIngestModule.process.errorLoadingHashes= Error loading hashes for image {0} from the database",
         "# {0} - hashAlgorithm",
         "# {1} - calculatedHashValue",
         "# {2} - storedHashValue",
         "DataSourceIntegrityIngestModule.process.hashFailedForArtifact={0} hash verification failed:\n Calculated hash: {1}\n Stored hash: {2}\n",
         "# {0} - imageName",
         "DataSourceIntegrityIngestModule.process.verificationSuccess=Integrity of {0} verified",
         "# {0} - imageName",
-        "DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",})
+        "DataSourceIntegrityIngestModule.process.verificationFailure={0} failed integrity verification",
+    })
     @Override
     public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
-        try {
-            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
-        } catch (NoCurrentCaseException ex) {
-            logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
-            return ProcessResult.ERROR;
-        }
         String imgName = dataSource.getName();

         // Skip non-images
@@ -142,28 +132,29 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
         if (size == 0) {
             logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName); //NON-NLS
         }

         // Determine which mode we're in.
         // - If there are any preset hashes, then we'll verify them (assuming the verify checkbox is selected)
         // - Otherwise we'll calculate and store all three hashes (assuming the compute checkbox is selected)

         // First get a list of all stored hash types
         try {
-            if (img.getMd5() != null && !img.getMd5().isEmpty()) {
+            if (img.getMd5() != null && ! img.getMd5().isEmpty()) {
                 hashDataList.add(new HashData(HashType.MD5, img.getMd5()));
             }
-            if (img.getSha1() != null && !img.getSha1().isEmpty()) {
+            if (img.getSha1() != null && ! img.getSha1().isEmpty()) {
                 hashDataList.add(new HashData(HashType.SHA1, img.getSha1()));
             }
-            if (img.getSha256() != null && !img.getSha256().isEmpty()) {
+            if (img.getSha256() != null && ! img.getSha256().isEmpty()) {
                 hashDataList.add(new HashData(HashType.SHA256, img.getSha256()));
             }
         } catch (TskCoreException ex) {
             String msg = Bundle.DataSourceIntegrityIngestModule_process_errorLoadingHashes(imgName);
             services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
             logger.log(Level.SEVERE, msg, ex);
             return ProcessResult.ERROR;
         }

         // Figure out which mode we should be in
         Mode mode;
         if (hashDataList.isEmpty()) {
@@ -171,30 +162,30 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
         } else {
             mode = Mode.VERIFY;
         }

         // If that mode was not enabled by the user, exit
-        if (mode.equals(Mode.COMPUTE) && !this.computeHashes) {
+        if (mode.equals(Mode.COMPUTE) && ! this.computeHashes) {
             logger.log(Level.INFO, "Not computing hashes for {0} since the option was disabled", imgName); //NON-NLS
             services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
                     Bundle.DataSourceIntegrityIngestModule_process_skipCompute(imgName)));
             return ProcessResult.OK;
-        } else if (mode.equals(Mode.VERIFY) && !this.verifyHashes) {
+        } else if (mode.equals(Mode.VERIFY) && ! this.verifyHashes) {
             logger.log(Level.INFO, "Not verifying hashes for {0} since the option was disabled", imgName); //NON-NLS
             services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
                     Bundle.DataSourceIntegrityIngestModule_process_skipVerify(imgName)));
             return ProcessResult.OK;
         }

         // If we're in compute mode (i.e., the hash list is empty), add all hash algorithms
         // to the list.
         if (mode.equals(Mode.COMPUTE)) {
-            for (HashType type : HashType.values()) {
+            for(HashType type : HashType.values()) {
                 hashDataList.add(new HashData(type, ""));
             }
         }

         // Set up the digests
-        for (HashData hashData : hashDataList) {
+        for (HashData hashData:hashDataList) {
             try {
                 hashData.digest = MessageDigest.getInstance(hashData.type.getName());
             } catch (NoSuchAlgorithmException ex) {
@@ -204,7 +195,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                 return ProcessResult.ERROR;
             }
         }

         // Libewf uses a chunk size of 64 times the sector size, which is the
         // motivation for using it here. For other images it shouldn't matter,
         // so they can use this chunk size as well.
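To make that chunking comment concrete: with the common 512-byte sector size the module reads 32 KiB at a time. A small worked sketch of the arithmetic (the sector and image sizes are assumed values; the real code takes them from the Image object, e.g. img.getSsize()):

    public class ChunkMathDemo {
        public static void main(String[] args) {
            long sectorSize = 512;                        // assumed typical sector size
            long chunkSize = 64 * sectorSize;             // 32,768 bytes per read
            long imageSize = 4L * 1024 * 1024 * 1024;     // hypothetical 4 GiB image
            long totalChunks = (imageSize + chunkSize - 1) / chunkSize; // ceiling division
            System.out.println(totalChunks);              // prints 131072
        }
    }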
@@ -221,13 +212,13 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
             logger.log(Level.INFO, "Starting hash calculation for {0}", img.getName()); //NON-NLS
         }
         services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
                 NbBundle.getMessage(this.getClass(),
                         "DataSourceIntegrityIngestModule.process.startingImg",
                         imgName)));

         // Set up the progress bar
         statusHelper.switchToDeterminate(totalChunks);

         // Read in byte size chunks and update the hash value with the data.
         byte[] data = new byte[(int) chunkSize];
         int read;
@@ -247,33 +238,33 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
             // Only update with the read bytes.
             if (read == chunkSize) {
-                for (HashData struct : hashDataList) {
+                for (HashData struct:hashDataList) {
                     struct.digest.update(data);
                 }
             } else {
                 byte[] subData = Arrays.copyOfRange(data, 0, read);
-                for (HashData struct : hashDataList) {
+                for (HashData struct:hashDataList) {
                     struct.digest.update(subData);
                 }
             }
             statusHelper.progress(i);
         }

         // Produce the final hashes
-        for (HashData hashData : hashDataList) {
+        for(HashData hashData:hashDataList) {
             hashData.calculatedHash = DatatypeConverter.printHexBinary(hashData.digest.digest()).toLowerCase();
             logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, hashData.calculatedHash}); //NON-NLS
         }

         if (mode.equals(Mode.VERIFY)) {
             // Check that each hash matches
             boolean verified = true;
             String detailedResults = NbBundle
                     .getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.verifyResultsHeader", imgName);
             String hashResults = "";
             String artifactComment = "";

-            for (HashData hashData : hashDataList) {
+            for (HashData hashData:hashDataList) {
                 if (hashData.storedHash.equals(hashData.calculatedHash)) {
                     hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashMatch(hashData.type.name) + " ";
                 } else {
@@ -281,10 +272,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                     hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashNonMatch(hashData.type.name) + " ";
                     artifactComment += Bundle.DataSourceIntegrityIngestModule_process_hashFailedForArtifact(hashData.type.name,
                             hashData.calculatedHash, hashData.storedHash) + " ";
                 }
                 hashResults += Bundle.DataSourceIntegrityIngestModule_process_hashList(hashData.calculatedHash, hashData.storedHash);
             }

             String verificationResultStr;
             String messageResultStr;
             MessageType messageType;
@@ -297,33 +288,31 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                 verificationResultStr = NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.notVerified");
                 messageResultStr = Bundle.DataSourceIntegrityIngestModule_process_verificationFailure(imgName);
             }

             detailedResults += NbBundle.getMessage(this.getClass(), "DataSourceIntegrityIngestModule.shutDown.resultLi", verificationResultStr);
             detailedResults += hashResults;

             if (!verified) {
                 try {
                     BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId());
                     verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
                             DataSourceIntegrityModuleFactory.getModuleName(), artifactComment));
-                    blackboard.postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
+                    IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(DataSourceIntegrityModuleFactory.getModuleName(),
+                            BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED));
                 } catch (TskCoreException ex) {
                     logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
-                } catch (Blackboard.BlackboardException ex) {
-                    logger.log(Level.SEVERE, "Error posting verification failed artifact to the blackboard", ex);
                 }
             }

             services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),
                     messageResultStr, detailedResults));
         } else {
             // Store the hashes in the database and update the image
             try {
                 String results = Bundle.DataSourceIntegrityIngestModule_process_calculateHashDone(imgName);

-                for (HashData hashData : hashDataList) {
+                for (HashData hashData:hashDataList) {
                     switch (hashData.type) {
                         case MD5:
                             try {
@@ -338,7 +327,7 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                             } catch (TskDataException ex) {
                                 logger.log(Level.SEVERE, "Error setting calculated hash", ex);
                             }
                             break;
                         case SHA256:
                             try {
                                 img.setSha256(hashData.calculatedHash);
@@ -351,11 +340,11 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                     }
                     results += Bundle.DataSourceIntegrityIngestModule_process_calcHashWithType(hashData.type.name, hashData.calculatedHash);
                 }

                 // Write the inbox message
                 services.postMessage(IngestMessage.createMessage(MessageType.INFO, DataSourceIntegrityModuleFactory.getModuleName(),
                         imgName + Bundle.DataSourceIntegrityIngestModule_process_hashesCalculated(), results));
             } catch (TskCoreException ex) {
                 String msg = Bundle.DataSourceIntegrityIngestModule_process_errorSavingHashes(imgName);
                 services.postMessage(IngestMessage.createMessage(MessageType.ERROR, DataSourceIntegrityModuleFactory.getModuleName(), msg));
@@ -363,10 +352,10 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
                 return ProcessResult.ERROR;
             }
         }

         return ProcessResult.OK;
     }

     /**
      * Enum to track whether we're in computer or verify mode
      */
@@ -374,37 +363,36 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
         COMPUTE,
         VERIFY;
     }

     /**
-     * Enum to hold the type of hash. The value in the "name" field should be
-     * compatible with MessageDigest
+     * Enum to hold the type of hash.
+     * The value in the "name" field should be compatible with MessageDigest
      */
     private enum HashType {
         MD5("MD5"),
         SHA1("SHA-1"),
         SHA256("SHA-256");

         private final String name; // This should be the string expected by MessageDigest

         HashType(String name) {
             this.name = name;
         }

         String getName() {
             return name;
         }
     }
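The enum's name values ("MD5", "SHA-1", "SHA-256") are standard JCA algorithm names, so they can be fed straight to MessageDigest.getInstance, which is exactly what the module does. A standalone sketch of that round trip (the input string is chosen arbitrarily; hex formatting mirrors the module's lowercase style):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import javax.xml.bind.DatatypeConverter;

    public class DigestNameDemo {
        public static void main(String[] args) throws NoSuchAlgorithmException {
            for (String name : new String[]{"MD5", "SHA-1", "SHA-256"}) {
                MessageDigest digest = MessageDigest.getInstance(name);
                digest.update("abc".getBytes());
                System.out.println(name + ": "
                        + DatatypeConverter.printHexBinary(digest.digest()).toLowerCase());
            }
        }
    }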
     /**
      * Utility class to hold data for a specific hash algorithm.
      */
     private class HashData {

         private HashType type;
         private MessageDigest digest;
         private String storedHash;
         private String calculatedHash;

         HashData(HashType type, String storedHash) {
             this.type = type;
             this.storedHash = storedHash;

View File

@@ -11,7 +11,12 @@ ExtractArchiveWithPasswordAction.progress.text=Unpacking contents of archive: {0
 ExtractArchiveWithPasswordAction.prompt.text=Enter Password
 ExtractArchiveWithPasswordAction.prompt.title=Enter Password
 OpenIDE-Module-Display-Category=Ingest Module
-OpenIDE-Module-Long-Description=Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\nContents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\nIf the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\nThe extracted files are navigable in the directory tree.\n\nThe module is supported on Windows, Linux and Mac operating systems.
+OpenIDE-Module-Long-Description=\
+Embedded File Extraction Ingest Module\n\nThe Embedded File Extraction Ingest Module processes document files (such as doc, docx, ppt, pptx, xls, xlsx) and archive files (such as zip and others archive types supported by the 7zip extractor).\n\
+Contents of these files are extracted and the derived files are added back to the current ingest to be processed by the configured ingest modules.\n\
+If the derived file happens to be an archive file, it will be re-processed by the 7zip extractor - the extractor will process archive files N-levels deep.\n\n\
+The extracted files are navigable in the directory tree.\n\n\
+The module is supported on Windows, Linux and Mac operating systems.
 OpenIDE-Module-Name=Embedded File Extraction
 OpenIDE-Module-Short-Description=Embedded File Extraction Ingest Module
 EmbeddedFileExtractorIngestModule.SevenZipContentReadStream.seek.exception.invalidOrigin=Invalid seek origin: {0}
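The restored OpenIDE-Module-Long-Description value relies on two different backslash uses: \n is a literal newline escape inside the value, while a bare backslash at the end of a physical line continues the logical line (the loader also strips leading whitespace from continuation lines). A small sketch of how java.util.Properties reads such a value (key and text are placeholders):

    import java.io.StringReader;
    import java.util.Properties;

    public class PropertiesContinuationDemo {
        public static void main(String[] args) throws Exception {
            String src = "key=\\\n"          // trailing backslash: line continuation
                    + "    First part.\\n\\\n" // \n stays an escape; line continues
                    + "    Second part.\n";
            Properties props = new Properties();
            props.load(new StringReader(src));
            // Prints "First part." and "Second part." on separate lines.
            System.out.println(props.getProperty("key"));
        }
    }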

View File

@@ -34,20 +34,21 @@ import java.util.logging.Level;
 import net.sf.sevenzipjbinding.ArchiveFormat;
 import static net.sf.sevenzipjbinding.ArchiveFormat.RAR;
 import net.sf.sevenzipjbinding.ExtractAskMode;
-import net.sf.sevenzipjbinding.ExtractOperationResult;
-import net.sf.sevenzipjbinding.IArchiveExtractCallback;
-import net.sf.sevenzipjbinding.ICryptoGetTextPassword;
 import net.sf.sevenzipjbinding.ISequentialOutStream;
 import net.sf.sevenzipjbinding.ISevenZipInArchive;
-import net.sf.sevenzipjbinding.PropID;
 import net.sf.sevenzipjbinding.SevenZip;
 import net.sf.sevenzipjbinding.SevenZipException;
 import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
+import net.sf.sevenzipjbinding.ExtractOperationResult;
+import net.sf.sevenzipjbinding.IArchiveExtractCallback;
+import net.sf.sevenzipjbinding.ICryptoGetTextPassword;
+import net.sf.sevenzipjbinding.PropID;
 import org.netbeans.api.progress.ProgressHandle;
 import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.casemodule.services.FileManager;
 import org.sleuthkit.autopsy.coreutils.FileUtil;
 import org.sleuthkit.autopsy.coreutils.Logger;
@@ -57,43 +58,36 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMonitor;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DerivedFile;
 import org.sleuthkit.datamodel.EncodedFileOutputStream;
 import org.sleuthkit.datamodel.ReadContentInputStream;
+import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;

 class SevenZipExtractor {

     private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName());
-    private static final String MODULE_NAME = EmbeddedFileExtractorModuleFactory.getModuleName();
+    private IngestServices services = IngestServices.getInstance();
+    private final IngestJobContext context;
+    private final FileTypeDetector fileTypeDetector;

     //encryption type strings
     private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
             "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
     private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
             "EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");

     //zip bomb detection
     private static final int MAX_DEPTH = 4;
     private static final int MAX_COMPRESSION_RATIO = 600;
     private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L;
     private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB

-    private IngestServices services = IngestServices.getInstance();
-    private final IngestJobContext context;
-    private final FileTypeDetector fileTypeDetector;

     private String moduleDirRelative;
     private String moduleDirAbsolute;
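The zip-bomb constants above read naturally as: flag archives nested more than four levels deep, or contents more than 600 times larger than the archive itself, with the ratio test only applying once at least 500 MB of expanded data is involved. A sketch of how the ratio test plausibly combines these values (the real check lives elsewhere in SevenZipExtractor and may differ in detail):

    public class ZipBombHeuristicDemo {

        static final int MAX_COMPRESSION_RATIO = 600;
        static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L;

        static boolean suspiciousRatio(long uncompressedSize, long compressedSize) {
            if (compressedSize <= 0 || uncompressedSize < MIN_COMPRESSION_RATIO_SIZE) {
                return false; // small items are never flagged on ratio alone
            }
            return uncompressedSize / compressedSize > MAX_COMPRESSION_RATIO;
        }

        public static void main(String[] args) {
            System.out.println(suspiciousRatio(600_000_000L, 100_000L));    // true  (ratio 6000)
            System.out.println(suspiciousRatio(600_000_000L, 10_000_000L)); // false (ratio 60)
        }
    }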
@@ -246,43 +240,44 @@ class SevenZipExtractor {
      */
     private void flagRootArchiveAsZipBomb(Archive rootArchive, AbstractFile archiveFile, String details, String escapedFilePath) {
         rootArchive.flagAsZipBomb();
-        logger.log(Level.INFO, details);
+        logger.log(Level.INFO, details); //NON-NLS
+        String msg = NbBundle.getMessage(SevenZipExtractor.class,
+                "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);
         try {
-            Collection<BlackboardAttribute> attributes = Arrays.asList(
-                    new BlackboardAttribute(
-                            TSK_SET_NAME, MODULE_NAME,
-                            "Possible Zip Bomb"),
-                    new BlackboardAttribute(
-                            TSK_DESCRIPTION, MODULE_NAME,
-                            Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())),
-                    new BlackboardAttribute(
-                            TSK_COMMENT, MODULE_NAME,
-                            details));
-            if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) {
-                BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(TSK_INTERESTING_FILE_HIT);
+            Collection<BlackboardAttribute> attributes = new ArrayList<>();
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
+                    "Possible Zip Bomb"));
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
+                    EmbeddedFileExtractorModuleFactory.getModuleName(),
+                    Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())));
+            attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
+                    EmbeddedFileExtractorModuleFactory.getModuleName(),
+                    details));
+
+            SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
+            org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
+            // Create artifact if it doesn't already exist.
+            if (!tskBlackboard.artifactExists(archiveFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+                BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
                 artifact.addAttributes(attributes);
                 try {
-                    /*
-                     * post the artifact which will index the artifact for
-                     * keyword search, and fire an event to notify UI of this
-                     * new artifact
-                     */
-                    blackboard.postArtifact(artifact, MODULE_NAME);
-                    String msg = NbBundle.getMessage(SevenZipExtractor.class,
-                            "EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
-                    services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
+                    // index the artifact for keyword search
+                    blackboard.indexArtifact(artifact);
                 } catch (Blackboard.BlackboardException ex) {
                     logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
                     MessageNotifyUtil.Notify.error(
                             Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
                 }
+                services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
+                services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
             }
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "Error creating blackboard artifact for Zip Bomb Detection for file: " + escapedFilePath, ex); //NON-NLS
+        } catch (NoCurrentCaseException ex) {
+            logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
         }
     }
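Both sides of the hunk above guard artifact creation with Blackboard.artifactExists, so re-running ingest over the same archive does not pile up duplicate interesting-file hits. Condensed into a sketch (names follow the hunk; error handling elided):

    // Check-before-create keeps repeated ingest passes idempotent.
    void flagOnce(SleuthkitCase tskCase, AbstractFile archiveFile,
            Collection<BlackboardAttribute> attributes) throws TskCoreException {
        org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
        if (!tskBlackboard.artifactExists(archiveFile,
                BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
            BlackboardArtifact artifact = archiveFile.newArtifact(
                    BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
            artifact.addAttributes(attributes);
        }
    }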
@@ -468,11 +463,13 @@ class SevenZipExtractor {
     }

     /**
-     * Unpack the file to local folder.
+     * Unpack the file to local folder and return a list of derived files
      *
      * @param archiveFile file to unpack
      * @param depthMap - a concurrent hashmap which keeps track of the depth
      * of all nested archives, key of objectID
+     *
+     * @return true if unpacking is complete
      */
     void unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap) {
         unpack(archiveFile, depthMap, null);
@@ -509,7 +506,7 @@ class SevenZipExtractor {
         //recursion depth check for zip bomb
         Archive parentAr;
         try {
-            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+            blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
         } catch (NoCurrentCaseException ex) {
             logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
             unpackSuccessful = false;
@@ -625,7 +622,7 @@ class SevenZipExtractor {
                         escapedArchiveFilePath, archiveItemPath);
                 String details = NbBundle.getMessage(SevenZipExtractor.class,
                         "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
-                services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
+                services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
                 logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, archiveItemPath}); //NON-NLS
                 logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS
                 unpackSuccessful = false;
@@ -653,7 +650,7 @@ class SevenZipExtractor {
                         localFile.createNewFile();
                     } catch (IOException e) {
                         logger.log(Level.SEVERE, "Error creating extracted file: "//NON-NLS
                                 + localFile.getAbsolutePath(), e);
                     }
                 }
             } catch (SecurityException e) {
@@ -688,7 +685,7 @@ class SevenZipExtractor {
             //inArchiveItemIndex. False indicates non-test mode
             inArchive.extract(extractionIndices, false, archiveCallBack);
-            unpackSuccessful &= archiveCallBack.wasSuccessful();
+            unpackSuccessful = unpackSuccessful & archiveCallBack.wasSuccessful();

             archiveDetailsMap = null;
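A side note on the one-line change above: for booleans the compound form and the expanded form are identical, and both use the non-short-circuiting & operator, so wasSuccessful() is evaluated either way. A tiny demonstration:

    public class BooleanAccumulateDemo {
        public static void main(String[] args) {
            boolean a = true, b = true;
            a &= false;      // pre-revert spelling
            b = b & false;   // post-revert spelling
            System.out.println(a == b); // true; both evaluate the right-hand side
        }
    }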
@@ -729,7 +726,7 @@ class SevenZipExtractor {
             String details = NbBundle.getMessage(SevenZipExtractor.class,
                     "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
                     escapedArchiveFilePath, ex.getMessage());
-            services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
+            services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
         }
     } finally {
         if (inArchive != null) {
@@ -759,21 +756,18 @@ class SevenZipExtractor {
         String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
         try {
             BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
-            artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType));
+            artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType));
             try {
-                /*
-                 * post the artifact which will index the artifact for
-                 * keyword search, and fire an event to notify UI of this
-                 * new artifact
-                 */
-                blackboard.postArtifact(artifact, MODULE_NAME);
+                // index the artifact for keyword search
+                blackboard.indexArtifact(artifact);
             } catch (Blackboard.BlackboardException ex) {
-                logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
+                logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
                 MessageNotifyUtil.Notify.error(
                         Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
             }
+            services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS
         }
@@ -782,8 +776,8 @@ class SevenZipExtractor {
                 "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
         String details = NbBundle.getMessage(SevenZipExtractor.class,
                 "EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
-                currentArchiveName, MODULE_NAME);
-        services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
+                currentArchiveName, EmbeddedFileExtractorModuleFactory.getModuleName());
+        services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
     }

     // adding unpacked extracted derived files to the job after closing relevant resources.
@@ -873,7 +867,7 @@ class SevenZipExtractor {
         private final String localAbsPath;
         private final String localRelPath;

-        InArchiveItemDetails(
+        public InArchiveItemDetails(
                 SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode,
                 String localAbsPath, String localRelPath) {
             this.unpackedNode = unpackedNode;
@@ -918,10 +912,10 @@ class SevenZipExtractor {
         private boolean unpackSuccessful = true;

-        StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
+        public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
                 AbstractFile archiveFile, ProgressHandle progressHandle,
                 Map<Integer, InArchiveItemDetails> archiveDetailsMap,
                 String password, long freeDiskSpace) {

             this.inArchive = inArchive;
             this.progressHandle = progressHandle;
@@ -946,7 +940,7 @@ class SevenZipExtractor {
          */
         @Override
         public ISequentialOutStream getStream(int inArchiveItemIndex,
                 ExtractAskMode mode) throws SevenZipException {

             this.inArchiveItemIndex = inArchiveItemIndex;
@@ -972,7 +966,7 @@ class SevenZipExtractor {
             }
         } catch (IOException ex) {
             logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
                     + "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
             return null;
         }
@@ -1004,7 +998,7 @@ class SevenZipExtractor {
                 : accessTime.getTime() / 1000;
         progressHandle.progress(archiveFile.getName() + ": "
                 + (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
                 inArchiveItemIndex);
     }
@@ -1019,7 +1013,6 @@ class SevenZipExtractor {
          */
         @Override
         public void setOperationResult(ExtractOperationResult result) throws SevenZipException {

             final SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode
                     = archiveDetailsMap.get(inArchiveItemIndex).getUnpackedNode();
             final String localRelPath = archiveDetailsMap.get(
@@ -1221,7 +1214,7 @@ class SevenZipExtractor {
             if (existingFile == null) {
                 df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(),
                         node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
-                        node.isIsFile(), node.getParent().getFile(), "", MODULE_NAME,
+                        node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(),
                         "", "", TskData.EncodingType.XOR1);
                 statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS));
             } else {
@@ -1235,7 +1228,7 @@ class SevenZipExtractor {
                 String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType();
                 df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(),
                         node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
-                        node.isIsFile(), mimeType, "", MODULE_NAME,
+                        node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(),
                         "", "", TskData.EncodingType.XOR1);
             } else {
                 //ALREADY CURRENT - SKIP
@@ -1330,8 +1323,8 @@ class SevenZipExtractor {
     }

     void addDerivedInfo(long size,
             boolean isFile,
             long ctime, long crtime, long atime, long mtime, String relLocalPath) {
         this.size = size;
         this.isFile = isFile;
         this.ctime = ctime;

View File

@ -19,24 +19,26 @@
package org.sleuthkit.autopsy.modules.encryptiondetection; package org.sleuthkit.autopsy.modules.encryptiondetection;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.logging.Level; import java.util.logging.Level;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.Blackboard; import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.Volume; import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem; import org.sleuthkit.datamodel.VolumeSystem;
@ -55,9 +57,8 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/** /**
* Create an EncryptionDetectionDataSourceIngestModule object that will * Create an EncryptionDetectionDataSourceIngestModule object that will
* detect volumes that are encrypted and create blackboard artifacts as * detect volumes that are encrypted and create blackboard artifacts as
* appropriate. * appropriate. The supplied EncryptionDetectionIngestJobSettings object is
* * used to configure the module.
* @param settings The Settings used to configure the module.
*/ */
EncryptionDetectionDataSourceIngestModule(EncryptionDetectionIngestJobSettings settings) { EncryptionDetectionDataSourceIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy(); minimumEntropy = settings.getMinimumEntropy();
@ -66,7 +67,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
@Override @Override
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException { public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
validateSettings(); validateSettings();
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard(); blackboard = Case.getCurrentCase().getServices().getBlackboard();
this.context = context; this.context = context;
} }
@ -143,9 +144,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/** /**
* Create a blackboard artifact. * Create a blackboard artifact.
* *
* @param volume The volume to be processed. * @param volume The volume to be processed.
* @param artifactType The type of artifact to create. * @param artifactType The type of artifact to create.
* @param comment A comment to be attached to the artifact. * @param comment A comment to be attached to the artifact.
* *
* @return 'OK' if the volume was processed successfully, or 'ERROR' if * @return 'OK' if the volume was processed successfully, or 'ERROR' if
* there was a problem. * there was a problem.
@ -162,14 +163,18 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
try { try {
/* /*
* post the artifact which will index the artifact for keyword * Index the artifact for keyword search.
* search, and fire an event to notify UI of this new artifact
*/ */
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); blackboard.indexArtifact(artifact);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
} }
/*
* Send an event to update the view with the new result.
*/
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
/* /*
* Make an ingest inbox message. * Make an ingest inbox message.
*/ */
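This postArtifact-to-indexArtifact change repeats throughout the revert, so the two publication styles are worth spelling out once. Both calls appear verbatim in the hunk above; the blackboard, services, artifact, and artifactType names are taken from the surrounding code:

    // Removed style: one call indexes the artifact for keyword search and
    // notifies listeners (e.g. the UI) of the new artifact.
    blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());

    // Restored style: index explicitly, then fire the module-data event.
    blackboard.indexArtifact(artifact);
    services.fireModuleDataEvent(new ModuleDataEvent(
            EncryptionDetectionModuleFactory.getModuleName(), artifactType,
            Collections.singletonList(artifact)));

postArtifact bundles indexing and the UI notification into a single call; the restored style performs the two steps separately, which is why fireModuleDataEvent reappears after each indexArtifact call below.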

View File

@ -25,11 +25,13 @@ import com.healthmarketscience.jackcess.InvalidCredentialsException;
import com.healthmarketscience.jackcess.impl.CodecProvider; import com.healthmarketscience.jackcess.impl.CodecProvider;
import com.healthmarketscience.jackcess.impl.UnsupportedCodecException; import com.healthmarketscience.jackcess.impl.UnsupportedCodecException;
import com.healthmarketscience.jackcess.util.MemFileChannel; import com.healthmarketscience.jackcess.util.MemFileChannel;
import java.io.BufferedInputStream;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.logging.Level;
import org.sleuthkit.datamodel.ReadContentInputStream;
import java.io.BufferedInputStream;
import java.io.InputStream; import java.io.InputStream;
import java.nio.BufferUnderflowException; import java.nio.BufferUnderflowException;
import java.util.logging.Level;
import org.apache.tika.exception.EncryptedDocumentException; import org.apache.tika.exception.EncryptedDocumentException;
import org.apache.tika.exception.TikaException; import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata; import org.apache.tika.metadata.Metadata;
@ -39,18 +41,18 @@ import org.apache.tika.sax.BodyContentHandler;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter; import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException; import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
@ -91,9 +93,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
/** /**
* Create a EncryptionDetectionFileIngestModule object that will detect * Create a EncryptionDetectionFileIngestModule object that will detect
* files that are either encrypted or password protected and create * files that are either encrypted or password protected and create
* blackboard artifacts as appropriate. * blackboard artifacts as appropriate. The supplied
* * EncryptionDetectionIngestJobSettings object is used to configure the
* @param settings The settings used to configure the module. * module.
*/ */
EncryptionDetectionFileIngestModule(EncryptionDetectionIngestJobSettings settings) { EncryptionDetectionFileIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy(); minimumEntropy = settings.getMinimumEntropy();
@ -106,9 +108,8 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException { public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
try { try {
validateSettings(); validateSettings();
this.context = context; this.context = context;
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
fileTypeDetector = new FileTypeDetector(); fileTypeDetector = new FileTypeDetector();
} catch (FileTypeDetector.FileTypeDetectorInitException ex) { } catch (FileTypeDetector.FileTypeDetectorInitException ex) {
throw new IngestModule.IngestModuleException("Failed to create file type detector", ex); throw new IngestModule.IngestModuleException("Failed to create file type detector", ex);
@ -130,12 +131,12 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* verify the file hasn't been deleted. * verify the file hasn't been deleted.
*/ */
if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR) && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR) && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed) && (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
&& !file.getKnown().equals(TskData.FileKnown.KNOWN) && !file.getKnown().equals(TskData.FileKnown.KNOWN)
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) { && !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
/* /*
* Is the file in FILE_IGNORE_LIST? * Is the file in FILE_IGNORE_LIST?
*/ */
@ -205,14 +206,18 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
try { try {
/* /*
* post the artifact which will index the artifact for keyword * Index the artifact for keyword search.
* search, and fire an event to notify UI of this new artifact
*/ */
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName()); blackboard.indexArtifact(artifact);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
} }
/*
* Send an event to update the view with the new result.
*/
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
/* /*
* Make an ingest inbox message. * Make an ingest inbox message.
*/ */
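The guard a few hunks up (the chain of getType() checks) decides which files this module analyzes at all. Collected into a single predicate for readability; the helper name is hypothetical, the constants are exactly those in the hunk:

    private static boolean shouldAnalyze(AbstractFile file, boolean slackFilesAllowed) {
        return !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
                && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
                && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
                && !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
                && (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
                && !file.getKnown().equals(TskData.FileKnown.KNOWN)
                && !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC);
    }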

View File

@ -1,7 +1,9 @@
CannotRunFileTypeDetection=Cannot run file type detection. CannotRunFileTypeDetection=Cannot run file type detection.
ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s). ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search.
OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Exif metadata ingest module. \n\nThe ingest module analyzes image files, extracts Exif information and posts the Exif data as results. OpenIDE-Module-Long-Description=\
Exif metadata ingest module. \n\n\
The ingest module analyzes image files, extracts Exif information and posts the Exif data as results.
OpenIDE-Module-Name=ExifParser OpenIDE-Module-Name=ExifParser
OpenIDE-Module-Short-Description=Exif metadata ingest module OpenIDE-Module-Short-Description=Exif metadata ingest module
ExifParserFileIngestModule.moduleName.text=Exif Parser ExifParserFileIngestModule.moduleName.text=Exif Parser

View File

@ -28,38 +28,39 @@ import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory; import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream; import java.io.BufferedInputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.HashSet; import java.util.HashSet;
import java.util.List;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level; import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MAKE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MODEL;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image; import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream; import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException; import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM; import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@ -69,16 +70,20 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
 * files. Ingests an image file and, if available, adds its date, latitude, * files. Ingests an image file and, if available, adds its date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact. * longitude, altitude, device model, and device make to a blackboard artifact.
*/ */
@NbBundle.Messages({"CannotRunFileTypeDetection=Cannot run file type detection."}) @NbBundle.Messages({
"CannotRunFileTypeDetection=Cannot run file type detection."
})
public final class ExifParserFileIngestModule implements FileIngestModule { public final class ExifParserFileIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private static final String MODULE_NAME = ExifParserModuleFactory.getModuleName(); private final IngestServices services = IngestServices.getInstance();
private final AtomicInteger filesProcessed = new AtomicInteger(0);
private long jobId; private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private FileTypeDetector fileTypeDetector; private FileTypeDetector fileTypeDetector;
private final HashSet<String> supportedMimeTypes = new HashSet<>(); private final HashSet<String> supportedMimeTypes = new HashSet<>();
private TimeZone timeZone = null; private TimeZone timeZone = null;
private Case currentCase;
private Blackboard blackboard; private Blackboard blackboard;
ExifParserFileIngestModule() { ExifParserFileIngestModule() {
@ -98,18 +103,18 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
} }
} }
@Messages({"ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s)."})
@Override @Override
public ProcessResult process(AbstractFile content) { public ProcessResult process(AbstractFile content) {
try { try {
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); currentCase = Case.getCurrentCaseThrows();
blackboard = currentCase.getServices().getBlackboard();
} catch (NoCurrentCaseException ex) { } catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR; return ProcessResult.ERROR;
} }
//skip unalloc //skip unalloc
if ((content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) if ((content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) { || (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
return ProcessResult.OK; return ProcessResult.OK;
} }
@ -130,9 +135,14 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
return processFile(content); return processFile(content);
} }
private ProcessResult processFile(AbstractFile file) { @Messages({"ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search."})
ProcessResult processFile(AbstractFile file) {
InputStream in = null;
BufferedInputStream bin = null;
try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file));) { try {
in = new ReadContentInputStream(file);
bin = new BufferedInputStream(in);
Collection<BlackboardAttribute> attributes = new ArrayList<>(); Collection<BlackboardAttribute> attributes = new ArrayList<>();
Metadata metadata = ImageMetadataReader.readMetadata(bin); Metadata metadata = ImageMetadataReader.readMetadata(bin);
@ -155,7 +165,7 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
} }
Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone); Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
if (date != null) { if (date != null) {
attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, MODULE_NAME, date.getTime() / 1000)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
} }
} }
@ -164,13 +174,15 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (gpsDir != null) { if (gpsDir != null) {
GeoLocation loc = gpsDir.getGeoLocation(); GeoLocation loc = gpsDir.getGeoLocation();
if (loc != null) { if (loc != null) {
attributes.add(new BlackboardAttribute(TSK_GEO_LATITUDE, MODULE_NAME, loc.getLatitude())); double latitude = loc.getLatitude();
attributes.add(new BlackboardAttribute(TSK_GEO_LONGITUDE, MODULE_NAME, loc.getLongitude())); double longitude = loc.getLongitude();
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, ExifParserModuleFactory.getModuleName(), latitude));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, ExifParserModuleFactory.getModuleName(), longitude));
} }
Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE); Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
if (altitude != null) { if (altitude != null) {
attributes.add(new BlackboardAttribute(TSK_GEO_ALTITUDE, MODULE_NAME, altitude.doubleValue())); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
} }
} }
@ -179,30 +191,36 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (devDir != null) { if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL); String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
if (StringUtils.isNotBlank(model)) { if (StringUtils.isNotBlank(model)) {
attributes.add(new BlackboardAttribute(TSK_DEVICE_MODEL, MODULE_NAME, model)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
} }
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE); String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
if (StringUtils.isNotBlank(make)) { if (StringUtils.isNotBlank(make)) {
attributes.add(new BlackboardAttribute(TSK_DEVICE_MAKE, MODULE_NAME, make)); attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
} }
} }
// Add the attributes, if there are any, to a new artifact // Add the attributes, if there are any, to a new artifact
if (!attributes.isEmpty()) { if (!attributes.isEmpty()) {
SleuthkitCase tskCase = currentCase.getSleuthkitCase();
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist. // Create artifact if it doesn't already exist.
if (!blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) { if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF, attributes)) {
BlackboardArtifact bba = file.newArtifact(TSK_METADATA_EXIF); BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
bba.addAttributes(attributes); bba.addAttributes(attributes);
try { try {
// index the artifact for keyword search // index the artifact for keyword search
blackboard.postArtifact(bba, MODULE_NAME); blackboard.indexArtifact(bba);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error( MessageNotifyUtil.Notify.error(
Bundle.ExifParserFileIngestModule_indexError_message(), bba.getDisplayName()); Bundle.ExifParserFileIngestModule_indexError_message(), bba.getDisplayName());
} }
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(),
BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF,
Collections.singletonList(bba)));
} }
} }
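Both sides of the hunk above keep the same duplicate check: artifactExists() compares the artifact type and attribute set against what is already attached to the file, presumably so a re-run does not create identical EXIF artifacts. The create-if-absent step, reduced to a sketch:

    if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF, attributes)) {
        BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
        bba.addAttributes(attributes);
        // ...index/post the artifact and notify listeners as shown above...
    }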
@ -219,12 +237,24 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
} catch (IOException ex) { } catch (IOException ex) {
logger.log(Level.WARNING, String.format("IOException when parsing image file '%s/%s' (id=%d).", file.getParentPath(), file.getName(), file.getId()), ex); //NON-NLS logger.log(Level.WARNING, String.format("IOException when parsing image file '%s/%s' (id=%d).", file.getParentPath(), file.getName(), file.getId()), ex); //NON-NLS
return ProcessResult.ERROR; return ProcessResult.ERROR;
} finally {
try {
if (in != null) {
in.close();
}
if (bin != null) {
bin.close();
}
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to close InputStream.", ex); //NON-NLS
return ProcessResult.ERROR;
}
} }
} }
/** /**
     * Checks whether EXIF extraction should be attempted. Currently checks if      * Checks whether EXIF extraction should be attempted. Currently checks if JPEG
     * JPEG, TIFF or X-WAV (by signature)      * image (by signature)
* *
* @param f file to be checked * @param f file to be checked
* *
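The stream-handling change in this file is easy to miss in the side-by-side view: the removed code used try-with-resources, while the restored code declares the streams up front and closes them in a finally block. Both shapes as a sketch, with metadata parsing and IOException handling elided:

    // Removed: the resource is closed automatically, even on exceptions.
    try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file))) {
        // ...read EXIF metadata from bin...
    }

    // Restored: manual cleanup; closing the wrapper also closes the wrapped
    // stream, so only the outermost non-null stream needs a close() call.
    InputStream in = null;
    BufferedInputStream bin = null;
    try {
        in = new ReadContentInputStream(file);
        bin = new BufferedInputStream(in);
        // ...read EXIF metadata from bin...
    } finally {
        if (bin != null) {
            bin.close();   // real code catches and logs the IOException
        } else if (in != null) {
            in.close();
        }
    }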

View File

@ -36,27 +36,27 @@ FileExtMismatchSettingsPanel.jLabel1.text=File Types:
FileExtMismatchSettingsPanel.newExtButton.text=New Extension FileExtMismatchSettingsPanel.newExtButton.text=New Extension
FileExtMismatchSettingsPanel.newMimePrompt.message=Add a new MIME file type: FileExtMismatchSettingsPanel.newMimePrompt.message=Add a new MIME file type:
FileExtMismatchSettingsPanel.newMimePrompt.title=New MIME FileExtMismatchSettingsPanel.newMimePrompt.title=New MIME
FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty! FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.message=MIME type text is empty\!
FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.title=Empty type FileExtMismatchSettingsPanel.newMimePrompt.emptyMime.title=Empty type
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.message=MIME type not supported\!
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.title=Type not supported FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotSupported.title=Type not supported
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists! FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.message=MIME type already exists\!
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.title=Type already exists FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeExists.title=Type already exists
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.message=MIME type is not detectable by this module. FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.message=MIME type is not detectable by this module.
FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.title=Type not detectable FileExtMismatchSettingsPanel.newMimePrompt.mimeTypeNotDetectable.title=Type not detectable
FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected! FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.message=No MIME type selected\!
FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.title=No type selected FileExtMismatchSettingsPanel.removeTypeButton.noneSelected.title=No type selected
FileExtMismatchSettingsPanel.newExtPrompt.message=Add an allowed extension: FileExtMismatchSettingsPanel.newExtPrompt.message=Add an allowed extension:
FileExtMismatchSettingsPanel.newExtPrompt.title=New allowed extension FileExtMismatchSettingsPanel.newExtPrompt.title=New allowed extension
FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty! FileExtMismatchSettingsPanel.newExtPrompt.empty.message=Extension text is empty\!
FileExtMismatchSettingsPanel.newExtPrompt.empty.title=Extension text empty FileExtMismatchSettingsPanel.newExtPrompt.empty.title=Extension text empty
FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected! FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.message=No MIME type selected\!
FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.title=No MIME type selected FileExtMismatchSettingsPanel.newExtPrompt.noMimeType.title=No MIME type selected
FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists! FileExtMismatchSettingsPanel.newExtPrompt.extExists.message=Extension already exists\!
FileExtMismatchSettingsPanel.newExtPrompt.extExists.title=Extension already exists FileExtMismatchSettingsPanel.newExtPrompt.extExists.title=Extension already exists
FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected! FileExtMismatchSettingsPanel.removeExtButton.noneSelected.message=No extension selected\!
FileExtMismatchSettingsPanel.removeExtButton.noneSelected.title=No extension selected FileExtMismatchSettingsPanel.removeExtButton.noneSelected.title=No extension selected
FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected! FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.message=No MIME type selected\!
FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.title=No MIME type selected FileExtMismatchSettingsPanel.removeExtButton.noMimeTypeSelected.title=No MIME type selected
FileExtMismatchSettingsPanel.removeTypeButton.toolTipText= FileExtMismatchSettingsPanel.removeTypeButton.toolTipText=
FileExtMismatchModuleSettingsPanel.checkAllRadioButton.text=Check all file types FileExtMismatchModuleSettingsPanel.checkAllRadioButton.text=Check all file types

View File

@ -18,6 +18,7 @@
*/ */
package org.sleuthkit.autopsy.modules.fileextmismatch; package org.sleuthkit.autopsy.modules.fileextmismatch;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Set; import java.util.Set;
import java.util.logging.Level; import java.util.logging.Level;
@ -25,6 +26,7 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.FileIngestModule;
@ -32,10 +34,10 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleSettings.CHECK_TYPE; import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleSettings.CHECK_TYPE;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector; import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
@ -108,7 +110,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
@Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."}) @Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."})
public ProcessResult process(AbstractFile abstractFile) { public ProcessResult process(AbstractFile abstractFile) {
try { try {
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
} catch (NoCurrentCaseException ex) { } catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR; return ProcessResult.ERROR;
@ -119,15 +121,15 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
// skip non-files // skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS) || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|| (abstractFile.isFile() == false)) { || (abstractFile.isFile() == false)) {
return ProcessResult.OK; return ProcessResult.OK;
} }
        // deleted files often have content that was not theirs and therefore causes mismatch         // deleted files often have content that was not theirs and therefore causes mismatch
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC))
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) { || (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
return ProcessResult.OK; return ProcessResult.OK;
} }
@ -143,17 +145,14 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED); BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED);
try { try {
/* // index the artifact for keyword search
* post the artifact which will index the artifact for blackboard.indexArtifact(bart);
* keyword search, and fire an event to notify UI of this
* new artifact
*/
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message()); MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());
} }
services.fireModuleDataEvent(new ModuleDataEvent(FileExtMismatchDetectorModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart)));
} }
return ProcessResult.OK; return ProcessResult.OK;
} catch (TskException ex) { } catch (TskException ex) {
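The @Messages annotation on process() and the Bundle call in its catch block are two halves of the NetBeans localization pattern: the annotation declares the string, and the annotation processor generates a Bundle accessor whose name is the key with non-identifier characters replaced by underscores. A sketch using only names from this file:

    @Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."})
    public ProcessResult process(AbstractFile abstractFile) {
        // ...on an indexing failure, the generated accessor supplies the text:
        MessageNotifyUtil.Notify.error(
                FileExtMismatchDetectorModuleFactory.getModuleName(),
                Bundle.FileExtMismatchIngestModule_indexError_message());
        return ProcessResult.OK;
    }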

View File

@ -18,42 +18,40 @@
*/ */
package org.sleuthkit.autopsy.modules.filetypeid; package org.sleuthkit.autopsy.modules.filetypeid;
import java.util.Arrays; import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.logging.Level; import java.util.logging.Level;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult; import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFileTypesException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
/** /**
* Detects the type of a file based on signature (magic) values. Posts results * Detects the type of a file based on signature (magic) values. Posts results
* to the blackboard. * to the blackboard.
*/ */
@NbBundle.Messages({"CannotRunFileTypeDetection=Unable to run file type detection."}) @NbBundle.Messages({
"CannotRunFileTypeDetection=Unable to run file type detection."
})
public class FileTypeIdIngestModule implements FileIngestModule { public class FileTypeIdIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName()); private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
private long jobId;
private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>(); private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter(); private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private long jobId;
private FileTypeDetector fileTypeDetector; private FileTypeDetector fileTypeDetector;
/** /**
@ -148,34 +146,26 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* @param fileType The file type rule for categorizing the hit. * @param fileType The file type rule for categorizing the hit.
*/ */
private void createInterestingFileHit(AbstractFile file, FileType fileType) { private void createInterestingFileHit(AbstractFile file, FileType fileType) {
List<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(),
fileType.getInterestingFilesSetName()),
new BlackboardAttribute(
TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(),
fileType.getMimeType()));
try { try {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
attributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));
Case currentCase = Case.getCurrentCaseThrows(); Case currentCase = Case.getCurrentCaseThrows();
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist. // Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) { if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT); BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes); artifact.addAttributes(attributes);
try { try {
/* currentCase.getServices().getBlackboard().indexArtifact(artifact);
* post the artifact which will index the artifact for
* keyword search, and fire an event to notify UI of this
* new artifact
*/
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
} }
} }
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) { } catch (NoCurrentCaseException ex) {
@ -237,4 +227,5 @@ public class FileTypeIdIngestModule implements FileIngestModule {
long matchTime = 0; long matchTime = 0;
long numFiles = 0; long numFiles = 0;
} }
} }
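The change to createInterestingFileHit above is stylistic; the same two attributes are produced either way. Side by side as a sketch (names as in the hunk; the first form assumes static imports of TSK_SET_NAME and TSK_CATEGORY):

    // Removed form: immutable fixed-size list, statically imported constants.
    List<BlackboardAttribute> attributes = Arrays.asList(
            new BlackboardAttribute(TSK_SET_NAME,
                    FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()),
            new BlackboardAttribute(TSK_CATEGORY,
                    FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));

    // Restored form: mutable list, fully qualified constants.
    Collection<BlackboardAttribute> attributes = new ArrayList<>();
    attributes.add(new BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME,
            FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
    attributes.add(new BlackboardAttribute(
            BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY,
            FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));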

View File

@ -40,7 +40,10 @@ ImportCentralRepoDbProgressDialog.errorParsingFile.message=Error parsing hash se
ImportCentralRepoDbProgressDialog.linesProcessed.message=\ hashes processed ImportCentralRepoDbProgressDialog.linesProcessed.message=\ hashes processed
ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress ImportCentralRepoDbProgressDialog.title.text=Central Repository Import Progress
OpenIDE-Module-Display-Category=Ingest Module OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=Hash Set ingest module. \n\nThe ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\nThe module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration. OpenIDE-Module-Long-Description=\
Hash Set ingest module. \n\n\
The ingest module analyzes files in the disk image and marks them as "known" (based on NSRL hashset lookup for "known" files) and "bad / interesting" (based on one or more hash sets supplied by the user).\n\n\
The module also contains additional non-ingest tools that are integrated in the GUI, such as file lookup by hash and hash set configuration.
OpenIDE-Module-Name=HashDatabases OpenIDE-Module-Name=HashDatabases
OptionsCategory_Name_HashDatabase=Hash Sets OptionsCategory_Name_HashDatabase=Hash Sets
OptionsCategory_Keywords_HashDatabase=Hash Sets OptionsCategory_Keywords_HashDatabase=Hash Sets
@ -169,7 +172,10 @@ HashDbSearchThread.name.searching=Searching
HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found. HashDbSearchThread.noMoreFilesWithMD5Msg=No other files with the same MD5 hash were found.
ModalNoButtons.indexingDbsTitle=Indexing hash sets ModalNoButtons.indexingDbsTitle=Indexing hash sets
ModalNoButtons.indexingDbTitle=Indexing hash set ModalNoButtons.indexingDbTitle=Indexing hash set
ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \nThe generated index will be left unusable. If you choose to continue,\nplease delete the corresponding -md5.idx file in the hash folder.\nExit indexing? ModalNoButtons.exitHashDbIndexingMsg=You are about to exit out of indexing your hash sets. \n\
The generated index will be left unusable. If you choose to continue,\n\
please delete the corresponding -md5.idx file in the hash folder.\n\
Exit indexing?
ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing ModalNoButtons.dlgTitle.unfinishedIndexing=Unfinished Indexing
ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set ModalNoButtons.indexThis.currentlyIndexing1Db=Currently indexing 1 hash set
ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0} ModalNoButtons.indexThese.currentlyIndexing1OfNDbs=Currently indexing 1 of {0}
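The multi-line values above lean on java.util.Properties syntax: a trailing backslash joins the next natural line and drops its leading whitespace, \n embeds a real newline, and \! simply escapes an exclamation mark, which is only special at the start of a line. A small self-contained demonstration with a hypothetical key:

    import java.io.StringReader;
    import java.util.Properties;

    public class ContinuationDemo {
        public static void main(String[] args) throws Exception {
            String src = "msg=Line one.\\n\\\n"   // \n = embedded newline; trailing \ = continuation
                       + "    Line two\\!\n";     // leading spaces dropped; \! becomes !
            Properties p = new Properties();
            p.load(new StringReader(src));
            System.out.println(p.getProperty("msg"));  // prints "Line one.", then "Line two!"
        }
    }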

View File

@ -21,6 +21,7 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
@ -29,6 +30,7 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor; import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
@ -37,9 +39,9 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter; import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb; import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
@ -100,7 +102,7 @@ public class HashDbIngestModule implements FileIngestModule {
* object is used to configure the module. * object is used to configure the module.
* *
* @param settings The module settings. * @param settings The module settings.
* *
* @throws NoCurrentCaseException If there is no open case. * @throws NoCurrentCaseException If there is no open case.
*/ */
HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException { HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
@ -168,7 +170,7 @@ public class HashDbIngestModule implements FileIngestModule {
@Override @Override
public ProcessResult process(AbstractFile file) { public ProcessResult process(AbstractFile file) {
try { try {
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard(); blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
} catch (NoCurrentCaseException ex) { } catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR; return ProcessResult.ERROR;
@ -176,7 +178,7 @@ public class HashDbIngestModule implements FileIngestModule {
// Skip unallocated space files. // Skip unallocated space files.
if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) { || file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
return ProcessResult.OK; return ProcessResult.OK;
} }
@ -354,11 +356,8 @@ public class HashDbIngestModule implements FileIngestModule {
badFile.addAttributes(attributes); badFile.addAttributes(attributes);
try { try {
/* // index the artifact for keyword search
* post the artifact which will index the artifact for keyword blackboard.indexArtifact(badFile);
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(badFile, moduleName);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error( MessageNotifyUtil.Notify.error(
@ -401,6 +400,7 @@ public class HashDbIngestModule implements FileIngestModule {
abstractFile.getName() + md5Hash, abstractFile.getName() + md5Hash,
badFile)); badFile));
} }
services.fireModuleDataEvent(new ModuleDataEvent(moduleName, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
} catch (TskException ex) { } catch (TskException ex) {
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
} }
@ -414,7 +414,7 @@ public class HashDbIngestModule implements FileIngestModule {
* @param knownHashSets The list of hash sets for "known" files. * @param knownHashSets The list of hash sets for "known" files.
*/ */
private static synchronized void postSummary(long jobId, private static synchronized void postSummary(long jobId,
List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) { List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId); IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
totalsForIngestJobs.remove(jobId); totalsForIngestJobs.remove(jobId);
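Across the HashDbIngestModule hunks, per-job totals sit in a static map keyed by ingest job id behind synchronized accessors, since module instances may run on several ingest threads at once, and postSummary drains each entry exactly once. Reduced to a sketch (the IngestJobTotals fields are as listed at the end of the class):

    private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();

    private static synchronized IngestJobTotals getTotalsForIngestJobs(long ingestJobId) {
        // one shared totals object per job, created on first use
        return totalsForIngestJobs.computeIfAbsent(ingestJobId, id -> new IngestJobTotals());
    }

    private static synchronized void postSummary(long jobId,
            List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
        IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
        totalsForIngestJobs.remove(jobId); // ensure the summary posts only once
        // ...format jobTotals into an ingest inbox message...
    }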

View File

@ -30,12 +30,12 @@ import java.util.logging.Level;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException; import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil; import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils; import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException; import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
@ -58,7 +58,7 @@ final class CallLogAnalyzer {
/** /**
* Find call logs given an ingest job context and index the results. * Find call logs given an ingest job context and index the results.
* *
* @param context The ingest job context. * @param context The ingest job context.
*/ */
public void findCallLogs(IngestJobContext context) { public void findCallLogs(IngestJobContext context) {
@ -69,7 +69,7 @@ final class CallLogAnalyzer {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return; return;
} }
blackboard = openCase.getSleuthkitCase().getBlackboard(); blackboard = openCase.getServices().getBlackboard();
List<AbstractFile> absFiles; List<AbstractFile> absFiles;
try { try {
SleuthkitCase skCase = openCase.getSleuthkitCase(); SleuthkitCase skCase = openCase.getSleuthkitCase();
@ -98,7 +98,7 @@ final class CallLogAnalyzer {
/** /**
* Index results for call logs found in the database. * Index results for call logs found in the database.
* *
* @param DatabasePath The path to the database. * @param DatabasePath The path to the database.
* @param fileId The ID of the file associated with artifacts. * @param fileId The ID of the file associated with artifacts.
*/ */
@ -162,12 +162,8 @@ final class CallLogAnalyzer {
bba.addAttributes(attributes); bba.addAttributes(attributes);
try { try {
/* // index the artifact for keyword search
* post the artifact which will index the artifact for blackboard.indexArtifact(bba);
* keyword search, and fire an event to notify UI of
* this new artifact
*/
blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) { } catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error( MessageNotifyUtil.Notify.error(
@ -188,5 +184,7 @@ final class CallLogAnalyzer {
} catch (Exception e) { } catch (Exception e) {
logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e); //NON-NLS logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e); //NON-NLS
} }
} }
} }
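CallLogAnalyzer, ContactAnalyzer, and TextMessageAnalyzer all follow one shape: query the case database for candidate SQLite files by name, copy each one out of the image, then parse it. A sketch of the lookup-and-extract half; the file name and temp location are illustrative, not from the diff:

    SleuthkitCase skCase = openCase.getSleuthkitCase();
    List<AbstractFile> absFiles = skCase.findAllFilesWhere("name = 'logs.db'"); // hypothetical name
    for (AbstractFile dbFile : absFiles) {
        File localCopy = new File(openCase.getTempDirectory(), dbFile.getName()); // assumed temp dir
        ContentUtils.writeToFile(dbFile, localCopy); // extract the DB from the image
        // ...open localCopy over JDBC and post artifacts (see the sketch in the next file)...
    }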

View File

@ -35,12 +35,12 @@ import java.util.logging.Level;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.ReadContentInputStream;
@ -74,8 +74,8 @@ final class ContactAnalyzer {
             logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
             return;
         }
-        blackboard = openCase.getSleuthkitCase().getBlackboard();
+        blackboard = openCase.getServices().getBlackboard();
         List<AbstractFile> absFiles;
         try {
             SleuthkitCase skCase = openCase.getSleuthkitCase();
@ -112,7 +112,7 @@
         if (DatabasePath == null || DatabasePath.isEmpty()) {
             return;
         }
         Case currentCase;
         try {
             currentCase = Case.getCurrentCaseThrows();
@ -120,7 +120,7 @@
             logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
             return;
         }
         Statement statement = null;
         try {
             Class.forName("org.sqlite.JDBC"); //NON-NLS //load JDBC driver
@ -174,12 +174,8 @@
                         bba.addAttributes(attributes);
                         try {
-                            /*
-                             * post the artifact which will index the artifact for
-                             * keyword search, and fire an event to notify UI of
-                             * this new artifact
-                             */
-                            blackboard.postArtifact(bba, moduleName);
+                            // index the artifact for keyword search
+                            blackboard.indexArtifact(bba);
                         } catch (Blackboard.BlackboardException ex) {
                             logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
                             MessageNotifyUtil.Notify.error(
View File
@ -31,12 +31,12 @@ import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.ReadContentInputStream;
@ -73,7 +73,7 @@ class TextMessageAnalyzer {
             logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
             return;
         }
-        blackboard = openCase.getSleuthkitCase().getBlackboard();
+        blackboard = openCase.getServices().getBlackboard();
         try {
             SleuthkitCase skCase = openCase.getSleuthkitCase();
             absFiles = skCase.findAllFilesWhere("name ='mmssms.db'"); //NON-NLS //get exact file name
@ -168,11 +168,8 @@
                         bba.addAttributes(attributes);
                         try {
-                            /*
-                             * post the artifact which will index the artifact for
-                             * keyword search, and fire an event to notify UI of
-                             * this new artifact
-                             */
-                            blackboard.postArtifact(bba, moduleName);
+                            // index the artifact for keyword search
+                            blackboard.indexArtifact(bba);
                         } catch (Blackboard.BlackboardException ex) {
                             logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
                             MessageNotifyUtil.Notify.error(
@ -194,5 +191,7 @@ class TextMessageAnalyzer {
         } catch (Exception e) {
             logger.log(Level.SEVERE, "Error parsing text messages to Blackboard", e); //NON-NLS
         }
     }
 }
View File
@ -81,8 +81,8 @@ FilesSetRulePanel.nameTextField.text=
 FilesSetRulePanel.ruleNameLabel.text=Rule Name (Optional):
 FilesSetRulePanel.messages.emptyNameCondition=You must specify a name pattern for this rule.
 FilesSetRulePanel.messages.invalidNameRegex=The name regular expression is not valid:\n\n{0}
-FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, ", <, or > unless it is a regular expression.
-FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, ", <, or > unless it is a regular expression.
+FilesSetRulePanel.messages.invalidCharInName=The name cannot contain \\, /, :, *, ?, \", <, or > unless it is a regular expression.
+FilesSetRulePanel.messages.invalidCharInPath=The path cannot contain \\, :, *, ?, \", <, or > unless it is a regular expression.
 FilesSetRulePanel.messages.invalidPathRegex=The path regular expression is not valid:\n\n{0}
 FilesSetDefsPanel.doFileSetsDialog.duplicateRuleSet.text=Rule set with name {0} already exists.
 FilesSetRulePanel.pathSeparatorInfoLabel.text=Use / as path separator
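
The invalidCharInName/invalidCharInPath edits above (and the \= edit in the PhotoRec bundle further down) only change how the quote and equals sign are escaped, not the displayed text: when java.util.Properties loads a value, a backslash before a character with no special meaning is simply dropped. A standalone, illustrative check:

    import java.io.StringReader;
    import java.util.Properties;

    public class PropertiesEscapeCheck {

        public static void main(String[] args) throws Exception {
            Properties props = new Properties();
            // k1 uses an escaped quote, k2 a bare quote, as in the diff above.
            props.load(new StringReader("k1=a \\\" b\nk2=a \" b"));
            // Both load to the same runtime string: a " b
            System.out.println(props.getProperty("k1").equals(props.getProperty("k2"))); // true
        }
    }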
View File
@ -19,8 +19,8 @@
 package org.sleuthkit.autopsy.modules.interestingitems;
 
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@ -29,6 +29,7 @@ import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.ingest.FileIngestModule;
@ -36,13 +37,10 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
 import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
 import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskData;
@ -50,15 +48,15 @@ import org.sleuthkit.datamodel.TskData;
  * A file ingest module that generates interesting files set hit artifacts for
  * files that match interesting files set definitions.
  */
-@NbBundle.Messages({"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."})
+@NbBundle.Messages({
+    "FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."
+})
 final class FilesIdentifierIngestModule implements FileIngestModule {
 
     private static final Object sharedResourcesLock = new Object();
     private static final Logger logger = Logger.getLogger(FilesIdentifierIngestModule.class.getName());
     private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
     private static final Map<Long, List<FilesSet>> interestingFileSetsByJob = new ConcurrentHashMap<>();
-    private static final String MODULE_NAME = InterestingItemsIngestModuleFactory.getModuleName();
 
     private final FilesIdentifierIngestJobSettings settings;
     private final IngestServices services = IngestServices.getInstance();
     private IngestJobContext context;
@ -74,6 +72,9 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
         this.settings = settings;
     }
 
+    /**
+     * @inheritDoc
+     */
     @Override
     public void startUp(IngestJobContext context) throws IngestModuleException {
         this.context = context;
@ -99,16 +100,21 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
         }
     }
 
+    /**
+     * @inheritDoc
+     */
     @Override
     @Messages({"FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search."})
     public ProcessResult process(AbstractFile file) {
+        Case currentCase;
         try {
-            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+            currentCase = Case.getCurrentCaseThrows();
         } catch (NoCurrentCaseException ex) {
             logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
             return ProcessResult.ERROR;
         }
+        blackboard = currentCase.getServices().getBlackboard();
 
         // Skip slack space files.
         if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)) {
             return ProcessResult.OK;
@ -120,46 +126,48 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
                 String ruleSatisfied = filesSet.fileIsMemberOf(file);
                 if (ruleSatisfied != null) {
                     try {
-                        // Post an interesting files set hit artifact to the
-                        // blackboard.
-                        Collection<BlackboardAttribute> attributes = Arrays.asList(
-                                /*
-                                 * Add a set name attribute to the artifact. This
-                                 * adds a fair amount of redundant data to the
-                                 * attributes table (i.e., rows that differ only in
-                                 * artifact id), but doing otherwise would requires
-                                 * reworking the interesting files set hit artifact. */
-                                new BlackboardAttribute(
-                                        TSK_SET_NAME, MODULE_NAME,
-                                        filesSet.getName()),
-                                /*
-                                 * Add a category attribute to the artifact to
-                                 * record the interesting files set membership rule
-                                 * that was satisfied. */
-                                new BlackboardAttribute(
-                                        TSK_CATEGORY, MODULE_NAME,
-                                        ruleSatisfied)
-                        );
+                        String moduleName = InterestingItemsIngestModuleFactory.getModuleName();
+                        Collection<BlackboardAttribute> attributes = new ArrayList<>();
+
+                        // Add a set name attribute to the artifact. This adds a
+                        // fair amount of redundant data to the attributes table
+                        // (i.e., rows that differ only in artifact id), but doing
+                        // otherwise would requires reworking the interesting files
+                        // set hit artifact.
+                        BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, filesSet.getName());
+                        attributes.add(setNameAttribute);
+
+                        // Add a category attribute to the artifact to record the
+                        // interesting files set membership rule that was satisfied.
+                        BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, moduleName, ruleSatisfied);
+                        attributes.add(ruleNameAttribute);
+
+                        org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
                         // Create artifact if it doesn't already exist.
-                        if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
-                            BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
+                        if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+                            BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
                             artifact.addAttributes(attributes);
 
                             try {
-                                // Post thet artifact to the blackboard.
-                                blackboard.postArtifact(artifact, MODULE_NAME);
+                                // index the artifact for keyword search
+                                blackboard.indexArtifact(artifact);
                             } catch (Blackboard.BlackboardException ex) {
                                 logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
                                 MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
                             }
+                            services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
 
                             // make an ingest inbox message
                             StringBuilder detailsSb = new StringBuilder();
-                            detailsSb.append("File: ").append(file.getParentPath()).append(file.getName()).append("<br/>\n");
-                            detailsSb.append("Rule Set: ").append(filesSet.getName());
+                            detailsSb.append("File: " + file.getParentPath() + file.getName() + "<br/>\n");
+                            detailsSb.append("Rule Set: " + filesSet.getName());
 
                             services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
-                                    "Interesting File Match: " + filesSet.getName() + "(" + file.getName() + ")",
+                                    "Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
                                     detailsSb.toString(),
                                     file.getName(),
                                     artifact));
@ -172,6 +180,9 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
         return ProcessResult.OK;
     }
 
+    /**
+     * @inheritDoc
+     */
     @Override
     public void shutDown() {
         if (context != null) {
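
Taken together, the hunks above restore the pre-Timeline flow in process(): deduplicate with artifactExists(), create and index the new artifact, then fire a ModuleDataEvent so the UI picks up the hit. A condensed sketch of that flow, assuming the services and blackboard collaborators shown in the diff and omitting the error notification it performs:

    import java.util.Collection;
    import java.util.Collections;
    import org.sleuthkit.autopsy.casemodule.services.Blackboard;
    import org.sleuthkit.autopsy.ingest.IngestServices;
    import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;

    class InterestingFileHitSketch {

        private final IngestServices services = IngestServices.getInstance();

        void postHit(org.sleuthkit.datamodel.Blackboard tskBlackboard, Blackboard blackboard,
                AbstractFile file, String moduleName, Collection<BlackboardAttribute> attributes)
                throws TskCoreException, Blackboard.BlackboardException {
            // Only create the artifact if an identical one is not already there.
            if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
                BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
                artifact.addAttributes(attributes);
                blackboard.indexArtifact(artifact); // keyword search
                // Notify listeners (e.g., the directory tree) of the new data.
                services.fireModuleDataEvent(new ModuleDataEvent(moduleName,
                        BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT,
                        Collections.singletonList(artifact)));
            }
        }
    }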
View File
@ -21,7 +21,7 @@ PhotoRecIngestModule.complete.totalParsetime=Total Parsing Time:
 PhotoRecIngestModule.complete.photoRecResults=PhotoRec Results
 PhotoRecIngestModule.NotEnoughDiskSpace.detail.msg=PhotoRec error processing {0} with {1} Not enough space on primary disk to save unallocated space.
 PhotoRecIngestModule.cancelledByUser=PhotoRec cancelled by user.
-PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value = {0} when scanning {1}
+PhotoRecIngestModule.error.exitValue=PhotoRec carver returned error exit value \= {0} when scanning {1}
 PhotoRecIngestModule.error.msg=Error processing {0} with PhotoRec carver.
 PhotoRecIngestModule.complete.numberOfErrors=Number of Errors while Carving:
 PhotoRecCarverIngestJobSettingsPanel.detectionSettingsLabel.text=PhotoRec Settings
View File
@ -1,3 +0,0 @@
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All plaso parsers except chrome_cache and the ones listed below are run. chrome_cache duplicates data collected by the RecentActivity module. The parsers below add significantly to the processing time and should only be enabled if the events they produce are needed.
View File
@ -1,29 +0,0 @@
# {0} - file that events are from
PlasoIngestModule.artifact.progress=Adding events to case: {0}
PlasoIngestModule.bad.imageFile=Cannot find image file name and path
PlasoIngestModule.completed=Plaso Processing Completed
PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation
PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image.
PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.
PlasoIngestModule.error.posting.artifact=Error Posting Artifact
PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.
PlasoIngestModule.error.running.psort=Error running Psort, see log file.
PlasoIngestModule.event.datetime=Event Date Time
PlasoIngestModule.event.description=Event Description
PlasoIngestModule.exception.adding.artifact=Exception Adding Artifact
PlasoIngestModule.exception.database.error=Error while trying to read into a sqlite db.
PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.
PlasoIngestModule.executable.not.found=Plaso Executable Not Found.
PlasoIngestModule.has.run=Plaso Plugin has been run.
PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled
PlasoIngestModule.psort.cancelled=psort run was canceled
PlasoIngestModule.requires.windows=Plaso module requires windows.
PlasoIngestModule.running.psort=Running Psort
PlasoIngestModule.starting.log2timeline=Starting Log2timeline
PlasoIngestModule_exception_find_file=Exception finding file.
PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings
PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source.
PlasoModuleFactory_moduleName=Plaso
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All plaso parsers except chrome_cache and the ones listed below are run. chrome_cache duplicates data collected by the RecentActivity module. The parsers below add significantly to the processing time and should only be enabled if the events they produce are needed.
View File
@ -1,444 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import static java.util.Objects.nonNull;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.EventType;
/**
* Data source ingest module that runs Plaso against the image.
*/
public class PlasoIngestModule implements DataSourceIngestModule {
private static final Logger logger = Logger.getLogger(PlasoIngestModule.class.getName());
private static final String MODULE_NAME = PlasoModuleFactory.getModuleName();
private static final String PLASO = "plaso"; //NON-NLS
private static final String PLASO64 = "plaso-20180818-amd64";//NON-NLS
private static final String PLASO32 = "plaso-20180818-win32";//NON-NLS
private static final String LOG2TIMELINE_EXECUTABLE = "Log2timeline.exe";//NON-NLS
private static final String PSORT_EXECUTABLE = "psort.exe";//NON-NLS
private static final String COOKIE = "cookie";//NON-NLS
private static final int LOG2TIMELINE_WORKERS = 2;
private File log2TimeLineExecutable;
private File psortExecutable;
private final PlasoModuleSettings settings;
private IngestJobContext context;
private Case currentCase;
private FileManager fileManager;
private Image image;
private AbstractFile previousFile = null; // cache used when looking up files in Autopsy DB
PlasoIngestModule(PlasoModuleSettings settings) {
this.settings = settings;
}
@NbBundle.Messages({
"PlasoIngestModule.executable.not.found=Plaso Executable Not Found.",
"PlasoIngestModule.requires.windows=Plaso module requires windows.",
"PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image."})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.PlasoIngestModule_requires_windows());
}
try {
log2TimeLineExecutable = locateExecutable(LOG2TIMELINE_EXECUTABLE);
psortExecutable = locateExecutable(PSORT_EXECUTABLE);
} catch (FileNotFoundException exception) {
logger.log(Level.WARNING, "Plaso executable not found.", exception); //NON-NLS
throw new IngestModuleException(Bundle.PlasoIngestModule_executable_not_found(), exception);
}
Content dataSource = context.getDataSource();
if (!(dataSource instanceof Image)) {
throw new IngestModuleException(Bundle.PlasoIngestModule_dataSource_not_an_image());
}
image = (Image) dataSource;
}
@NbBundle.Messages({
"PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.",
"PlasoIngestModule.error.running.psort=Error running Psort, see log file.",
"PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.",
"PlasoIngestModule.starting.log2timeline=Starting Log2timeline",
"PlasoIngestModule.running.psort=Running Psort",
"PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled",
"PlasoIngestModule.psort.cancelled=psort run was canceled",
"PlasoIngestModule.bad.imageFile=Cannot find image file name and path",
"PlasoIngestModule.completed=Plaso Processing Completed",
"PlasoIngestModule.has.run=Plaso Plugin has been run."})
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
assert dataSource.equals(image);
statusHelper.switchToDeterminate(100);
currentCase = Case.getCurrentCase();
fileManager = currentCase.getServices().getFileManager();
String currentTime = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss z", Locale.US).format(System.currentTimeMillis());//NON-NLS
Path moduleOutputPath = Paths.get(currentCase.getModuleDirectory(), PLASO, currentTime);
try {
Files.createDirectories(moduleOutputPath);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error creating Plaso module output directory.", ex); //NON-NLS
return ProcessResult.ERROR;
}
// Run log2timeline
logger.log(Level.INFO, "Starting Plaso Run.");//NON-NLS
statusHelper.progress(Bundle.PlasoIngestModule_starting_log2timeline(), 0);
ProcessBuilder log2TimeLineCommand = buildLog2TimeLineCommand(moduleOutputPath, image);
try {
Process log2TimeLineProcess = log2TimeLineCommand.start();
try (BufferedReader log2TimeLineOutpout = new BufferedReader(new InputStreamReader(log2TimeLineProcess.getInputStream()))) {
L2TStatusProcessor statusReader = new L2TStatusProcessor(log2TimeLineOutpout, statusHelper, moduleOutputPath);
new Thread(statusReader, "log2timeline status reader").start(); //NON-NLS
ExecUtil.waitForTermination(LOG2TIMELINE_EXECUTABLE, log2TimeLineProcess, new DataSourceIngestModuleProcessTerminator(context));
statusReader.cancel();
}
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Log2timeline run was canceled"); //NON-NLS
return ProcessResult.OK;
}
if (Files.notExists(moduleOutputPath.resolve(PLASO))) {
logger.log(Level.WARNING, "Error running log2timeline: there was no storage file."); //NON-NLS
return ProcessResult.ERROR;
}
// sort the output
statusHelper.progress(Bundle.PlasoIngestModule_running_psort(), 33);
ProcessBuilder psortCommand = buildPsortCommand(moduleOutputPath);
ExecUtil.execute(psortCommand, new DataSourceIngestModuleProcessTerminator(context));
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "psort run was canceled"); //NON-NLS
return ProcessResult.OK;
}
Path plasoFile = moduleOutputPath.resolve("plasodb.db3"); //NON-NLS
if (Files.notExists(plasoFile)) {
logger.log(Level.SEVERE, "Error running Psort: there was no sqlite db file."); //NON-NLS
return ProcessResult.ERROR;
}
// parse the output and make artifacts
createPlasoArtifacts(plasoFile.toString(), statusHelper);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error running Plaso.", ex);//NON-NLS
return ProcessResult.ERROR;
}
IngestMessage message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
Bundle.PlasoIngestModule_has_run(),
Bundle.PlasoIngestModule_completed());
IngestServices.getInstance().postMessage(message);
return ProcessResult.OK;
}
private ProcessBuilder buildLog2TimeLineCommand(Path moduleOutputPath, Image image) {
//make a csv list of disabled parsers.
String parsersString = settings.getParsers().entrySet().stream()
.filter(entry -> entry.getValue() == false)
.map(entry -> "!" + entry.getKey()) // '!' prepended to parsername disables it. //NON-NLS
.collect(Collectors.joining(","));//NON-NLS
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
"\"" + log2TimeLineExecutable + "\"", //NON-NLS
"--vss-stores", "all", //NON-NLS
"-z", image.getTimeZone(), //NON-NLS
"--partitions", "all", //NON-NLS
"--hasher_file_size_limit", "1", //NON-NLS
"--hashers", "none", //NON-NLS
"--parsers", "\"" + parsersString + "\"",//NON-NLS
"--no_dependencies_check", //NON-NLS
"--workers", String.valueOf(LOG2TIMELINE_WORKERS),//NON-NLS
moduleOutputPath.resolve(PLASO).toString(),
image.getPaths()[0]
);
processBuilder.redirectError(moduleOutputPath.resolve("log2timeline_err.txt").toFile()); //NON-NLS
return processBuilder;
}
static private ProcessBuilder buildProcessWithRunAsInvoker(String... commandLine) {
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
/* Add an environment variable to force log2timeline/psort to run with
* the same permissions Autopsy uses. */
processBuilder.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS
return processBuilder;
}
private ProcessBuilder buildPsortCommand(Path moduleOutputPath) {
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
"\"" + psortExecutable + "\"", //NON-NLS
"-o", "4n6time_sqlite", //NON-NLS
"-w", moduleOutputPath.resolve("plasodb.db3").toString(), //NON-NLS
moduleOutputPath.resolve(PLASO).toString()
);
processBuilder.redirectOutput(moduleOutputPath.resolve("psort_output.txt").toFile()); //NON-NLS
processBuilder.redirectError(moduleOutputPath.resolve("psort_err.txt").toFile()); //NON-NLS
return processBuilder;
}
private static File locateExecutable(String executableName) throws FileNotFoundException {
String architectureFolder = PlatformUtil.is64BitOS() ? PLASO64 : PLASO32;
String executableToFindName = Paths.get(PLASO, architectureFolder, executableName).toString();
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PlasoIngestModule.class.getPackage().getName(), false);
if (null == exeFile || exeFile.canExecute() == false) {
throw new FileNotFoundException(executableName + " executable not found.");
}
return exeFile;
}
@NbBundle.Messages({
"PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.",
"PlasoIngestModule.event.datetime=Event Date Time",
"PlasoIngestModule.event.description=Event Description",
"PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation ",
"# {0} - file that events are from",
"PlasoIngestModule.artifact.progress=Adding events to case: {0}"})
private void createPlasoArtifacts(String plasoDb, DataSourceIngestModuleProgress statusHelper) {
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
String sqlStatement = "SELECT substr(filename,1) AS filename, "
+ " strftime('%s', datetime) AS epoch_date, "
+ " description, "
+ " source, "
+ " type, "
+ " sourcetype "
+ " FROM log2timeline "
+ " WHERE source NOT IN ('FILE', "
+ " 'WEBHIST') " // bad dates and duplicates with what we have.
+ " AND sourcetype NOT IN ('UNKNOWN', "
+ " 'PE Import Time');"; // lots of bad dates //NON-NLS
try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + plasoDb); //NON-NLS
ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) {
while (resultSet.next()) {
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Cancelled Plaso Artifact Creation."); //NON-NLS
return;
}
String currentFileName = resultSet.getString("filename"); //NON-NLS
statusHelper.progress(Bundle.PlasoIngestModule_artifact_progress(currentFileName), 66);
Content resolvedFile = getAbstractFile(currentFileName);
if (resolvedFile == null) {
logger.log(Level.INFO, "File {0} from Plaso output not found in case. Associating it with the data source instead.", currentFileName);//NON-NLS
resolvedFile = image;
}
Collection<BlackboardAttribute> bbattributes = Arrays.asList(
new BlackboardAttribute(
TSK_DATETIME, MODULE_NAME,
resultSet.getLong("epoch_date")), //NON-NLS
new BlackboardAttribute(
TSK_DESCRIPTION, MODULE_NAME,
resultSet.getString("description")),//NON-NLS
new BlackboardAttribute(
TSK_TL_EVENT_TYPE, MODULE_NAME,
findEventSubtype(currentFileName, resultSet)));
try {
BlackboardArtifact bbart = resolvedFile.newArtifact(TSK_TL_EVENT);
bbart.addAttributes(bbattributes);
try {
/* Post the artifact which will index the artifact for
* keyword search, and fire an event to notify UI of
* this new artifact */
blackboard.postArtifact(bbart, MODULE_NAME);
} catch (BlackboardException ex) {
logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS
}
}
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS
}
}
private AbstractFile getAbstractFile(String file) {
Path path = Paths.get(file);
String fileName = path.getFileName().toString();
String filePath = path.getParent().toString().replaceAll("\\\\", "/");//NON-NLS
if (filePath.endsWith("/") == false) {//NON-NLS
filePath += "/";//NON-NLS
}
// check the cached file
//TODO: would we reduce 'cache misses' if we retrieved the events sorted by file? Is that overhead worth it?
if (previousFile != null
&& previousFile.getName().equalsIgnoreCase(fileName)
&& previousFile.getParentPath().equalsIgnoreCase(filePath)) {
return previousFile;
}
try {
List<AbstractFile> abstractFiles = fileManager.findFiles(fileName, filePath);
if (abstractFiles.size() == 1) {// TODO: why do we bother with this check. also we don't cache the file...
return abstractFiles.get(0);
}
for (AbstractFile resolvedFile : abstractFiles) {
// double check its an exact match
if (filePath.equalsIgnoreCase(resolvedFile.getParentPath())) {
// cache it for next time
previousFile = resolvedFile;
return resolvedFile;
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception finding file.", ex);
}
return null;
}
/**
* Determine the event_type_id of the event from the plaso information.
*
* @param fileName The name of the file this event is from.
* @param row The row returned from the log2timeline table of the plaso
* output.
*
* @return the event_type_id of the EventType of the given event.
*
* @throws SQLException
*/
private long findEventSubtype(String fileName, ResultSet row) throws SQLException {
switch (row.getString("source")) {
case "WEBHIST": //These shouldn't actually be present, but keeping the logic just in case...
if (fileName.toLowerCase().contains(COOKIE)
|| row.getString("type").toLowerCase().contains(COOKIE)) {//NON-NLS
return EventType.WEB_COOKIE.getTypeID();
} else {
return EventType.WEB_HISTORY.getTypeID();
}
case "EVT":
case "LOG":
return EventType.LOG_ENTRY.getTypeID();
case "REG":
switch (row.getString("sourcetype").toLowerCase()) {//NON-NLS
case "unknown : usb entries":
case "unknown : usbstor entries":
return EventType.DEVICES_ATTACHED.getTypeID();
default:
return EventType.REGISTRY.getTypeID();
}
default:
return EventType.OTHER.getTypeID();
}
}
/**
* Runs in a thread and reads the output of log2timeline. It redirects the
* output both to a log file and to the status message of the Plaso ingest
* module progress bar.
*/
private static class L2TStatusProcessor implements Runnable, Cancellable {
private final BufferedReader log2TimeLineOutpout;
private final DataSourceIngestModuleProgress statusHelper;
volatile private boolean cancelled = false;
private final Path outputPath;
private L2TStatusProcessor(BufferedReader log2TimeLineOutpout, DataSourceIngestModuleProgress statusHelper, Path outputPath) throws IOException {
this.log2TimeLineOutpout = log2TimeLineOutpout;
this.statusHelper = statusHelper;
this.outputPath = outputPath;
}
@Override
public void run() {
try (BufferedWriter writer = Files.newBufferedWriter(outputPath.resolve("log2timeline_output.txt"));) {//NON-NLS
String line = log2TimeLineOutpout.readLine();
while (cancelled == false && nonNull(line)) {
statusHelper.progress(line);
writer.write(line);
writer.newLine();
line = log2TimeLineOutpout.readLine();
}
writer.flush();
} catch (IOException ex) {
logger.log(Level.WARNING, "Error reading log2timeline output stream.", ex);//NON-NLS
}
}
@Override
public boolean cancel() {
cancelled = true;
return true;
}
}
}
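
One detail worth noting from the deleted module above: log2timeline receives its parser list as a single comma-separated string in which a leading '!' disables a parser. A standalone sketch of how the settings map (see PlasoModuleSettings below) becomes that argument; the map contents here are illustrative, not the module's full default set:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class ParserExclusionSketch {

        public static void main(String[] args) {
            Map<String, Boolean> parsers = new HashMap<>();
            parsers.put("winreg", false);  // disabled
            parsers.put("pe", false);      // disabled
            parsers.put("msiecf", true);   // enabled parsers emit no token

            String parsersString = parsers.entrySet().stream()
                    .filter(entry -> entry.getValue() == false)
                    .map(entry -> "!" + entry.getKey()) // '!' disables the parser
                    .collect(Collectors.joining(","));

            System.out.println(parsersString); // e.g. "!winreg,!pe" (order may vary)
        }
    }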
View File
@ -1,123 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* A factory that creates data source ingest modules that run Plaso against an
* image and saves the storage file to module output.
*/
@ServiceProvider(service = IngestModuleFactory.class)
@NbBundle.Messages({"PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings"})
public class PlasoModuleFactory implements IngestModuleFactory {
@NbBundle.Messages({"PlasoModuleFactory_moduleName=Plaso"})
static String getModuleName() {
return Bundle.PlasoModuleFactory_moduleName();
}
@Override
public String getModuleDisplayName() {
return getModuleName();
}
@NbBundle.Messages({"PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source."})
@Override
public String getModuleDescription() {
return Bundle.PlasoModuleFactory_moduleDesc();
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public boolean isDataSourceIngestModuleFactory() {
return true;
}
@Override
public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings settings) {
assert settings instanceof PlasoModuleSettings;
if (settings instanceof PlasoModuleSettings) {
return new PlasoIngestModule((PlasoModuleSettings) settings);
}
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
}
@Override
public boolean hasGlobalSettingsPanel() {
return false;
}
/**
* This module typically takes a very long time and is therefore not enabled
* by default.
*
* @return False.
*/
@Override
public boolean isEnabledByDefault() {
return false;
}
@Override
public IngestModuleGlobalSettingsPanel getGlobalSettingsPanel() {
throw new UnsupportedOperationException();
}
@Override
public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
return new PlasoModuleSettings();
}
@Override
public boolean hasIngestJobSettingsPanel() {
return true;
}
@Override
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
assert settings instanceof PlasoModuleSettings;
if (settings instanceof PlasoModuleSettings) {
return new PlasoModuleSettingsPanel((PlasoModuleSettings) settings);
}
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
}
@Override
public boolean isFileIngestModuleFactory() {
return false;
}
@Override
public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) {
throw new UnsupportedOperationException();
}
}
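
For context, factories like the one deleted above are registered via @ServiceProvider and discovered at runtime through the NetBeans Lookup mechanism; a minimal, illustrative lookup (not the loader Autopsy actually ships):

    import java.util.Collection;
    import org.openide.util.Lookup;
    import org.sleuthkit.autopsy.ingest.IngestModuleFactory;

    class FactoryDiscoverySketch {

        Collection<? extends IngestModuleFactory> discover() {
            // Returns every registered factory, which included
            // PlasoModuleFactory while it was still part of the build.
            return Lookup.getDefault().lookupAll(IngestModuleFactory.class);
        }
    }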
View File
@ -1,92 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import com.google.common.collect.ImmutableMap;
import java.util.HashMap;
import java.util.Map;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
/**
* Settings for the Plaso Ingest Module.
*/
public class PlasoModuleSettings implements IngestModuleIngestJobSettings {
private static final long serialVersionUID = 1L;
/** Map from parser name (or match pattern) to its enabled state. */
final Map<String, Boolean> parsers = new HashMap<>();
/**
* Get an immutable map from parser name to its enabled state. Parsers
* mapped to true or with no entry will be enabled. Parsers mapped to false,
* will be disabled.
*/
Map<String, Boolean> getParsers() {
return ImmutableMap.copyOf(parsers);
}
/**
* Constructor. The PlasoModuleSettings will have the default parsers
* (winreg, pe, chrome, firefox, internet explorer) disabled.
*/
public PlasoModuleSettings() {
parsers.put("winreg", false);
parsers.put("pe", false);
//chrome
parsers.put("chrome_preferences", false);
parsers.put("chrome_cache", false);
parsers.put("chrome_27_history", false);
parsers.put("chrome_8_history", false);
parsers.put("chrome_cookies", false);
parsers.put("chrome_extension_activity", false);
//firefox
parsers.put("firefox_cache", false);
parsers.put("firefox_cache2", false);
parsers.put("firefox_cookies", false);
parsers.put("firefox_downloads", false);
parsers.put("firefox_history", false);
//Internet Explorer
parsers.put("msiecf", false);
parsers.put("msie_webcache", false);
}
/**
* Gets the serialization version number.
*
* @return A serialization version number.
*/
@Override
public long getVersionNumber() {
return serialVersionUID;
}
/**
* Set the given parser enabled/disabled
*
* @param parserName The name of the parser to enable/disable
* @param selected The new state (enabled/disabled) for the given parser.
*/
void setParserEnabled(String parserName, boolean selected) {
parsers.put(parserName, selected);
}
}
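
A short usage sketch for the settings class above (illustrative, and necessarily from code in the same package, since the accessors are package-private): the constructor disables the listed parsers, and the settings panel's checkboxes re-enable them one at a time.

    PlasoModuleSettings settings = new PlasoModuleSettings();
    settings.setParserEnabled("winreg", true);              // opt the Registry parser back in
    Map<String, Boolean> snapshot = settings.getParsers();  // immutable copy
    assert snapshot.get("winreg");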
View File
@ -1,84 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
<AuxValues>
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
</AuxValues>
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="plasoParserInfoTextArea" max="32767" attributes="0"/>
<Component id="peCheckBox" min="-2" max="-2" attributes="0"/>
<Component id="winRegCheckBox" min="-2" max="-2" attributes="0"/>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" max="-2" attributes="0"/>
<Component id="plasoParserInfoTextArea" pref="188" max="32767" attributes="0"/>
<EmptySpace type="separate" max="-2" attributes="0"/>
<Component id="winRegCheckBox" min="-2" max="-2" attributes="0"/>
<EmptySpace type="unrelated" min="-2" max="-2" attributes="0"/>
<Component id="peCheckBox" min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
</Layout>
<SubComponents>
<Component class="javax.swing.JCheckBox" name="winRegCheckBox">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.winRegCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="winRegCheckBoxActionPerformed"/>
</Events>
</Component>
<Component class="javax.swing.JCheckBox" name="peCheckBox">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.peCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="peCheckBoxActionPerformed"/>
</Events>
</Component>
<Component class="javax.swing.JTextArea" name="plasoParserInfoTextArea">
<Properties>
<Property name="editable" type="boolean" value="false"/>
<Property name="background" type="java.awt.Color" editor="org.netbeans.beaninfo.editors.ColorEditor">
<Color blue="f0" green="f0" id="Panel.background" palette="3" red="f0" type="palette"/>
</Property>
<Property name="columns" type="int" value="20"/>
<Property name="lineWrap" type="boolean" value="true"/>
<Property name="rows" type="int" value="5"/>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.plasoParserInfoTextArea.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="wrapStyleWord" type="boolean" value="true"/>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
</Properties>
</Component>
</SubComponents>
</Form>
View File
@ -1,115 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* Settings panel for the PlasoIngestModule.
*/
public class PlasoModuleSettingsPanel extends IngestModuleIngestJobSettingsPanel {
private final PlasoModuleSettings settings;
public PlasoModuleSettingsPanel(PlasoModuleSettings settings) {
this.settings = settings;
initComponents();
}
/** This method is called from within the constructor to initialize the
* form. WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
winRegCheckBox = new javax.swing.JCheckBox();
peCheckBox = new javax.swing.JCheckBox();
plasoParserInfoTextArea = new javax.swing.JTextArea();
org.openide.awt.Mnemonics.setLocalizedText(winRegCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.winRegCheckBox.text")); // NOI18N
winRegCheckBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
winRegCheckBoxActionPerformed(evt);
}
});
org.openide.awt.Mnemonics.setLocalizedText(peCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.peCheckBox.text")); // NOI18N
peCheckBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
peCheckBoxActionPerformed(evt);
}
});
plasoParserInfoTextArea.setEditable(false);
plasoParserInfoTextArea.setBackground(javax.swing.UIManager.getDefaults().getColor("Panel.background"));
plasoParserInfoTextArea.setColumns(20);
plasoParserInfoTextArea.setLineWrap(true);
plasoParserInfoTextArea.setRows(5);
plasoParserInfoTextArea.setText(org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.plasoParserInfoTextArea.text")); // NOI18N
plasoParserInfoTextArea.setWrapStyleWord(true);
plasoParserInfoTextArea.setBorder(null);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(plasoParserInfoTextArea)
.addComponent(peCheckBox)
.addComponent(winRegCheckBox))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(plasoParserInfoTextArea, javax.swing.GroupLayout.DEFAULT_SIZE, 188, Short.MAX_VALUE)
.addGap(18, 18, 18)
.addComponent(winRegCheckBox)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(peCheckBox)
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
private void winRegCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_winRegCheckBoxActionPerformed
settings.setParserEnabled("winreg", winRegCheckBox.isSelected());
}//GEN-LAST:event_winRegCheckBoxActionPerformed
private void peCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_peCheckBoxActionPerformed
settings.setParserEnabled("pe", peCheckBox.isSelected());
}//GEN-LAST:event_peCheckBoxActionPerformed
@Override
public IngestModuleIngestJobSettings getSettings() {
return settings;
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JCheckBox peCheckBox;
private javax.swing.JTextArea plasoParserInfoTextArea;
private javax.swing.JCheckBox winRegCheckBox;
// End of variables declaration//GEN-END:variables
}
View File
@ -1,15 +1,15 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2013-2018 Basis Technology Corp. * Copyright 2013-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * You may obtain a copy of the License at
* *
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* *
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -18,23 +18,18 @@
 */
package org.sleuthkit.autopsy.modules.stix;
-import java.util.Arrays;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.logging.Level;
-import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -43,20 +38,18 @@ import org.sleuthkit.datamodel.TskCoreException;
 */
class StixArtifactData {
-    private static final String MODULE_NAME = "Stix";
    private AbstractFile file;
    private final String observableId;
    private final String objType;
    private static final Logger logger = Logger.getLogger(StixArtifactData.class.getName());
-    StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
+    public StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
        file = a_file;
        observableId = a_observableId;
        objType = a_objType;
    }
-    StixArtifactData(long a_objId, String a_observableId, String a_objType) {
+    public StixArtifactData(long a_objId, String a_observableId, String a_objType) {
        try {
            Case case1 = Case.getCurrentCaseThrows();
            SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
@ -69,35 +62,39 @@ class StixArtifactData {
    }
    @Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
        "StixArtifactData.noOpenCase.errMsg=No open case available."})
    public void createArtifact(String a_title) throws TskCoreException {
-        Blackboard blackboard;
+        Case currentCase;
        try {
-            blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+            currentCase = Case.getCurrentCaseThrows();
        } catch (NoCurrentCaseException ex) {
            logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
            MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_noOpenCase_errMsg(), ex.getLocalizedMessage());
            return;
        }
-        String setName = "STIX Indicator - " + StringUtils.defaultIfBlank(a_title, "(no title)"); //NON-NLS
-        Collection<BlackboardAttribute> attributes = Arrays.asList(
-                new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, setName),
-                new BlackboardAttribute(TSK_TITLE, MODULE_NAME, observableId),
-                new BlackboardAttribute(TSK_CATEGORY, MODULE_NAME, objType));
+        String setName;
+        if (a_title != null) {
+            setName = "STIX Indicator - " + a_title; //NON-NLS
+        } else {
+            setName = "STIX Indicator - (no title)"; //NON-NLS
+        }
+        Collection<BlackboardAttribute> attributes = new ArrayList<>();
+        attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, "Stix", setName)); //NON-NLS
+        attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, "Stix", observableId)); //NON-NLS
+        attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, "Stix", objType)); //NON-NLS
+        org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
        // Create artifact if it doesn't already exist.
-        if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
-            BlackboardArtifact bba = file.newArtifact(TSK_INTERESTING_FILE_HIT);
+        if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
+            BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
            bba.addAttributes(attributes);
            try {
-                /*
-                 * post the artifact which will index the artifact for keyword
-                 * search, and fire an event to notify UI of this new artifact
-                 */
-                blackboard.postArtifact(bba, MODULE_NAME);
+                // index the artifact for keyword search
+                Blackboard blackboard = currentCase.getServices().getBlackboard();
+                blackboard.indexArtifact(bba);
            } catch (Blackboard.BlackboardException ex) {
                logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
                MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());
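
For readers skimming the hunks above, the net effect of the revert on createArtifact() is a check-then-create-then-index sequence that touches two different Blackboard classes. A minimal sketch of the post-revert flow, reconstructed from the hunks above (currentCase, file, and attributes come from the surrounding method):

    // Sketch only: the dedup check uses the datamodel Blackboard, while keyword-search
    // indexing goes through the case-services Blackboard after the revert.
    org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
    if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
        BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
        bba.addAttributes(attributes);
        currentCase.getServices().getBlackboard().indexArtifact(bba); // index for keyword search
    }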
@ -87,7 +87,7 @@ FileReportDataTypes.path.text=Full Path
FileReportText.getName.text=Files - Text
FileReportText.getDesc.text=A delimited text file containing information about individual files in the case.
ReportBodyFile.progress.querying=Querying files...
-ReportBodyFile.ingestWarning.text=Warning, this report was run before ingest services completed!
+ReportBodyFile.ingestWarning.text=Warning, this report was run before ingest services completed\!
ReportBodyFile.progress.loading=Loading files...
ReportBodyFile.progress.processing=Now processing {0}...
ReportBodyFile.getName.text=TSK Body File
@ -229,13 +229,13 @@ ReportHTML.getName.text=HTML Report
ReportHTML.getDesc.text=A report about results and tagged items in HTML format.
ReportHTML.writeIndex.title=for case {0}
ReportHTML.writeIndex.noFrames.msg=Your browser is not compatible with our frame setup.
-ReportHTML.writeIndex.noFrames.seeNav=Please see <a href="content\nav.html">the navigation page</a> for artifact links,
-ReportHTML.writeIndex.seeSum=and <a href="contentsummary.html">the summary page</a> for a case summary.
+ReportHTML.writeIndex.noFrames.seeNav=Please see <a href\="content\nav.html">the navigation page</a> for artifact links,
+ReportHTML.writeIndex.seeSum=and <a href\="content\summary.html">the summary page</a> for a case summary.
ReportHTML.writeNav.title=Report Navigation
ReportHTML.writeNav.h1=Report Navigation
ReportHTML.writeNav.summary=Case Summary
ReportHTML.writeSum.title=Case Summary
-ReportHTML.writeSum.warningMsg=<span>Warning, this report was run before ingest services completed!</span>
+ReportHTML.writeSum.warningMsg=<span>Warning, this report was run before ingest services completed\!</span>
#
# autopsy/test/scripts/regression.py._html_report_diff() uses reportGenOn.text, caseName, caseNum,
# examiner as a regex signature to skip report.html and summary.html
@ -23,8 +23,7 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.datamodel.Blackboard;
-import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
@ -66,7 +65,7 @@ final class CustomArtifactType {
     * @throws BlackboardException If there is an error adding any of the types.
     */
    static void addToCaseDatabase() throws Blackboard.BlackboardException, NoCurrentCaseException {
-        Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+        Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
        artifactType = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAME, ARTIFACT_DISPLAY_NAME);
        intAttrType = blackboard.getOrAddAttributeType(INT_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, INT_ATTR_DISPLAY_NAME);
        doubleAttrType = blackboard.getOrAddAttributeType(DOUBLE_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, DOUBLE_ATTR_DISPLAY_NAME);
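
The getOrAddArtifactType()/getOrAddAttributeType() calls are idempotent: on a rerun they return the already-registered type instead of failing. A hedged usage sketch (the "TSK_EXAMPLE*" names are hypothetical, not part of this change):

    // Assumes the casemodule.services.Blackboard API used in the hunk above.
    Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
    BlackboardArtifact.Type exampleType = blackboard.getOrAddArtifactType(
            "TSK_EXAMPLE", "Example Artifact");              // hypothetical type name
    BlackboardAttribute.Type exampleAttr = blackboard.getOrAddAttributeType(
            "TSK_EXAMPLE_COUNT",
            BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER,
            "Example Count");                                // hypothetical attribute name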
@ -21,11 +21,11 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
-import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
@ -21,11 +21,11 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.TskCoreException;
/**
@ -21,22 +21,23 @@ package org.sleuthkit.autopsy.test;
import java.util.ArrayList;
import java.util.Collection;
import java.util.logging.Level;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
/**
- * A file ingest module that creates some interesting artifacts with attributes
- * based on files for test purposes.
+ * A file ingest module that creates some interesting artifacts
+ * with attributes based on files for test purposes.
 */
@NbBundle.Messages({
    "InterestingArtifactCreatorIngestModule.exceptionMessage.errorCreatingCustomType=Error creating custom artifact type."
@ -54,9 +55,9 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapter {
    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        try {
-            Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+            Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
            artifactType = blackboard.getOrAddArtifactType(INT_ARTIFACT_TYPE_NAME, INT_ARTIFACT_DISPLAY_NAME);
        } catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
            throw new IngestModuleException(Bundle.InterestingArtifactCreatorIngestModule_exceptionMessage_errorCreatingCustomType(), ex);
        }
    }
@ -76,7 +77,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapter {
     * type.
     */
    int randomArtIndex = (int) (Math.random() * 3);
-        Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
+        Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
        BlackboardArtifact.Type artifactTypeBase = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAMES[randomArtIndex], ARTIFACT_DISPLAY_NAMES[randomArtIndex]);
        BlackboardArtifact artifactBase = file.newArtifact(artifactTypeBase.getTypeID());
        Collection<BlackboardAttribute> baseAttributes = new ArrayList<>();
@ -2,8 +2,6 @@ CTL_MakeTimeline=Timeline
CTL_TimeLineTopComponentAction=TimeLineTopComponent
CTL_TimeLineTopComponent=Timeline
-FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval.
-FilteredEventsModel.timeRangeProperty.errorTitle=Timeline
OpenTimelineAction.displayName=Timeline
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.
@ -11,7 +9,7 @@ PrompDialogManager.buttonType.continueNoUpdate=Continue Without Updating
PrompDialogManager.buttonType.showTimeline=Continue
PrompDialogManager.buttonType.update=Update DB
PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?
-PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.
+PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.
PromptDialogManager.progressDialog.title=Populating Timeline Data
PromptDialogManager.rebuildPrompt.details=Details
PromptDialogManager.rebuildPrompt.headerText=The Timeline DB is incomplete and/or out of date. Some events may be missing or inaccurate and some features may be unavailable.
@ -34,6 +32,16 @@ Timeline.goToButton.text=Go To:
Timeline.yearBarChart.x.years=Years
Timeline.resultPanel.loading=Loading...
+TimeLineController.errorTitle=Timeline error.
+TimeLineController.outOfDate.errorMessage=Error determining if the timeline is out of date. We will assume it should be updated. See the logs for more details.
+TimeLineController.rebuildReasons.incompleteOldSchema=The Timeline events database was previously populated with incomplete information: Some features may be unavailable or non-functional unless you update the events database.
+TimeLineController.rebuildReasons.ingestWasRunning=The Timeline events database was previously populated while ingest was running: Some events may be missing, incomplete, or inaccurate.
+TimeLineController.rebuildReasons.outOfDate=The event data is out of date: Not all events will be visible.
+TimeLineController.rebuildReasons.outOfDateError=Could not determine if the timeline data is out of date.
+TimeLineController.setEventsDBStale.errMsgNotStale=Failed to mark the timeline db as not stale. Some results may be out of date or missing.
+TimeLineController.setEventsDBStale.errMsgStale=Failed to mark the timeline db as stale. Some results may be out of date or missing.
+TimeLinecontroller.setIngestRunning.errMsgNotRunning=Failed to mark the timeline db as populated while ingest was not running. Some results may be out of date or missing.
+TimeLineController.setIngestRunning.errMsgRunning=Failed to mark the timeline db as populated while ingest was running. Some results may be out of date or missing.
TimeLinecontroller.updateNowQuestion=Do you want to update the events database now?
TimelineFrame.title=Timeline
TimelinePanel.jButton1.text=6m
@ -1,701 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.collections.ObservableSet;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.DescriptionLoD;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
import org.sleuthkit.datamodel.timeline.EventType;
import org.sleuthkit.datamodel.timeline.EventTypeZoomLevel;
import org.sleuthkit.datamodel.timeline.TimelineEvent;
import org.sleuthkit.datamodel.timeline.TimelineFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HashHitsFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HashSetFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TagNameFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.timeline.TimelineFilter.TextFilter;
/**
* This class acts as the model for a TimelineView
*
* Views can register listeners on properties returned by methods.
*
* This class is implemented as a filtered view into an underlying
* TimelineManager.
*
* Maintainers, NOTE: as many methods as possible should cache their results so
* as to avoid unnecessary db calls through the TimelineManager -jm
*
* Concurrency Policy: TimelineManager is internally synchronized, so methods
* that only access the TimelineManager atomically do not need further
* synchronization. All other member state variables should only be accessed
* with intrinsic lock of containing FilteredEventsModel held.
*
*/
public final class FilteredEventsModel {
private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());
private final TimelineManager eventManager;
private final Case autoCase;
private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
//Filter and zoom state
private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper< EventTypeZoomLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(EventTypeZoomLevel.BASE_TYPE);
private final ReadOnlyObjectWrapper< DescriptionLoD> requestedLOD = new ReadOnlyObjectWrapper<>(DescriptionLoD.SHORT);
// end Filter and zoom state
//caches
private final LoadingCache<Object, Long> maxCache;
private final LoadingCache<Object, Long> minCache;
private final LoadingCache<Long, TimelineEvent> idToEventCache;
private final LoadingCache<ZoomState, Map<EventType, Long>> eventCountsCache;
/** Map from datasource id to datasource name. */
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableSet< String> hashSets = FXCollections.observableSet();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
// end caches
/**
* Make a DataSourceFilter from an entry from the datasourcesMap.
*
* @param dataSourceEntry A map entry from datasource id to datasource name.
*
* @return A new DataSourceFilter for the given datasourcesMap entry.
*/
private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry<Long, String> dataSourceEntry) {
return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
}
public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
this.autoCase = autoCase;
this.eventManager = autoCase.getSleuthkitCase().getTimelineManager();
populateFilterData();
//caches
idToEventCache = CacheBuilder.newBuilder()
.maximumSize(5000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(new CacheLoaderImpl<>(eventManager::getEventById));
eventCountsCache = CacheBuilder.newBuilder()
.maximumSize(1000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(new CacheLoaderImpl<>(this::countEventsByType));
maxCache = CacheBuilder.newBuilder()
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxTime()));
minCache = CacheBuilder.newBuilder()
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
InvalidationListener filterSyncListener = observable -> {
RootFilterState rootFilter = filterProperty().get();
syncFilters(rootFilter);
requestedFilter.set(rootFilter.copyOf());
};
datasourcesMap.addListener(filterSyncListener);
hashSets.addListener(filterSyncListener);
tagNames.addListener(filterSyncListener);
requestedFilter.set(getDefaultFilter());
requestedZoomState.addListener(observable -> {
final ZoomState zoomState = requestedZoomState.get();
if (zoomState != null) {
synchronized (FilteredEventsModel.this) {
requestedTypeZoom.set(zoomState.getTypeZoomLevel());
requestedFilter.set(zoomState.getFilterState());
requestedTimeRange.set(zoomState.getTimeRange());
requestedLOD.set(zoomState.getDescriptionLOD());
}
}
});
requestedZoomState.bind(currentStateProperty);
}
/**
* get the count of all events that fit the given zoom params organized by
* the EventType of the level specified in the zoomState
*
* @param zoomState The params that control what events to count and how to
* organize the returned map
*
* @return a map from event type( of the requested level) to event counts
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
private Map<EventType, Long> countEventsByType(ZoomState zoomState) throws TskCoreException {
if (zoomState.getTimeRange() == null) {
return Collections.emptyMap();
} else {
return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000,
zoomState.getTimeRange().getEndMillis() / 1000,
zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel());
}
}
public TimelineManager getEventManager() {
return eventManager;
}
public SleuthkitCase getSleuthkitCase() {
return autoCase.getSleuthkitCase();
}
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
return eventManager.getSpanningInterval(timeRange, filter, timeZone);
}
/**
* Readonly observable property for the current ZoomState
*
* @return A readonly observable property for the current ZoomState.
*/
synchronized public ReadOnlyObjectProperty<ZoomState> zoomStateProperty() {
return requestedZoomState.getReadOnlyProperty();
}
/**
* Get the current ZoomState
*
* @return The current ZoomState
*/
synchronized public ZoomState getZoomState() {
return requestedZoomState.get();
}
/**
* Update the data used to determine the available filters.
*/
synchronized private void populateFilterData() throws TskCoreException {
SleuthkitCase skCase = autoCase.getSleuthkitCase();
hashSets.addAll(eventManager.getHashSetNames());
//because there is no way to remove a datasource we only add to this map.
for (DataSource ds : eventManager.getSleuthkitCase().getDataSources()) {
datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
}
//should this only be tags applied to files or event bearing artifacts?
tagNames.setAll(skCase.getTagNamesInUse());
}
/**
* "sync" the given root filter with the state of the casee: Disable filters
* for tags that are not in use in the case, and add new filters for tags,
* hashsets, and datasources. that don't have them. New filters are selected
* by default.
*
* @param rootFilterState the filter state to modify so it is consistent
* with the tags in use in the case
*/
public void syncFilters(RootFilterState rootFilterState) {
TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
for (TagName tagName : tagNames) {
tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
}
for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
}
DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
for (String hashSet : hashSets) {
hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
}
}
/**
* Get a read only view of the time range currently in view.
*
* @return A read only view of the time range currently in view.
*/
@NbBundle.Messages({
"FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
"FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
if (requestedTimeRange.get() == null) {
try {
requestedTimeRange.set(getSpanningInterval());
} catch (TskCoreException timelineCacheException) {
MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
}
}
return requestedTimeRange.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<DescriptionLoD> descriptionLODProperty() {
return requestedLOD.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<RootFilterState> filterProperty() {
return requestedFilter.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
return requestedTypeZoom.getReadOnlyProperty();
}
/**
* The time range currently in view.
*
* @return The time range currently in view.
*/
synchronized public Interval getTimeRange() {
return getZoomState().getTimeRange();
}
synchronized public DescriptionLoD getDescriptionLOD() {
return getZoomState().getDescriptionLOD();
}
synchronized public RootFilterState getFilterState() {
return getZoomState().getFilterState();
}
synchronized public EventTypeZoomLevel getEventTypeZoom() {
return getZoomState().getTypeZoomLevel();
}
/** Get the default filter used at startup.
*
* @return the default filter used at startup
*/
public synchronized RootFilterState getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
datasourcesMap.entrySet().forEach(dataSourceEntry
-> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));
HashHitsFilter hashHitsFilter = new HashHitsFilter();
hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
TagsFilter tagsFilter = new TagsFilter();
tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
return new RootFilterState(new RootFilter(new HideKnownFilter(),
tagsFilter,
hashHitsFilter,
new TextFilter(),
new EventTypeFilter(EventType.ROOT_EVENT_TYPE),
dataSourcesFilter,
fileTypesFilter,
Collections.emptySet()));
}
public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException {
return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone);
}
public TimelineEvent getEventById(Long eventID) throws TskCoreException {
try {
return idToEventCache.get(eventID);
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached event from ID", ex);
}
}
public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
Set<TimelineEvent> events = new HashSet<>();
for (Long id : eventIDs) {
events.add(getEventById(id));
}
return events;
}
/**
* get a count of tagnames applied to the given event ids as a map from
* tagname displayname to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tagname displayname to count of applications
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
return eventManager.getTagCountsByTagName(eventIDsWithTags);
}
public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {
final Interval overlap;
RootFilter intersection;
synchronized (this) {
overlap = getSpanningInterval().overlap(timeRange);
intersection = getFilterState().intersect(filter).getActiveFilter();
}
return eventManager.getEventIDs(overlap, intersection);
}
/**
* Return the number of events that pass the requested filter and are within
* the given time range.
*
* NOTE: this method does not change the requested time range
*
* @param timeRange
*
* @return
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Map<EventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
final RootFilterState filter;
final EventTypeZoomLevel typeZoom;
synchronized (this) {
filter = getFilterState();
typeZoom = getEventTypeZoom();
}
try {
return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null));
} catch (ExecutionException executionException) {
throw new TskCoreException("Error getting cached event counts.`1", executionException);
}
}
/**
* @return The smallest interval spanning all the events from the case,
* ignoring any filters or requested ranges.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Interval getSpanningInterval() throws TskCoreException {
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
}
/**
* Get the smallest interval spanning all the given events.
*
* @param eventIDs The IDs of the events to get a spanning interval around.
*
* @return the smallest interval spanning all the given events
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
return eventManager.getSpanningInterval(eventIDs);
}
/**
* @return the time (in seconds from unix epoch) of the absolutely first
* event available from the repository, ignoring any filters or
* requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMinTime() throws TskCoreException {
try {
return minCache.get("min"); // NON-NLS
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached min time.", ex);
}
}
/**
* @return the time (in seconds from unix epoch) of the absolutely last
* event available from the repository, ignoring any filters or
* requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMaxTime() throws TskCoreException {
try {
return maxCache.get("max"); // NON-NLS
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached max time.", ex);
}
}
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
ContentTag contentTag = evt.getAddedTag();
Content content = contentTag.getContent();
Set<Long> updatedEventIDs = addTag(content.getId(), null, contentTag);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
BlackboardArtifactTag artifactTag = evt.getAddedTag();
BlackboardArtifact artifact = artifactTag.getArtifact();
Set<Long> updatedEventIDs = addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
/**
* Get a Set of event IDs for the events that are derived from the given
* file.
*
* @param file The AbstractFile to get derived event IDs
* for.
* @param includeDerivedArtifacts If true, also get event IDs for events
* derived from artifacts derived form this
* file. If false, only gets events derived
* directly from this file (file system
* timestamps).
*
* @return A Set of event IDs for the events that are derived from the given
* file.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
return eventManager.getEventIDsForFile(file, includeDerivedArtifacts);
}
/**
* Get a List of event IDs for the events that are derived from the given
* artifact.
*
* @param artifact The BlackboardArtifact to get derived event IDs for.
*
* @return A List of event IDs for the events that are derived from the
* given artifact.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
return eventManager.getEventIDsForArtifact(artifact);
}
/**
* Post a TagsAddedEvent to all registered subscribers, if the given set of
* updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsAddedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsAdded(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsAddedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Post a TagsDeletedEvent to all registered subscribers, if the given set
* of updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsDeletedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsDeletedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Register the given object to receive events.
*
* @param subscriber The object to register. Must implement public methods
* annotated with Subscribe.
*/
synchronized public void registerForEvents(Object subscriber) {
eventbus.register(subscriber);
}
/**
* Un-register the given object, so it no longer receives events.
*
* @param subscriber The object to un-register.
*/
synchronized public void unRegisterForEvents(Object subscriber) {
eventbus.unregister(subscriber);
}
/**
* Post a RefreshRequestedEvent to all registered subscribers.
*/
public void postRefreshRequest() {
eventbus.post(new RefreshRequestedEvent());
}
/**
* (Re)Post an AutopsyEvent received from another event distribution system
* locally to all registered subscribers.
*
* @param event The event to re-post.
*/
public void postAutopsyEventLocally(AutopsyEvent event) {
eventbus.post(event);
}
public ImmutableList<EventType> getEventTypes() {
return eventManager.getEventTypes();
}
synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) throws TskCoreException {
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, true);
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) throws TskCoreException {
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, tagged);
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
synchronized public Set<Long> setHashHit(Collection<BlackboardArtifact> artifacts, boolean hasHashHit) throws TskCoreException {
Set<Long> updatedEventIDs = new HashSet<>();
for (BlackboardArtifact artifact : artifacts) {
updatedEventIDs.addAll(eventManager.setEventsHashed(artifact.getObjectID(), hasHashHit));
}
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
/**
* Invalidate the timeline caches for the given event IDs. Also forces the
* filter values to be updated with any new values from the case data (data
* sources, tags, etc.).
*
* @param updatedEventIDs A collection of the event IDs whose cached event
* objects should be invalidated. Can be null or an
* empty set to invalidate the general caches, such
* as min/max time, or the counts per event type.
*
* @throws TskCoreException
*/
public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
minCache.invalidateAll();
maxCache.invalidateAll();
idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
eventCountsCache.invalidateAll();
populateFilterData();
eventbus.post(new CacheInvalidatedEvent());
}
/**
* Event fired when a cache has been invalidated. The UI should make it
* clear that the view is potentially out of date and present an action to
* refresh the view.
*/
public static class CacheInvalidatedEvent {
private CacheInvalidatedEvent() {
}
}
}
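
Since the deleted model above routes change notifications through a Guava EventBus, a view would have subscribed to it roughly as follows. A hedged sketch (the subscriber class is hypothetical; the registerForEvents()/@Subscribe contract is taken from the javadoc above):

    import com.google.common.eventbus.Subscribe;

    class RefreshPromptingView {
        RefreshPromptingView(FilteredEventsModel eventsModel) {
            eventsModel.registerForEvents(this); // handlers below are discovered via @Subscribe
        }

        @Subscribe
        public void handleCacheInvalidated(FilteredEventsModel.CacheInvalidatedEvent event) {
            // Per CacheInvalidatedEvent's javadoc: mark the view as potentially
            // out of date and offer the user a refresh action.
        }
    }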
@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.timeline;
import java.awt.Component;
+import java.io.IOException;
import java.util.logging.Level;
import javafx.application.Platform;
import javax.swing.ImageIcon;
@ -46,13 +47,10 @@ import org.sleuthkit.datamodel.TskCoreException;
 * An Action that opens the Timeline window. Has methods to open the window in
 * various specific states (e.g., showing a specific artifact in the List View)
 */
@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.timeline.Timeline")
@ActionRegistration(displayName = "#CTL_MakeTimeline", lazy = false)
@ActionReferences(value = {
-    @ActionReference(path = "Menu/Tools", position = 102)
-    ,
+    @ActionReference(path = "Menu/Tools", position = 102),
    @ActionReference(path = "Toolbars/Case", position = 102)})
public final class OpenTimelineAction extends CallableSystemAction {
@ -60,10 +58,19 @@ public final class OpenTimelineAction extends CallableSystemAction {
    private static final Logger logger = Logger.getLogger(OpenTimelineAction.class.getName());
    private static final int FILE_LIMIT = 6_000_000;
+    private static TimeLineController timeLineController = null;
    private final JMenuItem menuItem;
    private final JButton toolbarButton = new JButton(getName(),
            new ImageIcon(getClass().getResource("images/btn_icon_timeline_colorized_26.png"))); //NON-NLS
+    /**
+     * Invalidate the reference to the controller so that a new one will be
+     * instantiated the next time this action is invoked.
+     */
+    synchronized static void invalidateController() {
+        timeLineController = null;
+    }
    public OpenTimelineAction() {
        toolbarButton.addActionListener(actionEvent -> performAction());
@ -86,24 +93,24 @@ public final class OpenTimelineAction extends CallableSystemAction {
    public void performAction() {
        if (tooManyFiles()) {
            Platform.runLater(PromptDialogManager::showTooManyFiles);
+            synchronized (OpenTimelineAction.this) {
+                if (timeLineController != null) {
+                    timeLineController.shutDownTimeLine();
+                }
+            }
            setEnabled(false);
-        } else if ("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
+        }else if("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
            Platform.runLater(PromptDialogManager::showTimeLineDisabledMessage);
            setEnabled(false);
-        } else {
-            try {
-                showTimeline();
-            } catch (TskCoreException ex) {
-                MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
-                logger.log(Level.SEVERE, "Error showing timeline.", ex);
-            }
+        }else {
+            showTimeline();
        }
    }
    @NbBundle.Messages({
        "OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.",
        "OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources."})
-    synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) throws TskCoreException {
+    synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) {
        try {
            Case currentCase = Case.getCurrentCaseThrows();
            if (currentCase.hasData() == false) {
@ -111,8 +118,20 @@ public final class OpenTimelineAction extends CallableSystemAction {
                logger.log(Level.INFO, "Could not create timeline, there are no data sources.");// NON-NLS
                return;
            }
-            TimeLineController controller = TimeLineModule.getController();
-            controller.showTimeLine(file, artifact);
+            try {
+                if (timeLineController == null) {
+                    timeLineController = new TimeLineController(currentCase);
+                } else if (timeLineController.getAutopsyCase() != currentCase) {
+                    timeLineController.shutDownTimeLine();
+                    timeLineController = new TimeLineController(currentCase);
+                }
+                timeLineController.showTimeLine(file, artifact);
+            } catch (IOException iOException) {
+                MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
+                logger.log(Level.SEVERE, "Failed to initialize per case timeline settings.", iOException);
+            }
        } catch (NoCurrentCaseException e) {
            //there is no case... Do nothing.
        }
@ -122,7 +141,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
     * Open the Timeline window with the default initial view.
     */
    @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
-    public void showTimeline() throws TskCoreException {
+    public void showTimeline() {
        showTimeline(null, null);
    }
@ -134,7 +153,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
     * @param file The AbstractFile to show in the Timeline.
     */
    @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
-    public void showFileInTimeline(AbstractFile file) throws TskCoreException {
+    public void showFileInTimeline(AbstractFile file) {
        showTimeline(file, null);
    }
@ -145,7 +164,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
     * @param artifact The BlackboardArtifact to show in the Timeline.
     */
    @ThreadConfined(type = ThreadConfined.ThreadType.AWT)
-    public void showArtifactInTimeline(BlackboardArtifact artifact) throws TskCoreException {
+    public void showArtifactInTimeline(BlackboardArtifact artifact) {
        showTimeline(null, artifact);
    }
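
The reverted performAction()/showTimeline() logic amounts to lazily caching one TimeLineController per case and recreating it when the open case changes. Condensed as a hedged sketch (the helper name is hypothetical; the real code above inlines this in showTimeline()):

    // Assumes TimeLineController(Case) throws IOException, as the catch block above shows.
    synchronized private TimeLineController controllerFor(Case currentCase) throws IOException {
        if (timeLineController != null && timeLineController.getAutopsyCase() != currentCase) {
            timeLineController.shutDownTimeLine(); // release the stale per-case controller
            timeLineController = null;
        }
        if (timeLineController == null) {
            timeLineController = new TimeLineController(currentCase);
        }
        return timeLineController;
    }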
@ -0,0 +1,175 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2016-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.casemodule.Case;
/**
* Provides access to per-case timeline properties (key-value store).
*/
class PerCaseTimelineProperties {
private static final String STALE_KEY = "stale"; //NON-NLS
private static final String WAS_INGEST_RUNNING_KEY = "was_ingest_running"; // NON-NLS
private final Path propertiesPath;
PerCaseTimelineProperties(Case autopsyCase) {
Objects.requireNonNull(autopsyCase, "Case must not be null");
propertiesPath = Paths.get(autopsyCase.getModuleDirectory(), "Timeline", "timeline.properties"); //NON-NLS
}
/**
* Is the DB stale, i.e., does it need to be updated because new data sources
* (for example) have been added to the case.
*
* @return true if the db is stale
*
* @throws IOException if there is a problem reading the state from disk
*/
public synchronized boolean isDBStale() throws IOException {
String stale = getProperty(STALE_KEY);
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
}
/**
* Record the state of the events db as stale (true) or not stale (false).
*
* @param stale the new state of the event db. true for stale, false for not
* stale.
*
* @throws IOException if there was a problem writing the state to disk.
*/
public synchronized void setDbStale(Boolean stale) throws IOException {
setProperty(STALE_KEY, stale.toString());
}
/**
* Was ingest running the last time the database was updated?
*
* @return true if ingest was running the last time the db was updated
*
* @throws IOException if there was a problem reading from disk
*/
public synchronized boolean wasIngestRunning() throws IOException {
String stale = getProperty(WAS_INGEST_RUNNING_KEY);
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
}
/**
* record whether ingest was running during the last time the database was
* updated
*
* @param ingestRunning true if ingest was running
*
* @throws IOException if there was a problem writing to disk
*/
public synchronized void setIngestRunning(Boolean ingestRunning) throws IOException {
setProperty(WAS_INGEST_RUNNING_KEY, ingestRunning.toString());
}
/**
* Get a {@link Path} to the properties file. If the file does not exist, it
* will be created.
*
* @return the Path to the properties file.
*
* @throws IOException if there was a problem creating the properties file
*/
private synchronized Path getPropertiesPath() throws IOException {
if (!Files.exists(propertiesPath)) {
Path parent = propertiesPath.getParent();
Files.createDirectories(parent);
Files.createFile(propertiesPath);
}
return propertiesPath;
}
/**
* Returns the property with the given key.
*
* @param propertyKey - The property key to get the value for.
*
* @return - the value associated with the property.
*
* @throws IOException if there was a problem reading the property from disk
*/
private synchronized String getProperty(String propertyKey) throws IOException {
return getProperties().getProperty(propertyKey);
}
/**
* Sets the given property to the given value.
*
* @param propertyKey - The key of the property to be modified.
* @param propertyValue - the value to set the property to.
*
* @throws IOException if there was a problem writing the property to disk
*/
private synchronized void setProperty(String propertyKey, String propertyValue) throws IOException {
Path propertiesFile = getPropertiesPath();
Properties props = getProperties(propertiesFile);
props.setProperty(propertyKey, propertyValue);
try (OutputStream fos = Files.newOutputStream(propertiesFile)) {
props.store(fos, ""); //NON-NLS
}
}
/**
* Get a {@link Properties} object used to store the timeline properties.
*
* @return a properties object
*
* @throws IOException if there was a problem reading the .properties file
*/
private synchronized Properties getProperties() throws IOException {
return getProperties(getPropertiesPath());
}
/**
* Gets a {@link Properties} object populated from the given .properties
* file.
*
* @param propertiesFile a path to the .properties file to load
*
* @return a properties object
*
* @throws IOException if there was a problem reading the .properties file
*/
private synchronized Properties getProperties(final Path propertiesFile) throws IOException {
try (InputStream inputStream = Files.newInputStream(propertiesFile)) {
Properties props = new Properties();
props.load(inputStream);
return props;
}
}
}
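
A typical caller of this properties store would read both flags before deciding whether to prompt for a rebuild of the events DB, then write the fresh state back afterwards. A hedged usage sketch (the rebuild step and the ingestIsRunning flag are assumptions supplied by the caller, not shown in this file):

    PerCaseTimelineProperties perCaseProps = new PerCaseTimelineProperties(currentCase);
    try {
        if (perCaseProps.isDBStale() || perCaseProps.wasIngestRunning()) {
            // ... prompt the user and repopulate the events DB here ...
            perCaseProps.setDbStale(false);                 // record the fresh state
            perCaseProps.setIngestRunning(ingestIsRunning); // boolean supplied by the caller
        }
    } catch (IOException ex) {
        // Reads and writes hit Timeline/timeline.properties under the case module directory.
    }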
@ -152,7 +152,7 @@ public final class PromptDialogManager {
     * @return True if they want to continue anyway.
     */
    @NbBundle.Messages({
-        "PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.",
+        "PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.",
        "PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?"})
    @ThreadConfined(type = ThreadConfined.ThreadType.JFX)
    boolean confirmDuringIngest() {
@ -235,4 +235,5 @@ public final class PromptDialogManager {
        dialog.setHeaderText(Bundle.PromptDialogManager_showTimeLineDisabledMessage_headerText());
        dialog.showAndWait();
    }
}
@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2019 Basis Technology Corp. * Copyright 2011-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -25,12 +25,10 @@ import java.time.Instant;
import java.time.temporal.ChronoField; import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit; import java.time.temporal.ChronoUnit;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Set;
import java.util.logging.Level; import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings; import javafx.beans.binding.Bindings;
import javafx.beans.property.SimpleObjectProperty; import javafx.beans.property.SimpleObjectProperty;
import javafx.fxml.FXML; import javafx.fxml.FXML;
@ -60,15 +58,14 @@ import org.controlsfx.validation.Validator;
import org.joda.time.Interval; import org.joda.time.Interval;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.datamodel.SingleEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent; import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils; import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.EventType;
import org.sleuthkit.datamodel.timeline.TimelineEvent;
/** /**
* A Dialog that, given an AbstractFile or BlackBoardArtifact, allows the user * A Dialog that, given an AbstractFile or BlackBoardArtifact, allows the user
@ -96,13 +93,13 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
ChronoField.SECOND_OF_MINUTE); ChronoField.SECOND_OF_MINUTE);
@FXML @FXML
private TableView<TimelineEvent> eventTable; private TableView<SingleEvent> eventTable;
@FXML @FXML
private TableColumn<TimelineEvent, EventType> typeColumn; private TableColumn<SingleEvent, EventType> typeColumn;
@FXML @FXML
private TableColumn<TimelineEvent, Long> dateTimeColumn; private TableColumn<SingleEvent, Long> dateTimeColumn;
@FXML @FXML
private Spinner<Integer> amountSpinner; private Spinner<Integer> amountSpinner;
@ -115,6 +112,8 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
private final VBox contentRoot = new VBox(); private final VBox contentRoot = new VBox();
private final TimeLineController controller;
private final ValidationSupport validationSupport = new ValidationSupport(); private final ValidationSupport validationSupport = new ValidationSupport();
/** /**
@ -125,8 +124,10 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
* from. * from.
*/ */
@NbBundle.Messages({ @NbBundle.Messages({
"ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."}) "ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."
private ShowInTimelineDialog(TimeLineController controller, Collection<Long> eventIDS) throws TskCoreException { })
private ShowInTimelineDialog(TimeLineController controller, List<Long> eventIDS) {
this.controller = controller;
//load dialog content fxml //load dialog content fxml
final String name = "nbres:/" + StringUtils.replace(ShowInTimelineDialog.class.getPackage().getName(), ".", "/") + "/ShowInTimelineDialog.fxml"; // NON-NLS final String name = "nbres:/" + StringUtils.replace(ShowInTimelineDialog.class.getPackage().getName(), ".", "/") + "/ShowInTimelineDialog.fxml"; // NON-NLS
@ -194,16 +195,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
dateTimeColumn.setCellFactory(param -> new DateTimeTableCell<>()); dateTimeColumn.setCellFactory(param -> new DateTimeTableCell<>());
//add events to table //add events to table
Set<TimelineEvent> events = new HashSet<>(); eventTable.getItems().setAll(eventIDS.stream().map(controller.getEventsModel()::getEventById).collect(Collectors.toSet()));
FilteredEventsModel eventsModel = controller.getEventsModel();
for (Long eventID : eventIDS) {
try {
events.add(eventsModel.getEventById(eventID));
} catch (TskCoreException ex) {
throw new TskCoreException("Error getting event by id.", ex);
}
}
eventTable.getItems().setAll(events);
eventTable.setPrefHeight(Math.min(200, 24 * eventTable.getItems().size() + 28)); eventTable.setPrefHeight(Math.min(200, 24 * eventTable.getItems().size() + 28));
} }
@ -215,7 +207,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
* @param artifact The BlackboardArtifact to configure this dialog for. * @param artifact The BlackboardArtifact to configure this dialog for.
*/ */
@NbBundle.Messages({"ShowInTimelineDialog.artifactTitle=View Result in Timeline."}) @NbBundle.Messages({"ShowInTimelineDialog.artifactTitle=View Result in Timeline."})
ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) throws TskCoreException { ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) {
//get events IDs from artifact //get events IDs from artifact
this(controller, controller.getEventsModel().getEventIDsForArtifact(artifact)); this(controller, controller.getEventsModel().getEventIDsForArtifact(artifact));
@ -245,7 +237,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
@NbBundle.Messages({"# {0} - file path", @NbBundle.Messages({"# {0} - file path",
"ShowInTimelineDialog.fileTitle=View {0} in timeline.", "ShowInTimelineDialog.fileTitle=View {0} in timeline.",
"ShowInTimelineDialog.eventSelectionValidator.message=You must select an event."}) "ShowInTimelineDialog.eventSelectionValidator.message=You must select an event."})
ShowInTimelineDialog(TimeLineController controller, AbstractFile file) throws TskCoreException { ShowInTimelineDialog(TimeLineController controller, AbstractFile file) {
this(controller, controller.getEventsModel().getEventIDsForFile(file, false)); this(controller, controller.getEventsModel().getEventIDsForFile(file, false));
/* /*
@ -301,11 +293,11 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
/** /**
* Construct this Dialog's "result" from the given event. * Construct this Dialog's "result" from the given event.
* *
* @param selectedEvent The TimeLineEvent to include in the EventInTimeRange * @param selectedEvent The SingleEvent to include in the EventInTimeRange
* *
* @return The EventInTimeRange that is the "result" of this dialog. * @return The EventInTimeRange that is the "result" of this dialog.
*/ */
private ViewInTimelineRequestedEvent makeEventInTimeRange(TimelineEvent selectedEvent) { private ViewInTimelineRequestedEvent makeEventInTimeRange(SingleEvent selectedEvent) {
Duration selectedDuration = unitComboBox.getSelectionModel().getSelectedItem().getBaseUnit().getDuration().multipliedBy(amountSpinner.getValue()); Duration selectedDuration = unitComboBox.getSelectionModel().getSelectedItem().getBaseUnit().getDuration().multipliedBy(amountSpinner.getValue());
Interval range = IntervalUtils.getIntervalAround(Instant.ofEpochMilli(selectedEvent.getStartMillis()), selectedDuration); Interval range = IntervalUtils.getIntervalAround(Instant.ofEpochMilli(selectedEvent.getStartMillis()), selectedDuration);
return new ViewInTimelineRequestedEvent(Collections.singleton(selectedEvent.getEventID()), range); return new ViewInTimelineRequestedEvent(Collections.singleton(selectedEvent.getEventID()), range);
@ -364,7 +356,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
setGraphic(null); setGraphic(null);
} else { } else {
setText(item.getDisplayName()); setText(item.getDisplayName());
setGraphic(new ImageView(EventTypeUtils.getImagePath(item))); setGraphic(new ImageView(item.getFXImage()));
} }
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2018 Basis Technology Corp. * Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -19,19 +19,15 @@
package org.sleuthkit.autopsy.timeline; package org.sleuthkit.autopsy.timeline;
/** /**
* The org.openide.modules.OnStart annotation tells NetBeans to invoke this *
* class's run method.
*/ */
@org.openide.modules.OnStart public class TimeLineException extends Exception {
public class OnStart implements Runnable {
/** public TimeLineException(String string, Exception e) {
 * This method is invoked by virtue of the OnStart annotation on this super(string, e);
* class }
*/
@Override public TimeLineException(String string) {
public void run() { super(string);
TimeLineModule.onStart();
} }
} }

View File

@ -1,131 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.logging.Level;
import javafx.application.Platform;
import javax.swing.SwingUtilities;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Manages listeners and the controller.
*
*/
public class TimeLineModule {
private static final Logger logger = Logger.getLogger(TimeLineModule.class.getName());
private static final Object controllerLock = new Object();
private static TimeLineController controller;
/**
 * Provides static utilities; cannot be instantiated.
*/
private TimeLineModule() {
}
/**
 * Get the instance of the controller for the current case.
*
* @return the controller for the current case.
*
* @throws NoCurrentCaseException If there is no case open.
* @throws TskCoreException If there was a problem accessing the case
* database.
*
*/
public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException {
synchronized (controllerLock) {
if (controller == null) {
controller = new TimeLineController(Case.getCurrentCaseThrows());
}
return controller;
}
}
/**
 * This method is invoked by virtue of the OnStart annotation on the OnStart
 * class.
*/
static void onStart() {
Platform.setImplicitExit(false);
logger.info("Setting up TimeLine listeners"); //NON-NLS
IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());
Case.addPropertyChangeListener(new CaseEventListener());
}
/**
* Listener for case events.
*/
static private class CaseEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
getController().handleCaseEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
if (Case.Events.valueOf(evt.getPropertyName()).equals(CURRENT_CASE)) {
// we care only about case closing here
if (evt.getNewValue() == null) {
synchronized (controllerLock) {
if (controller != null) {
SwingUtilities.invokeLater(controller::shutDownTimeLine);
}
controller = null;
}
}
}
}
}
/**
* Listener for IngestModuleEvents
*/
static private class IngestModuleEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
getController().handleIngestModuleEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
}
}
}
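
The getController() method above is an instance of lazy initialization guarded by a dedicated lock object, torn down again when the case closes. A generic sketch of that pattern, with hypothetical names:

public class LazySingletonSketch {

    private static final Object lock = new Object();
    private static ExpensiveResource resource; // guarded by lock

    /** Create the resource on first request; later callers share the instance. */
    public static ExpensiveResource get() {
        synchronized (lock) {
            if (resource == null) {
                resource = new ExpensiveResource();
            }
            return resource;
        }
    }

    /** Drop the instance, e.g. when the owning case closes. */
    public static void reset() {
        synchronized (lock) {
            resource = null;
        }
    }

    static class ExpensiveResource {
    }
}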

View File

@ -18,7 +18,6 @@
*/ */
package org.sleuthkit.autopsy.timeline; package org.sleuthkit.autopsy.timeline;
import com.google.common.collect.ImmutableList;
import java.awt.BorderLayout; import java.awt.BorderLayout;
import java.awt.Component; import java.awt.Component;
import java.awt.KeyboardFocusManager; import java.awt.KeyboardFocusManager;
@ -59,6 +58,7 @@ import org.openide.windows.RetainLocation;
import org.openide.windows.TopComponent; import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager; import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.actions.AddBookmarkTagAction; import org.sleuthkit.autopsy.actions.AddBookmarkTagAction;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent; import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
import org.sleuthkit.autopsy.corecomponents.DataContentPanel; import org.sleuthkit.autopsy.corecomponents.DataContentPanel;
import org.sleuthkit.autopsy.corecomponents.DataResultPanel; import org.sleuthkit.autopsy.corecomponents.DataResultPanel;
@ -164,9 +164,7 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
*/ */
@Override @Override
public void invalidated(Observable observable) { public void invalidated(Observable observable) {
// make a copy because this list gets updated as the user navigates around List<Long> selectedEventIDs = controller.getSelectedEventIDs();
// and causes concurrent access exceptions
List<Long> selectedEventIDs = ImmutableList.copyOf(controller.getSelectedEventIDs());
//depending on the active view mode, we either update the dataResultPanel, or update the contentViewerPanel directly. //depending on the active view mode, we either update the dataResultPanel, or update the contentViewerPanel directly.
switch (controller.getViewMode()) { switch (controller.getViewMode()) {
@ -197,6 +195,9 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
contentViewerPanel.setNode(null); contentViewerPanel.setNode(null);
} }
}); });
} catch (NoCurrentCaseException ex) {
//Since the case is closed, the user probably doesn't care about this; just log it as a precaution. } catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "There was no case open to look up the Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
} catch (TskCoreException ex) { } catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to lookup Sleuthkit object backing a SingleEvent.", ex); // NON-NLS logger.log(Level.SEVERE, "Failed to lookup Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
Platform.runLater(() -> { Platform.runLater(() -> {
@ -281,7 +282,7 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
Platform.runLater(this::initFXComponents); Platform.runLater(this::initFXComponents);
//set up listeners //set up listeners
TimeLineController.timeZoneProperty().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString())); TimeLineController.getTimeZone().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString()));
controller.getSelectedEventIDs().addListener(selectedEventsListener); controller.getSelectedEventIDs().addListener(selectedEventsListener);
//Listen to ViewMode and adjust GUI componenets as needed. //Listen to ViewMode and adjust GUI componenets as needed.

View File

@ -1,21 +1,9 @@
/* /*
* Autopsy Forensic Browser * To change this license header, choose License Headers in Project Properties.
* * To change this template file, choose Tools | Templates
* Copyright 2011-2016 Basis Technology Corp. * and open the template in the editor.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/ */
package org.sleuthkit.autopsy.timeline; package org.sleuthkit.autopsy.timeline;
import javafx.scene.control.ListCell; import javafx.scene.control.ListCell;

View File

@ -1,319 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.actions;
import java.awt.Dialog;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import static java.util.Arrays.asList;
import java.util.List;
import java.util.Objects;
import java.util.logging.Level;
import javafx.application.Platform;
import javafx.embed.swing.JFXPanel;
import javafx.fxml.FXML;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonBase;
import javafx.scene.control.ButtonType;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.ComboBox;
import javafx.scene.control.DialogPane;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.util.StringConverter;
import javax.swing.JDialog;
import javax.swing.SwingUtilities;
import jfxtras.scene.control.LocalDateTimeTextField;
import org.apache.commons.lang3.StringUtils;
import org.controlsfx.control.action.Action;
import org.controlsfx.control.textfield.TextFields;
import org.controlsfx.tools.ValueExtractor;
import org.controlsfx.validation.ValidationSupport;
import org.controlsfx.validation.Validator;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.timeline.EventType;
/**
 * Action that allows the user to manually create timeline events. It prompts
* the user for event data and then adds it to the case via an artifact.
*/
@NbBundle.Messages({
"AddManualEvent.text=Add Event",
"AddManualEvent.longText=Manually add an event to the timeline."})
public class AddManualEvent extends Action {
private final static Logger logger = Logger.getLogger(AddManualEvent.class.getName());
private static final String MANUAL_CREATION = "Manual Creation"; //NON-NLS
private static final Image ADD_EVENT_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/add.png", 16, 16, true, true, true); // NON-NLS
/**
* Initialize the custom value extractor used by the ValidationSupport for
* the LocalDateTimeTextField in the EventCreationDialogPane.
*/
static {
ValueExtractor.addObservableValueExtractor(LocalDateTimeTextField.class::isInstance,
control -> ((LocalDateTimeTextField) control).localDateTimeProperty());
}
/**
 * Create an Action that allows the user to manually create timeline
 * events. It prompts the user for event data with a dialog and then adds it
 * to the case via an artifact. The datetime in the dialog will be set to
* "now" when the action is invoked.
*
* @param controller The controller for this action to use.
*
*/
public AddManualEvent(TimeLineController controller) {
this(controller, null);
}
/**
 * Create an Action that allows the user to manually create timeline
* events. It prompts the user for event data with a dialog and then adds it
* to the case via an artifact.
*
* @param controller The controller for this action to use.
* @param epochMillis The initial datetime to populate the dialog with. The
 * user can override this.
*/
public AddManualEvent(TimeLineController controller, Long epochMillis) {
super(Bundle.AddManualEvent_text());
setGraphic(new ImageView(ADD_EVENT_IMAGE));
setLongText(Bundle.AddManualEvent_longText());
setEventHandler(actionEvent -> SwingUtilities.invokeLater(() -> {
JEventCreationDialog dialog = new JEventCreationDialog(controller, epochMillis, SwingUtilities.windowForComponent(controller.getTopComponent()));
dialog.setVisible(true);
//actual event creation happens in the ok button listener.
}));
}
/**
 * Use the supplied ManualEventInfo to make a TSK_TL_EVENT artifact which
* will trigger adding a TimelineEvent.
*
* @param eventInfo The ManualEventInfo with the info needed to create an
* event.
*
* @throws IllegalArgumentException
*/
@NbBundle.Messages({
"AddManualEvent.createArtifactFailed=Failed to create artifact for event.",
"AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard."})
private void addEvent(TimeLineController controller, ManualEventInfo eventInfo) throws IllegalArgumentException {
SleuthkitCase sleuthkitCase = controller.getEventsModel().getSleuthkitCase();
try {
//Use the current examiner's name plus a fixed string as the source / module name.
String source = MANUAL_CREATION + ": " + sleuthkitCase.getCurrentExaminer().getLoginName();
BlackboardArtifact artifact = sleuthkitCase.newBlackboardArtifact(TSK_TL_EVENT, eventInfo.datasource.getId());
artifact.addAttributes(asList(
new BlackboardAttribute(
TSK_TL_EVENT_TYPE, source,
EventType.USER_CREATED.getTypeID()),
new BlackboardAttribute(
TSK_DESCRIPTION, source,
eventInfo.description),
new BlackboardAttribute(
TSK_DATETIME, source,
eventInfo.time)
));
try {
sleuthkitCase.getBlackboard().postArtifact(artifact, source);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creatig new artifact.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_createArtifactFailed(), ButtonType.OK).showAndWait();
}
}
/**
 * Subclass of JDialog used to display the JFXPanel with the event creation
* widgets.
*/
private final class JEventCreationDialog extends JDialog {
private final JFXPanel jfxPanel = new JFXPanel();
private JEventCreationDialog(TimeLineController controller, Long epochMillis, java.awt.Window owner) {
super(owner, Bundle.AddManualEvent_text(), Dialog.ModalityType.DOCUMENT_MODAL);
setIconImages(owner.getIconImages());
setResizable(false);
add(jfxPanel);
// make and configure the JavaFX components.
Platform.runLater(() -> {
// Custom DialogPane defined below.
EventCreationDialogPane customPane = new EventCreationDialogPane(controller, epochMillis);
//cancel button just closes the dialog
((ButtonBase) customPane.lookupButton(ButtonType.CANCEL)).setOnAction(event -> dispose());
//configure ok button to pull ManualEventInfo object and add it to case.
((ButtonBase) customPane.lookupButton(ButtonType.OK)).setOnAction(event -> {
ManualEventInfo manualEventInfo = customPane.getManualEventInfo();
if (manualEventInfo != null) {
addEvent(controller, manualEventInfo);
}
dispose(); //close and dispose the dialog.
});
jfxPanel.setScene(new Scene(customPane));
customPane.installValidation();
SwingUtilities.invokeLater(() -> {
//size and position dialog on EDT
pack();
setLocationRelativeTo(owner);
});
});
}
/**
 * The DialogPane that hosts the controls/widgets that allow the user
* to enter the event information.
*/
private class EventCreationDialogPane extends DialogPane {
@FXML
private ChoiceBox<DataSource> dataSourceChooser;
@FXML
private TextField descriptionTextField;
@FXML
private ComboBox<String> timeZoneChooser;
@FXML
private LocalDateTimeTextField timePicker;
private final List<String> timeZoneList = TimeZoneUtils.createTimeZoneList();
private final ValidationSupport validationSupport = new ValidationSupport();
private final TimeLineController controller;
private EventCreationDialogPane(TimeLineController controller, Long epochMillis) {
this.controller = controller;
FXMLConstructor.construct(this, "EventCreationDialog.fxml"); //NON-NLS
if (epochMillis == null) {
timePicker.setLocalDateTime(LocalDateTime.now());
} else {
timePicker.setLocalDateTime(LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), TimeLineController.getTimeZoneID()));
}
}
@FXML
@NbBundle.Messages({"# {0} - datasource name", "# {1} - datasource id",
"AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})",
"AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case."})
private void initialize() {
assert descriptionTextField != null : "fx:id=\"descriptionTextField\" was not injected: check your FXML file 'EventCreationDialog.fxml'.";//NON-NLS
timeZoneChooser.getItems().setAll(timeZoneList);
timeZoneChooser.getSelectionModel().select(TimeZoneUtils.createTimeZoneString(TimeLineController.getTimeZone()));
TextFields.bindAutoCompletion(timeZoneChooser.getEditor(), timeZoneList);
dataSourceChooser.setConverter(new StringConverter<DataSource>() {
@Override
public String toString(DataSource dataSource) {
return Bundle.AddManualEvent_EventCreationDialogPane_dataSourceStringConverter_template(dataSource.getName(), dataSource.getId());
}
@Override
public DataSource fromString(String string) {
throw new UnsupportedOperationException(); // This method should never get called.
}
});
try {
dataSourceChooser.getItems().setAll(controller.getAutopsyCase().getSleuthkitCase().getDataSources());
dataSourceChooser.getSelectionModel().select(0);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting datasources in case.", ex);//NON-NLS
SwingUtilities.invokeLater(() -> MessageNotifyUtil.Message.error(Bundle.AddManualEvent_EventCreationDialogPane_initialize_dataSourcesError()));
}
}
/**
* Install/Configure the ValidationSupport.
*/
@NbBundle.Messages({
"AddManualEvent.validation.description=Description is required.",
"AddManualEvent.validation.datetime=Invalid datetime",
"AddManualEvent.validation.timezone=Invalid time zone",})
private void installValidation() {
validationSupport.registerValidator(descriptionTextField, false,
Validator.createEmptyValidator(Bundle.AddManualEvent_validation_description()));
validationSupport.registerValidator(timePicker, false,
Validator.createPredicateValidator(Objects::nonNull, Bundle.AddManualEvent_validation_datetime()));
validationSupport.registerValidator(timeZoneChooser, false,
Validator.createPredicateValidator((String zone) -> timeZoneList.contains(zone.trim()), Bundle.AddManualEvent_validation_timezone()));
validationSupport.initInitialDecoration();
//The ok button is only enabled if all fields are validated.
lookupButton(ButtonType.OK).disableProperty().bind(validationSupport.invalidProperty());
}
/**
 * Combine the user-entered data into a ManualEventInfo object.
*
* @return The ManualEventInfo containing the user entered event
* info.
*/
private ManualEventInfo getManualEventInfo() {
//Trim off the offset part of the string from the chooser, to get something that ZoneId can parse.
String zone = StringUtils.substringAfter(timeZoneChooser.getValue(), ")").trim(); //NON-NLS
long toEpochSecond = timePicker.getLocalDateTime().atZone(ZoneId.of(zone)).toEpochSecond();
return new ManualEventInfo(dataSourceChooser.getValue(), descriptionTextField.getText(), toEpochSecond);
}
}
}
/**
* Info required from user to manually create a timeline event.
*/
private static class ManualEventInfo {
private final DataSource datasource;
private final String description;
private final long time;
private ManualEventInfo(DataSource datasource, String description, long time) {
this.datasource = datasource;
this.description = description;
this.time = time;
}
}
}
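
The getManualEventInfo() method above turns the chooser's display string and the picker's LocalDateTime into epoch seconds. A self-contained sketch of that conversion using only java.time and plain String methods; the sample values are hypothetical, and the original uses StringUtils.substringAfter for the same trimming.

import java.time.LocalDateTime;
import java.time.ZoneId;

public class ZoneConversionSketch {

    public static void main(String[] args) {
        // The chooser shows something like "(GMT-5:00) America/New_York";
        // keeping only the part after ")" yields a string ZoneId can parse.
        String chooserValue = "(GMT-5:00) America/New_York"; // hypothetical
        String zone = chooserValue.substring(chooserValue.indexOf(')') + 1).trim();
        LocalDateTime picked = LocalDateTime.of(2019, 4, 19, 13, 16); // hypothetical
        long epochSecond = picked.atZone(ZoneId.of(zone)).toEpochSecond();
        System.out.println(zone + " -> " + epochSecond);
    }
}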

View File

@ -1,14 +1,3 @@
AddManualEvent.createArtifactFailed=Failed to create artifact for event.
# {0} - datasource name
# {1} - datasource id
AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})
AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case.
AddManualEvent.longText=Manually add an event to the timeline.
AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard.
AddManualEvent.text=Add Event
AddManualEvent.validation.datetime=Invalid datetime
AddManualEvent.validation.description=Description is required.
AddManualEvent.validation.timezone=Invalid time zone
# {0} - action accelerator keys # {0} - action accelerator keys
Back.longText=Back: {0}\nGo back to the last view settings. Back.longText=Back: {0}\nGo back to the last view settings.
Back.text=Back Back.text=Back
@ -21,6 +10,8 @@ OpenReportAction.MissingReportFileMessage=The report file no longer exists.
OpenReportAction.NoAssociatedEditorMessage=There is no associated editor for reports of this type or the associated application failed to launch. OpenReportAction.NoAssociatedEditorMessage=There is no associated editor for reports of this type or the associated application failed to launch.
OpenReportAction.NoOpenInEditorSupportMessage=This platform (operating system) does not support opening a file in an editor this way. OpenReportAction.NoOpenInEditorSupportMessage=This platform (operating system) does not support opening a file in an editor this way.
OpenReportAction.ReportFileOpenPermissionDeniedMessage=Permission to open the report file was denied. OpenReportAction.ReportFileOpenPermissionDeniedMessage=Permission to open the report file was denied.
RebuildDataBase.longText=Update the DB to include new events.
RebuildDataBase.text=Update DB
ResetFilters.text=Reset all filters ResetFilters.text=Reset all filters
RestFilters.longText=Reset all filters to their default state. RestFilters.longText=Reset all filters to their default state.
SaveSnapShotAsReport.action.dialogs.title=Timeline SaveSnapShotAsReport.action.dialogs.title=Timeline
@ -41,12 +32,8 @@ ViewArtifactInTimelineAction.displayName=View Result in Timeline...
ViewFileInTimelineAction.viewFile.displayName=View File in Timeline... ViewFileInTimelineAction.viewFile.displayName=View File in Timeline...
ViewFileInTimelineAction.viewSourceFile.displayName=View Source File in Timeline... ViewFileInTimelineAction.viewSourceFile.displayName=View Source File in Timeline...
ZoomIn.action.text=Zoom in ZoomIn.action.text=Zoom in
ZoomIn.errorMessage=Error zooming in.
ZoomIn.longText=Zoom in to view about half as much time. ZoomIn.longText=Zoom in to view about half as much time.
ZoomOut.action.text=Zoom out ZoomOut.action.text=Zoom out
ZoomOut.disabledProperty.errorMessage=Error getting spanning interval.
ZoomOut.errorMessage=Error zooming out.
ZoomOut.longText=Zoom out to view about 50% more time. ZoomOut.longText=Zoom out to view about 50% more time.
ZoomToEvents.action.text=Zoom to events ZoomToEvents.action.text=Zoom to events
ZoomToEvents.disabledProperty.errorMessage=Error getting spanning interval.
ZoomToEvents.longText=Zoom out to show the nearest events. ZoomToEvents.longText=Zoom out to show the nearest events.

View File

@ -1,52 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<?import javafx.geometry.Insets?>
<?import javafx.scene.control.ButtonType?>
<?import javafx.scene.control.ChoiceBox?>
<?import javafx.scene.control.ComboBox?>
<?import javafx.scene.control.DialogPane?>
<?import javafx.scene.control.Label?>
<?import javafx.scene.control.TextField?>
<?import javafx.scene.layout.ColumnConstraints?>
<?import javafx.scene.layout.GridPane?>
<?import javafx.scene.layout.RowConstraints?>
<?import jfxtras.scene.control.LocalDateTimeTextField?>
<fx:root expanded="true" maxHeight="159.0" maxWidth="555.0" minHeight="159.0" minWidth="555.0" prefHeight="159.0" prefWidth="555.0" type="DialogPane" xmlns="http://javafx.com/javafx/8.0.141" xmlns:fx="http://javafx.com/fxml/1">
<buttonTypes>
<ButtonType fx:constant="OK" />
<ButtonType fx:constant="CANCEL" />
</buttonTypes>
<content>
<GridPane fx:id="gridPane" hgap="5.0" vgap="5.0">
<columnConstraints>
<ColumnConstraints hgrow="NEVER" maxWidth="93.0" minWidth="10.0" />
<ColumnConstraints hgrow="SOMETIMES" maxWidth="193.0" minWidth="10.0" />
<ColumnConstraints hgrow="NEVER" />
<ColumnConstraints hgrow="SOMETIMES" minWidth="10.0" />
</columnConstraints>
<rowConstraints>
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
<RowConstraints fillHeight="false" minHeight="10.0" prefHeight="30.0" vgrow="SOMETIMES" />
</rowConstraints>
<children>
<Label text="Description:" GridPane.rowIndex="1" />
<TextField fx:id="descriptionTextField" prefHeight="26.0" prefWidth="278.0" GridPane.columnIndex="1" GridPane.columnSpan="3" GridPane.rowIndex="1" />
<Label text="DateTime" GridPane.rowIndex="2" />
<Label text="Time Zone" GridPane.columnIndex="2" GridPane.rowIndex="2">
<padding>
<Insets left="15.0" />
</padding>
</Label>
<ComboBox fx:id="timeZoneChooser" editable="true" prefHeight="28.0" prefWidth="214.0" GridPane.columnIndex="3" GridPane.rowIndex="2" />
<LocalDateTimeTextField fx:id="timePicker" prefHeight="26.0" prefWidth="166.0" GridPane.columnIndex="1" GridPane.rowIndex="2" />
<Label text="DataSource:" />
<ChoiceBox fx:id="dataSourceChooser" prefHeight="25.0" prefWidth="168.0" GridPane.columnIndex="1" GridPane.columnSpan="3" />
</children>
<padding>
<Insets bottom="5.0" left="5.0" right="5.0" top="5.0" />
</padding>
</GridPane>
</content>
</fx:root>

View File

@ -23,7 +23,7 @@ import javafx.event.ActionEvent;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
/** /**
* Action that resets the filters to their initial/default state. * Action that resets the filters to their initial/default state.
@ -44,12 +44,12 @@ public class ResetFilters extends Action {
eventsModel = controller.getEventsModel(); eventsModel = controller.getEventsModel();
disabledProperty().bind(new BooleanBinding() { disabledProperty().bind(new BooleanBinding() {
{ {
bind(eventsModel.zoomStateProperty()); bind(eventsModel.zoomParametersProperty());
} }
@Override @Override
protected boolean computeValue() { protected boolean computeValue() {
return eventsModel.zoomStateProperty().getValue().getFilterState().equals(eventsModel.getDefaultFilter()); return eventsModel.zoomParametersProperty().getValue().getFilter().equals(eventsModel.getDefaultFilter());
} }
}); });
setEventHandler((ActionEvent t) -> { setEventHandler((ActionEvent t) -> {

View File

@ -141,7 +141,7 @@ public class SaveSnapshotAsReport extends Action {
reportMainFilePath = new SnapShotReportWriter(currentCase, reportMainFilePath = new SnapShotReportWriter(currentCase,
reportFolderPath, reportFolderPath,
reportName, reportName,
controller.getEventsModel().getZoomState(), controller.getEventsModel().getZoomParamaters(),
generationDate, snapshot).writeReport(); generationDate, snapshot).writeReport();
} catch (IOException ex) { } catch (IOException ex) {
LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS LOGGER.log(Level.SEVERE, "Error writing report to disk at " + reportFolderPath, ex); //NON_NLS

View File

@ -0,0 +1,50 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.actions;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.TimeLineController;
/**
* An action that rebuilds the timeline database to include any new results from
* ingest.
*/
public class UpdateDB extends Action {
private static final Image DB_REFRESH = new Image("org/sleuthkit/autopsy/timeline/images/database_refresh.png");
/**
* Constructor
*
* @param controller The TimeLineController for this action.
*/
@NbBundle.Messages({
"RebuildDataBase.text=Update DB",
"RebuildDataBase.longText=Update the DB to include new events."})
public UpdateDB(TimeLineController controller) {
super(Bundle.RebuildDataBase_text());
setLongText(Bundle.RebuildDataBase_longText());
setGraphic(new ImageView(DB_REFRESH));
setEventHandler(actionEvent -> controller.rebuildRepo());
disabledProperty().bind(controller.eventsDBStaleProperty().not());
}
}
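
The action above stays enabled only while the events DB is stale, by binding its disabled state to the negation of a boolean property. A minimal JavaFX-only sketch of that binding; the names are hypothetical, and it assumes the JavaFX toolkit is initialized.

import javafx.beans.property.SimpleBooleanProperty;
import javafx.scene.control.Button;

public class StaleBindingSketch {

    public static void demo() {
        SimpleBooleanProperty dbStale = new SimpleBooleanProperty(false);
        Button updateButton = new Button("Update DB");
        // Disabled whenever the DB is NOT stale, mirroring
        // disabledProperty().bind(controller.eventsDBStaleProperty().not()).
        updateButton.disableProperty().bind(dbStale.not());
        dbStale.set(true); // the binding re-evaluates and the button enables
    }
}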

View File

@ -19,13 +19,14 @@
package org.sleuthkit.autopsy.timeline.actions; package org.sleuthkit.autopsy.timeline.actions;
import java.awt.event.ActionEvent; import java.awt.event.ActionEvent;
import java.util.logging.Level; import java.util.Set;
import java.util.stream.Collectors;
import javax.swing.AbstractAction; import javax.swing.AbstractAction;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.openide.util.actions.SystemAction; import org.openide.util.actions.SystemAction;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.timeline.OpenTimelineAction; import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.ArtifactEventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.datamodel.BlackboardArtifact; import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskCoreException;
@ -35,7 +36,13 @@ import org.sleuthkit.datamodel.TskCoreException;
*/ */
public final class ViewArtifactInTimelineAction extends AbstractAction { public final class ViewArtifactInTimelineAction extends AbstractAction {
private static final Logger logger = Logger.getLogger(ViewFileInTimelineAction.class.getName()); private static final long serialVersionUID = 1L;
private static final Set<ArtifactEventType> ARTIFACT_EVENT_TYPES =
EventType.allTypes.stream()
.filter((EventType t) -> t instanceof ArtifactEventType)
.map(ArtifactEventType.class::cast)
.collect(Collectors.toSet());
private final BlackboardArtifact artifact; private final BlackboardArtifact artifact;
@ -47,26 +54,26 @@ public final class ViewArtifactInTimelineAction extends AbstractAction {
@Override @Override
public void actionPerformed(ActionEvent e) { public void actionPerformed(ActionEvent e) {
try { SystemAction.get(OpenTimelineAction.class).showArtifactInTimeline(artifact);
SystemAction.get(OpenTimelineAction.class).showArtifactInTimeline(artifact);
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error opening Timeline");
logger.log(Level.SEVERE, "Error showing timeline.", ex);
}
} }
/** /**
* Does the given artifact have a datetime attribute? * Does the given artifact have a type that Timeline supports, and does it
* have a positive timestamp in the supported attribute?
* *
* @param artifact The artifact to test for a supported timestamp * @param artifact The artifact to test for a supported timestamp
* *
* @return True if this artifact has a timestamp supported by Timeline. * @return True if this artifact has a timestamp supported by Timeline.
*/ */
public static boolean hasSupportedTimeStamp(BlackboardArtifact artifact) throws TskCoreException { public static boolean hasSupportedTimeStamp(BlackboardArtifact artifact) throws TskCoreException {
//see if the given artifact is a supported type ...
for (BlackboardAttribute attr : artifact.getAttributes()) { for (ArtifactEventType artEventType : ARTIFACT_EVENT_TYPES) {
if (attr.getValueType() == BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) { if (artEventType.getArtifactTypeID() == artifact.getArtifactTypeID()) {
return true; //... and has a non-bogus timestamp in the supported attribute
BlackboardAttribute attribute = artifact.getAttribute(artEventType.getDateTimeAttributeType());
if (null != attribute && attribute.getValueLong() > 0) {
return true;
}
} }
} }
return false; return false;

View File

@ -19,15 +19,11 @@
package org.sleuthkit.autopsy.timeline.actions; package org.sleuthkit.autopsy.timeline.actions;
import java.awt.event.ActionEvent; import java.awt.event.ActionEvent;
import java.util.logging.Level;
import javax.swing.AbstractAction; import javax.swing.AbstractAction;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.openide.util.actions.SystemAction; import org.openide.util.actions.SystemAction;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.timeline.OpenTimelineAction; import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData; import org.sleuthkit.datamodel.TskData;
/** /**
@ -38,8 +34,6 @@ public final class ViewFileInTimelineAction extends AbstractAction {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(ViewFileInTimelineAction.class.getName());
private final AbstractFile file; private final AbstractFile file;
private ViewFileInTimelineAction(AbstractFile file, String displayName) { private ViewFileInTimelineAction(AbstractFile file, String displayName) {
@ -68,11 +62,6 @@ public final class ViewFileInTimelineAction extends AbstractAction {
@Override @Override
public void actionPerformed(ActionEvent e) { public void actionPerformed(ActionEvent e) {
try { SystemAction.get(OpenTimelineAction.class).showFileInTimeline(file);
SystemAction.get(OpenTimelineAction.class).showFileInTimeline(file);
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error opening Timeline");
logger.log(Level.SEVERE, "Error showing timeline.", ex);
}
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2015-2018 Basis Technology Corp. * Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -18,40 +18,27 @@
*/ */
package org.sleuthkit.autopsy.timeline.actions; package org.sleuthkit.autopsy.timeline.actions;
import java.util.logging.Level;
import javafx.scene.control.Alert;
import javafx.scene.image.Image; import javafx.scene.image.Image;
import javafx.scene.image.ImageView; import javafx.scene.image.ImageView;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.datamodel.TskCoreException;
/** /**
* *
*/ */
public class ZoomIn extends Action { public class ZoomIn extends Action {
private static final Logger logger = Logger.getLogger(ZoomIn.class.getName());
private static final Image MAGNIFIER_IN = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-in-green.png"); //NOI18N NON-NLS private static final Image MAGNIFIER_IN = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-in-green.png"); //NOI18N NON-NLS
@NbBundle.Messages({"ZoomIn.longText=Zoom in to view about half as much time.", @NbBundle.Messages({"ZoomIn.longText=Zoom in to view about half as much time.",
"ZoomIn.action.text=Zoom in", "ZoomIn.action.text=Zoom in"})
"ZoomIn.errorMessage=Error zooming in."
})
public ZoomIn(TimeLineController controller) { public ZoomIn(TimeLineController controller) {
super(Bundle.ZoomIn_action_text()); super(Bundle.ZoomIn_action_text());
setLongText(Bundle.ZoomIn_longText()); setLongText(Bundle.ZoomIn_longText());
setGraphic(new ImageView(MAGNIFIER_IN)); setGraphic(new ImageView(MAGNIFIER_IN));
setEventHandler(actionEvent -> { setEventHandler(actionEvent -> {
try { controller.pushZoomInTime();
controller.pushZoomInTime();
} catch (TskCoreException ex) {
new Alert(Alert.AlertType.ERROR, Bundle.ZoomIn_errorMessage()).showAndWait();
logger.log(Level.SEVERE, "Error zooming in.", ex);
}
}); });
} }
} }

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2015-2018 Basis Technology Corp. * Copyright 2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -18,61 +18,40 @@
*/ */
package org.sleuthkit.autopsy.timeline.actions; package org.sleuthkit.autopsy.timeline.actions;
import java.util.logging.Level;
import javafx.beans.binding.BooleanBinding; import javafx.beans.binding.BooleanBinding;
import javafx.scene.control.Alert;
import javafx.scene.image.Image; import javafx.scene.image.Image;
import javafx.scene.image.ImageView; import javafx.scene.image.ImageView;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
/** /**
* *
*/ */
public class ZoomOut extends Action { public class ZoomOut extends Action {
final private static Logger logger = Logger.getLogger(ZoomOut.class.getName());
private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png"); //NOI18N NON-NLS private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png"); //NOI18N NON-NLS
@NbBundle.Messages({"ZoomOut.longText=Zoom out to view about 50% more time.", @NbBundle.Messages({"ZoomOut.longText=Zoom out to view about 50% more time.",
"ZoomOut.action.text=Zoom out", "ZoomOut.action.text=Zoom out"})
"ZoomOut.errorMessage=Error zooming out.",
"ZoomOut.disabledProperty.errorMessage=Error getting spanning interval."})
public ZoomOut(TimeLineController controller) { public ZoomOut(TimeLineController controller) {
super(Bundle.ZoomOut_action_text()); super(Bundle.ZoomOut_action_text());
setLongText(Bundle.ZoomOut_longText()); setLongText(Bundle.ZoomOut_longText());
setGraphic(new ImageView(MAGNIFIER_OUT)); setGraphic(new ImageView(MAGNIFIER_OUT));
setEventHandler(actionEvent -> { setEventHandler(actionEvent -> controller.pushZoomOutTime());
try {
controller.pushZoomOutTime();
} catch (TskCoreException ex) {
new Alert(Alert.AlertType.ERROR, Bundle.ZoomOut_errorMessage()).showAndWait();
logger.log(Level.SEVERE, "Error zooming out.", ex);
}
});
//disable action when the current time range already encompasses the entire case. //disable action when the current time range already encompasses the entire case.
disabledProperty().bind(new BooleanBinding() { disabledProperty().bind(new BooleanBinding() {
private final FilteredEventsModel eventsModel = controller.getEventsModel(); private final FilteredEventsModel eventsModel = controller.getEventsModel();
{ {
bind(eventsModel.zoomStateProperty(), eventsModel.timeRangeProperty()); bind(eventsModel.zoomParametersProperty(), eventsModel.timeRangeProperty());
} }
@Override @Override
protected boolean computeValue() { protected boolean computeValue() {
try { return eventsModel.timeRangeProperty().get().contains(eventsModel.getSpanningInterval());
return eventsModel.getTimeRange().contains(eventsModel.getSpanningInterval());
} catch (TskCoreException ex) {
new Alert(Alert.AlertType.ERROR, Bundle.ZoomOut_disabledProperty_errorMessage()).showAndWait();
logger.log(Level.SEVERE, "Error getting spanning interval.", ex);
return true;
}
} }
}); });
} }

View File

@ -18,40 +18,29 @@
*/ */
package org.sleuthkit.autopsy.timeline.actions; package org.sleuthkit.autopsy.timeline.actions;
import java.util.logging.Level;
import javafx.beans.binding.BooleanBinding; import javafx.beans.binding.BooleanBinding;
import javafx.scene.control.Alert;
import javafx.scene.image.Image; import javafx.scene.image.Image;
import javafx.scene.image.ImageView; import javafx.scene.image.ImageView;
import org.controlsfx.control.action.Action; import org.controlsfx.control.action.Action;
import org.openide.util.NbBundle; import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.TimeLineController; import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.FilteredEventsModel; import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.datamodel.TskCoreException;
/** /**
* *
*/ */
public class ZoomToEvents extends Action { public class ZoomToEvents extends Action {
private static final Logger logger = Logger.getLogger(ZoomToEvents.class.getName());
private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png", 16, 16, true, true); //NOI18N NON-NLS private static final Image MAGNIFIER_OUT = new Image("/org/sleuthkit/autopsy/timeline/images/magnifier-zoom-out-red.png", 16, 16, true, true); //NOI18N NON-NLS
@NbBundle.Messages({"ZoomToEvents.action.text=Zoom to events", @NbBundle.Messages({"ZoomToEvents.action.text=Zoom to events",
"ZoomToEvents.longText=Zoom out to show the nearest events.", "ZoomToEvents.longText=Zoom out to show the nearest events."})
"ZoomToEvents.disabledProperty.errorMessage=Error getting spanning interval."})
public ZoomToEvents(final TimeLineController controller) { public ZoomToEvents(final TimeLineController controller) {
super(Bundle.ZoomToEvents_action_text()); super(Bundle.ZoomToEvents_action_text());
setLongText(Bundle.ZoomToEvents_longText()); setLongText(Bundle.ZoomToEvents_longText());
setGraphic(new ImageView(MAGNIFIER_OUT)); setGraphic(new ImageView(MAGNIFIER_OUT));
setEventHandler(actionEvent -> { setEventHandler(actionEvent -> {
try { controller.zoomOutToActivity();
controller.zoomOutToActivity();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error invoking ZoomToEvents action", ex);
new Alert(Alert.AlertType.ERROR, "Error zomming").showAndWait();
}
}); });
//disable action when the current time range already encompasses the entire case. //disable action when the current time range already encompasses the entire case.
@ -59,19 +48,13 @@ public class ZoomToEvents extends Action {
private final FilteredEventsModel eventsModel = controller.getEventsModel(); private final FilteredEventsModel eventsModel = controller.getEventsModel();
{ {
bind(eventsModel.zoomStateProperty()); bind(eventsModel.zoomParametersProperty());
} }
@Override @Override
protected boolean computeValue() { protected boolean computeValue() {
try { //TODO: do a db query to see if using this action will actually result in viewable events
//TODO: do a db query to see if using this action will actually result in viewable events return eventsModel.zoomParametersProperty().getValue().getTimeRange().contains(eventsModel.getSpanningInterval());
return eventsModel.getTimeRange().contains(eventsModel.getSpanningInterval());
} catch (TskCoreException ex) {
new Alert(Alert.AlertType.ERROR, Bundle.ZoomToEvents_disabledProperty_errorMessage()).showAndWait();
logger.log(Level.SEVERE, "Error getting spanning interval.", ex);
return true;
}
} }
}); });
} }

View File

@ -0,0 +1,152 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.datamodel;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.python.google.common.collect.ImmutableSet;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
/**
* A container for several events that have the same timestamp and description
* and are backed by the same file. Used in the ListView to coalesce the file
* system events for a file when they have the same timestamp.
*/
public class CombinedEvent {
private final long fileID;
private final long epochMillis;
private final String description;
/**
* A map from EventType to event ID.
*/
private final Map<EventType, Long> eventTypeMap = new HashMap<>();
/**
* Constructor
*
* @param epochMillis The timestamp for this event, in millis from the Unix
* epoch.
 * @param description The full description shared by all the combined events.
* @param fileID The ID of the file shared by all the combined events.
* @param eventMap A map from EventType to event ID.
*/
public CombinedEvent(long epochMillis, String description, long fileID, Map<EventType, Long> eventMap) {
this.epochMillis = epochMillis;
this.description = description;
eventTypeMap.putAll(eventMap);
this.fileID = fileID;
}
/**
* Get the timestamp of this event as millis from the Unix epoch.
*
* @return The timestamp of this event as millis from the Unix epoch.
*/
public long getStartMillis() {
return epochMillis;
}
/**
* Get the full description shared by all the combined events.
*
* @return The full description shared by all the combined events.
*/
public String getDescription() {
return description;
}
/**
* Get the obj ID of the file shared by the combined events.
*
* @return The obj ID of the file shared by the combined events.
*/
public long getFileID() {
return fileID;
}
/**
* Get the types of the combined events.
*
* @return The types of the combined events.
*/
public Set<EventType> getEventTypes() {
return eventTypeMap.keySet();
}
/**
* Get the event IDs of the combined events.
*
* @return The event IDs of the combined events.
*/
public ImmutableSet<Long> getEventIDs() {
return ImmutableSet.copyOf(eventTypeMap.values());
}
/**
* Get the event ID of one event that is representative of all the combined
* events. It can be used to look up a SingleEvent with more details, for
* example.
*
* @return An arbitrary representative event ID for the combined events.
*/
public Long getRepresentativeEventID() {
return eventTypeMap.values().stream().findFirst().get();
}
@Override
public int hashCode() {
int hash = 3;
hash = 53 * hash + (int) (this.fileID ^ (this.fileID >>> 32));
hash = 53 * hash + (int) (this.epochMillis ^ (this.epochMillis >>> 32));
hash = 53 * hash + Objects.hashCode(this.description);
hash = 53 * hash + Objects.hashCode(this.eventTypeMap);
return hash;
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final CombinedEvent other = (CombinedEvent) obj;
if (this.fileID != other.fileID) {
return false;
}
if (this.epochMillis != other.epochMillis) {
return false;
}
if (!Objects.equals(this.description, other.description)) {
return false;
}
if (!Objects.equals(this.eventTypeMap, other.eventTypeMap)) {
return false;
}
return true;
}
}
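CombinedEvent itself is a value object; the coalescing happens elsewhere in the model. Still, the grouping idea (bucket events by timestamp, description, and file ID, keeping a per-bucket map from event type to event ID) can be sketched with plain JDK types. The Event record and sample values below are hypothetical illustrations, not Autopsy API.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CoalesceSketch {
    // Hypothetical stand-in for a single timeline event.
    record Event(long epochMillis, String description, long fileID, String type, long eventID) {}

    public static void main(String[] args) {
        List<Event> events = List.of(
                new Event(1000, "/img/report.doc", 42, "MODIFIED", 1),
                new Event(1000, "/img/report.doc", 42, "ACCESSED", 2),
                new Event(2000, "/img/other.doc", 43, "CREATED", 3));

        // Key on (timestamp, description, fileID); each value maps event type to
        // event ID, mirroring CombinedEvent's EventType-to-ID map.
        Map<List<Object>, Map<String, Long>> combined = new HashMap<>();
        for (Event e : events) {
            combined.computeIfAbsent(List.of(e.epochMillis(), e.description(), e.fileID()),
                    k -> new HashMap<>()).put(e.type(), e.eventID());
        }
        // Prints each key with its type-to-ID map (iteration order may vary), e.g.
        // [1000, /img/report.doc, 42] -> {MODIFIED=1, ACCESSED=2}
        combined.forEach((key, types) -> System.out.println(key + " -> " + types));
    }
}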

View File

@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2018-2019 Basis Technology Corp.
+ * Copyright 2011-2016 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@ -16,29 +16,59 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;
+package org.sleuthkit.autopsy.timeline.datamodel;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSortedSet;
 import com.google.common.collect.Sets;
-import static java.util.Collections.emptySet;
-import static java.util.Collections.singleton;
+import java.util.Collection;
 import java.util.Comparator;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.Set;
 import java.util.SortedSet;
+import javax.annotation.concurrent.Immutable;
 import org.joda.time.Interval;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
 import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
-import org.sleuthkit.datamodel.DescriptionLoD;
-import org.sleuthkit.datamodel.timeline.EventType;
-import org.sleuthkit.datamodel.timeline.TimelineEvent;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
 /**
  * Represents a set of other events clustered together. All the sub events
  * should have the same type and matching descriptions at the designated "zoom
  * level", and be "close together" in time.
  */
+@Immutable
 public class EventCluster implements MultiEvent<EventStripe> {
+    /**
+     * merge two event clusters into one new event cluster.
+     *
+     * @param cluster1
+     * @param cluster2
+     *
+     * @return a new event cluster that is the result of merging the given
+     *         event clusters
+     */
+    public static EventCluster merge(EventCluster cluster1, EventCluster cluster2) {
+        if (cluster1.getEventType() != cluster2.getEventType()) {
+            throw new IllegalArgumentException("event clusters are not compatible: they have different types");
+        }
+        if (!cluster1.getDescription().equals(cluster2.getDescription())) {
+            throw new IllegalArgumentException("event clusters are not compatible: they have different descriptions");
+        }
+        Sets.SetView<Long> idsUnion =
+                Sets.union(cluster1.getEventIDs(), cluster2.getEventIDs());
+        Sets.SetView<Long> hashHitsUnion =
+                Sets.union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits());
+        Sets.SetView<Long> taggedUnion =
+                Sets.union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags());
+        return new EventCluster(IntervalUtils.span(cluster1.span, cluster2.span),
+                cluster1.getEventType(), idsUnion, hashHitsUnion, taggedUnion,
+                cluster1.getDescription(), cluster1.lod);
+    }
     final private EventStripe parent;
     /**
@ -64,81 +94,39 @@ public class EventCluster implements MultiEvent<EventStripe> {
     /**
      * the set of ids of the clustered events
      */
-    final private Set<Long> eventIDs;
+    final private ImmutableSet<Long> eventIDs;
     /**
      * the ids of the subset of clustered events that have at least one tag
      * applied to them
      */
-    private final Set<Long> tagged;
+    private final ImmutableSet<Long> tagged;
     /**
      * the ids of the subset of clustered events that have at least one hash set
      * hit
      */
-    private final Set<Long> hashHits;
-    /**
-     * merge two event clusters into one new event cluster.
-     *
-     * @param cluster1
-     * @param cluster2
-     *
-     * @return a new event cluster that is the result of merging the given
-     *         event clusters
-     */
-    public static EventCluster merge(EventCluster cluster1, EventCluster cluster2) {
-        if (cluster1.getEventType() != cluster2.getEventType()) {
-            throw new IllegalArgumentException("event clusters are not compatible: they have different types");
-        }
-        if (!cluster1.getDescription().equals(cluster2.getDescription())) {
-            throw new IllegalArgumentException("event clusters are not compatible: they have different descriptions");
-        }
-        Interval spanningInterval = IntervalUtils.span(cluster1.span, cluster2.span);
-        Set<Long> idsUnion = Sets.union(cluster1.getEventIDs(), cluster2.getEventIDs());
-        Set<Long> hashHitsUnion = Sets.union(cluster1.getEventIDsWithHashHits(), cluster2.getEventIDsWithHashHits());
-        Set<Long> taggedUnion = Sets.union(cluster1.getEventIDsWithTags(), cluster2.getEventIDsWithTags());
-        return new EventCluster(spanningInterval,
-                cluster1.getEventType(), idsUnion, hashHitsUnion, taggedUnion,
-                cluster1.getDescription(), cluster1.lod);
-    }
-    private EventCluster(Interval spanningInterval, EventType type, Set<Long> eventIDs,
-            Set<Long> hashHits, Set<Long> tagged, String description, DescriptionLoD lod,
-            EventStripe parent) {
+    private final ImmutableSet<Long> hashHits;
+    private EventCluster(Interval spanningInterval, EventType type, Collection<Long> eventIDs,
+            Collection<Long> hashHits, Collection<Long> tagged, String description, DescriptionLoD lod,
+            EventStripe parent) {
         this.span = spanningInterval;
         this.type = type;
-        this.hashHits = hashHits;
-        this.tagged = tagged;
+        this.hashHits = ImmutableSet.copyOf(hashHits);
+        this.tagged = ImmutableSet.copyOf(tagged);
         this.description = description;
-        this.eventIDs = eventIDs;
+        this.eventIDs = ImmutableSet.copyOf(eventIDs);
         this.lod = lod;
         this.parent = parent;
     }
-    public EventCluster(Interval spanningInterval, EventType type, Set<Long> eventIDs,
-            Set<Long> hashHits, Set<Long> tagged, String description, DescriptionLoD lod) {
+    public EventCluster(Interval spanningInterval, EventType type, Collection<Long> eventIDs,
+            Collection<Long> hashHits, Collection<Long> tagged, String description, DescriptionLoD lod) {
         this(spanningInterval, type, eventIDs, hashHits, tagged, description, lod, null);
     }
-    public EventCluster(TimelineEvent event, EventType type, DescriptionLoD lod) {
-        this(new Interval(event.getStartMillis(), event.getEndMillis()),
-                type,
-                singleton(event.getEventID()),
-                event.isHashHit() ? singleton(event.getEventID()) : emptySet(),
-                event.isTagged() ? singleton(event.getEventID()) : emptySet(),
-                event.getDescription(lod),
-                lod);
-    }
     /**
      * get the EventStripe (if any) that contains this cluster
      *
@ -177,17 +165,17 @@ public class EventCluster implements MultiEvent<EventStripe> {
     }
     @Override
-    public Set<Long> getEventIDs() {
+    public ImmutableSet<Long> getEventIDs() {
         return eventIDs;
     }
     @Override
-    public Set<Long> getEventIDsWithHashHits() {
+    public ImmutableSet<Long> getEventIDsWithHashHits() {
         return hashHits;
     }
     @Override
-    public Set<Long> getEventIDsWithTags() {
+    public ImmutableSet<Long> getEventIDsWithTags() {
         return tagged;
     }
@ -216,13 +204,12 @@ public class EventCluster implements MultiEvent<EventStripe> {
      * EventBundle as the parent.
      */
     public EventCluster withParent(EventStripe parent) {
         return new EventCluster(span, type, eventIDs, hashHits, tagged, description, lod, parent);
     }
     @Override
     public SortedSet<EventCluster> getClusters() {
-        return DetailsViewModel.copyAsSortedSet(singleton(this), Comparator.comparing(cluster -> true));
+        return ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis)).add(this).build();
     }
     @Override
@ -261,6 +248,9 @@ public class EventCluster implements MultiEvent<EventStripe> {
         if (this.lod != other.lod) {
             return false;
         }
-        return Objects.equals(this.eventIDs, other.eventIDs);
+        if (!Objects.equals(this.eventIDs, other.eventIDs)) {
+            return false;
+        }
+        return true;
     }
 }
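The merge method relies on Guava set views: Sets.union returns an unmodifiable, lazy view of both sets, and the private constructor then materializes it with ImmutableSet.copyOf so the merged cluster owns its own immutable state. A minimal sketch of that idiom, assuming only Guava on the classpath; the IDs are hypothetical:

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.Set;

public class MergeSketch {
    public static void main(String[] args) {
        Set<Long> cluster1IDs = ImmutableSet.of(1L, 2L);
        Set<Long> cluster2IDs = ImmutableSet.of(2L, 3L);

        // A lazy, unmodifiable view over both sets; no copying happens here.
        Sets.SetView<Long> union = Sets.union(cluster1IDs, cluster2IDs);

        // Materialize the view into an immutable set owned by the new cluster.
        ImmutableSet<Long> merged = ImmutableSet.copyOf(union);
        System.out.println(merged); // [1, 2, 3]
    }
}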

View File

@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2018-2019 Basis Technology Corp.
+ * Copyright 2015-16 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@ -16,28 +16,39 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;
+package org.sleuthkit.autopsy.timeline.datamodel;
 import com.google.common.base.Preconditions;
-import com.google.common.collect.Sets;
-import static java.util.Collections.singleton;
-import static java.util.Comparator.comparing;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.ImmutableSortedSet;
+import java.util.Comparator;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.Set;
 import java.util.SortedSet;
-import org.sleuthkit.datamodel.DescriptionLoD;
-import org.sleuthkit.datamodel.timeline.EventType;
+import javax.annotation.concurrent.Immutable;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
 /**
  * A 'collection' of {@link EventCluster}s, all having the same type,
  * description, and zoom levels, but not necessarily close together in time.
  */
+@Immutable
 public final class EventStripe implements MultiEvent<EventCluster> {
+    public static EventStripe merge(EventStripe u, EventStripe v) {
+        Preconditions.checkNotNull(u);
+        Preconditions.checkNotNull(v);
+        Preconditions.checkArgument(Objects.equals(u.description, v.description));
+        Preconditions.checkArgument(Objects.equals(u.lod, v.lod));
+        Preconditions.checkArgument(Objects.equals(u.type, v.type));
+        Preconditions.checkArgument(Objects.equals(u.parent, v.parent));
+        return new EventStripe(u, v);
+    }
     private final EventCluster parent;
-    private final SortedSet<EventCluster> clusters;
+    private final ImmutableSortedSet<EventCluster> clusters;
     /**
      * the type of all the events
@ -57,44 +68,32 @@ public final class EventStripe implements MultiEvent<EventCluster> {
     /**
      * the set of ids of the events
      */
-    private final Set<Long> eventIDs;
+    private final ImmutableSet<Long> eventIDs;
     /**
      * the ids of the subset of events that have at least one tag applied to
      * them
      */
-    private final Set<Long> tagged;
+    private final ImmutableSet<Long> tagged;
     /**
      * the ids of the subset of events that have at least one hash set hit
      */
-    private final Set<Long> hashHits;
-    public static EventStripe merge(EventStripe stripeA, EventStripe stripeB) {
-        Preconditions.checkNotNull(stripeA);
-        Preconditions.checkNotNull(stripeB);
-        Preconditions.checkArgument(Objects.equals(stripeA.description, stripeB.description));
-        Preconditions.checkArgument(Objects.equals(stripeA.lod, stripeB.lod));
-        Preconditions.checkArgument(Objects.equals(stripeA.type, stripeB.type));
-        Preconditions.checkArgument(Objects.equals(stripeA.parent, stripeB.parent));
-        return new EventStripe(stripeA, stripeB);
-    }
+    private final ImmutableSet<Long> hashHits;
     public EventStripe withParent(EventCluster parent) {
-        if (Objects.nonNull(this.parent)) {
+        if (java.util.Objects.nonNull(this.parent)) {
             throw new IllegalStateException("Event Stripe already has a parent!");
         }
         return new EventStripe(parent, this.type, this.description, this.lod, clusters, eventIDs, tagged, hashHits);
     }
-    private EventStripe(EventCluster parent, EventType type, String description,
-            DescriptionLoD lod, SortedSet<EventCluster> clusters,
-            Set<Long> eventIDs, Set<Long> tagged, Set<Long> hashHits) {
+    private EventStripe(EventCluster parent, EventType type, String description, DescriptionLoD lod, SortedSet<EventCluster> clusters, ImmutableSet<Long> eventIDs, ImmutableSet<Long> tagged, ImmutableSet<Long> hashHits) {
         this.parent = parent;
         this.type = type;
         this.description = description;
         this.lod = lod;
-        this.clusters = clusters;
+        this.clusters = ImmutableSortedSet.copyOf(Comparator.comparing(EventCluster::getStartMillis), clusters);
         this.eventIDs = eventIDs;
         this.tagged = tagged;
@ -102,8 +101,9 @@ public final class EventStripe implements MultiEvent<EventCluster> {
     }
     public EventStripe(EventCluster cluster) {
-        this.clusters = DetailsViewModel.copyAsSortedSet(singleton(cluster.withParent(this)),
-                comparing(EventCluster::getStartMillis));
+        this.clusters = ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis))
+                .add(cluster.withParent(this)).build();
         type = cluster.getEventType();
         description = cluster.getDescription();
@ -114,16 +114,28 @@ public final class EventStripe implements MultiEvent<EventCluster> {
         this.parent = null;
     }
-    private EventStripe(EventStripe stripeA, EventStripe stripeB) {
-        clusters = DetailsViewModel.copyAsSortedSet(Sets.union(stripeA.getClusters(), stripeB.getClusters()), comparing(EventCluster::getStartMillis));
-        type = stripeA.getEventType();
-        description = stripeA.getDescription();
-        lod = stripeA.getDescriptionLoD();
-        eventIDs = Sets.union(stripeA.getEventIDs(), stripeB.getEventIDs());
-        tagged = Sets.union(stripeA.getEventIDsWithTags(), stripeB.getEventIDsWithTags());
-        hashHits = Sets.union(stripeA.getEventIDsWithHashHits(), stripeB.getEventIDsWithHashHits());
-        parent = stripeA.getParent().orElse(stripeB.getParent().orElse(null));
+    private EventStripe(EventStripe u, EventStripe v) {
+        clusters = ImmutableSortedSet.orderedBy(Comparator.comparing(EventCluster::getStartMillis))
+                .addAll(u.getClusters())
+                .addAll(v.getClusters())
+                .build();
+        type = u.getEventType();
+        description = u.getDescription();
+        lod = u.getDescriptionLoD();
+        eventIDs = ImmutableSet.<Long>builder()
+                .addAll(u.getEventIDs())
+                .addAll(v.getEventIDs())
+                .build();
+        tagged = ImmutableSet.<Long>builder()
+                .addAll(u.getEventIDsWithTags())
+                .addAll(v.getEventIDsWithTags())
+                .build();
+        hashHits = ImmutableSet.<Long>builder()
+                .addAll(u.getEventIDsWithHashHits())
+                .addAll(v.getEventIDsWithHashHits())
+                .build();
+        parent = u.getParent().orElse(v.getParent().orElse(null));
     }
     @Override
@ -131,7 +143,6 @@ public final class EventStripe implements MultiEvent<EventCluster> {
         return Optional.ofNullable(parent);
     }
-    @Override
     public Optional<EventStripe> getParentStripe() {
         if (getParent().isPresent()) {
             return getParent().get().getParent();
@ -156,17 +167,17 @@ public final class EventStripe implements MultiEvent<EventCluster> {
     }
     @Override
-    public Set<Long> getEventIDs() {
+    public ImmutableSet<Long> getEventIDs() {
         return eventIDs;
     }
     @Override
-    public Set<Long> getEventIDsWithHashHits() {
+    public ImmutableSet<Long> getEventIDsWithHashHits() {
         return hashHits;
     }
     @Override
-    public Set<Long> getEventIDsWithTags() {
+    public ImmutableSet<Long> getEventIDsWithTags() {
         return tagged;
     }
@ -181,7 +192,7 @@ public final class EventStripe implements MultiEvent<EventCluster> {
     }
     @Override
-    public SortedSet< EventCluster> getClusters() {
+    public ImmutableSortedSet< EventCluster> getClusters() {
         return clusters;
     }
@ -225,7 +236,9 @@ public final class EventStripe implements MultiEvent<EventCluster> {
         if (this.lod != other.lod) {
             return false;
         }
-        return Objects.equals(this.eventIDs, other.eventIDs);
+        if (!Objects.equals(this.eventIDs, other.eventIDs)) {
+            return false;
+        }
+        return true;
     }
 }
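Both sides of this diff keep a stripe's clusters ordered by start time; the post-revert code does it with Guava's ImmutableSortedSet builder. A minimal sketch of that builder idiom, with a hypothetical Span record standing in for EventCluster:

import com.google.common.collect.ImmutableSortedSet;
import java.util.Comparator;

public class SortedClustersSketch {
    // Hypothetical stand-in for an EventCluster with a start time.
    record Span(String name, long startMillis) {}

    public static void main(String[] args) {
        // Insertion order does not matter; the comparator fixes the iteration order.
        ImmutableSortedSet<Span> clusters =
                ImmutableSortedSet.orderedBy(Comparator.comparingLong(Span::startMillis))
                        .add(new Span("b", 2000))
                        .add(new Span("a", 1000))
                        .build();
        System.out.println(clusters.first().name()); // a, the earliest start time
    }
}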

View File

@ -0,0 +1,537 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.datamodel;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javafx.beans.Observable;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.ListChangeListener;
import javafx.collections.MapChangeListener;
import javax.annotation.concurrent.GuardedBy;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.RootEventType;
import org.sleuthkit.autopsy.timeline.db.EventsRepository;
import org.sleuthkit.autopsy.timeline.events.DBUpdatedEvent;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.filters.DataSourceFilter;
import org.sleuthkit.autopsy.timeline.filters.DataSourcesFilter;
import org.sleuthkit.autopsy.timeline.filters.Filter;
import org.sleuthkit.autopsy.timeline.filters.HashHitsFilter;
import org.sleuthkit.autopsy.timeline.filters.HashSetFilter;
import org.sleuthkit.autopsy.timeline.filters.HideKnownFilter;
import org.sleuthkit.autopsy.timeline.filters.RootFilter;
import org.sleuthkit.autopsy.timeline.filters.TagNameFilter;
import org.sleuthkit.autopsy.timeline.filters.TagsFilter;
import org.sleuthkit.autopsy.timeline.filters.TextFilter;
import org.sleuthkit.autopsy.timeline.filters.TypeFilter;
import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
import org.sleuthkit.autopsy.timeline.zooming.ZoomParams;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TskCoreException;
/**
* This class acts as the model for a TimelineView
*
* Views can register listeners on properties returned by methods.
*
* This class is implemented as a filtered view into an underlying
* EventsRepository.
*
* TODO: as many methods as possible should cache their results so as to avoid
* unnecessary db calls through the EventsRepository -jm
*
 * Concurrency Policy: repo is internally synchronized, so methods that only
 * access the repo atomically do not need further synchronization.
 *
 * All other member state variables should only be accessed with the intrinsic
 * lock of the containing FilteredEventsModel held. Many methods delegate to a
 * task submitted to the dbQueryThread executor. These methods should
 * synchronize on this object, and the tasks should too. Since the tasks
 * execute asynchronously from the invoking methods, the methods will return
 * and release the lock for the tasks to obtain.
*
*/
public final class FilteredEventsModel {
private static final Logger LOGGER = Logger.getLogger(FilteredEventsModel.class.getName());
/**
* time range that spans the filtered events
*/
@GuardedBy("this")
private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
@GuardedBy("this")
private final ReadOnlyObjectWrapper<RootFilter> requestedFilter = new ReadOnlyObjectWrapper<>();
@GuardedBy("this")
private final ReadOnlyObjectWrapper< EventTypeZoomLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(EventTypeZoomLevel.BASE_TYPE);
@GuardedBy("this")
private final ReadOnlyObjectWrapper< DescriptionLoD> requestedLOD = new ReadOnlyObjectWrapper<>(DescriptionLoD.SHORT);
@GuardedBy("this")
private final ReadOnlyObjectWrapper<ZoomParams> requestedZoomParamters = new ReadOnlyObjectWrapper<>();
private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
/**
* The underlying repo for events. Atomic access to repo is synchronized
* internally, but compound access should be done with the intrinsic lock of
* this FilteredEventsModel object
*/
@GuardedBy("this")
private final EventsRepository repo;
private final Case autoCase;
public FilteredEventsModel(EventsRepository repo, ReadOnlyObjectProperty<ZoomParams> currentStateProperty) {
this.repo = repo;
this.autoCase = repo.getAutoCase();
repo.getDatasourcesMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(change.getValueAdded(), change.getKey());
RootFilter rootFilter = filterProperty().get();
rootFilter.getDataSourcesFilter().addSubFilter(dataSourceFilter);
requestedFilter.set(rootFilter.copyOf());
});
repo.getHashSetMap().addListener((MapChangeListener.Change<? extends Long, ? extends String> change) -> {
HashSetFilter hashSetFilter = new HashSetFilter(change.getValueAdded(), change.getKey());
RootFilter rootFilter = filterProperty().get();
rootFilter.getHashHitsFilter().addSubFilter(hashSetFilter);
requestedFilter.set(rootFilter.copyOf());
});
repo.getTagNames().addListener((ListChangeListener.Change<? extends TagName> c) -> {
RootFilter rootFilter = filterProperty().get();
TagsFilter tagsFilter = rootFilter.getTagsFilter();
repo.syncTagsFilter(tagsFilter);
requestedFilter.set(rootFilter.copyOf());
});
requestedFilter.set(getDefaultFilter());
//TODO: use bindings to keep these in sync? -jm
requestedZoomParamters.addListener((Observable observable) -> {
final ZoomParams zoomParams = requestedZoomParamters.get();
if (zoomParams != null) {
synchronized (FilteredEventsModel.this) {
requestedTypeZoom.set(zoomParams.getTypeZoomLevel());
requestedFilter.set(zoomParams.getFilter());
requestedTimeRange.set(zoomParams.getTimeRange());
requestedLOD.set(zoomParams.getDescriptionLOD());
}
}
});
requestedZoomParamters.bind(currentStateProperty);
}
/**
* Readonly observable property for the current ZoomParams
*
* @return A readonly observable property for the current ZoomParams.
*/
synchronized public ReadOnlyObjectProperty<ZoomParams> zoomParametersProperty() {
return requestedZoomParamters.getReadOnlyProperty();
}
/**
* Get the current ZoomParams
*
* @return The current ZoomParams
*/
synchronized public ZoomParams getZoomParamaters() {
return requestedZoomParamters.get();
}
/**
* Get a read only view of the time range currently in view.
*
* @return A read only view of the time range currently in view.
*/
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
if (requestedTimeRange.get() == null) {
requestedTimeRange.set(getSpanningInterval());
}
return requestedTimeRange.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<DescriptionLoD> descriptionLODProperty() {
return requestedLOD.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<RootFilter> filterProperty() {
return requestedFilter.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<EventTypeZoomLevel> eventTypeZoomProperty() {
return requestedTypeZoom.getReadOnlyProperty();
}
/**
* The time range currently in view.
*
* @return The time range currently in view.
*/
synchronized public Interval getTimeRange() {
return timeRangeProperty().get();
}
synchronized public DescriptionLoD getDescriptionLOD() {
return requestedLOD.get();
}
synchronized public RootFilter getFilter() {
return requestedFilter.get();
}
synchronized public EventTypeZoomLevel getEventTypeZoom() {
return requestedTypeZoom.get();
}
/**
* @return the default filter used at startup
*/
public RootFilter getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
repo.getDatasourcesMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
DataSourceFilter dataSourceFilter = new DataSourceFilter(t.getValue(), t.getKey());
dataSourceFilter.setSelected(Boolean.TRUE);
dataSourcesFilter.addSubFilter(dataSourceFilter);
});
HashHitsFilter hashHitsFilter = new HashHitsFilter();
repo.getHashSetMap().entrySet().stream().forEach((Map.Entry<Long, String> t) -> {
HashSetFilter hashSetFilter = new HashSetFilter(t.getValue(), t.getKey());
hashSetFilter.setSelected(Boolean.TRUE);
hashHitsFilter.addSubFilter(hashSetFilter);
});
TagsFilter tagsFilter = new TagsFilter();
repo.getTagNames().stream().forEach(t -> {
TagNameFilter tagNameFilter = new TagNameFilter(t, autoCase);
tagNameFilter.setSelected(Boolean.TRUE);
tagsFilter.addSubFilter(tagNameFilter);
});
return new RootFilter(new HideKnownFilter(), tagsFilter, hashHitsFilter, new TextFilter(), new TypeFilter(RootEventType.getInstance()), dataSourcesFilter, Collections.emptySet());
}
public Interval getBoundingEventsInterval() {
return repo.getBoundingEventsInterval(zoomParametersProperty().get().getTimeRange(), zoomParametersProperty().get().getFilter());
}
public SingleEvent getEventById(Long eventID) {
return repo.getEventById(eventID);
}
public Set<SingleEvent> getEventsById(Collection<Long> eventIDs) {
return repo.getEventsById(eventIDs);
}
/**
* get a count of tagnames applied to the given event ids as a map from
* tagname displayname to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tagname displayname to count of applications
*/
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) {
return repo.getTagCountsByTagName(eventIDsWithTags);
}
public List<Long> getEventIDs(Interval timeRange, Filter filter) {
final Interval overlap;
final RootFilter intersect;
synchronized (this) {
overlap = getSpanningInterval().overlap(timeRange);
intersect = requestedFilter.get().copyOf();
}
intersect.getSubFilters().add(filter);
return repo.getEventIDs(overlap, intersect);
}
/**
 * Get a representation of all the events, within the requested time range,
 * that pass the requested filter, grouped by time and description such that file
* system events for the same file, with the same timestamp, are combined
* together.
*
* @return A List of combined events, sorted by timestamp.
*/
public List<CombinedEvent> getCombinedEvents() {
return repo.getCombinedEvents(requestedTimeRange.get(), requestedFilter.get());
}
/**
 * Return the number of events, per event type, that pass the requested
 * filter and are within the given time range.
 *
 * NOTE: this method does not change the requested time range.
 *
 * @param timeRange The time range to count events within.
 *
 * @return A map from event type to the count of events of that type.
*/
public Map<EventType, Long> getEventCounts(Interval timeRange) {
final RootFilter filter;
final EventTypeZoomLevel typeZoom;
synchronized (this) {
filter = requestedFilter.get();
typeZoom = requestedTypeZoom.get();
}
return repo.countEvents(new ZoomParams(timeRange, typeZoom, filter, null));
}
/**
* @return the smallest interval spanning all the events from the
* repository, ignoring any filters or requested ranges
*/
public Interval getSpanningInterval() {
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
}
/**
* @return the smallest interval spanning all the given events
*/
public Interval getSpanningInterval(Collection<Long> eventIDs) {
return repo.getSpanningInterval(eventIDs);
}
/**
* @return the time (in seconds from unix epoch) of the absolutely first
* event available from the repository, ignoring any filters or
* requested ranges
*/
public Long getMinTime() {
return repo.getMinTime();
}
/**
* @return the time (in seconds from unix epoch) of the absolutely last
* event available from the repository, ignoring any filters or
* requested ranges
*/
public Long getMaxTime() {
return repo.getMaxTime();
}
/**
*
* @return a list of event clusters at the requested zoom levels that are
* within the requested time range and pass the requested filter
*/
public List<EventStripe> getEventStripes() {
final Interval range;
final RootFilter filter;
final EventTypeZoomLevel zoom;
final DescriptionLoD lod;
synchronized (this) {
range = requestedTimeRange.get();
filter = requestedFilter.get();
zoom = requestedTypeZoom.get();
lod = requestedLOD.get();
}
return repo.getEventStripes(new ZoomParams(range, zoom, filter, lod));
}
/**
* @param params
*
* @return a list of aggregated events that are within the requested time
* range and pass the requested filter, using the given aggregation
* to control the grouping of events
*/
public List<EventStripe> getEventStripes(ZoomParams params) {
return repo.getEventStripes(params);
}
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) {
ContentTag contentTag = evt.getAddedTag();
Content content = contentTag.getContent();
Set<Long> updatedEventIDs = repo.addTag(content.getId(), null, contentTag, null);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) {
BlackboardArtifactTag artifactTag = evt.getAddedTag();
BlackboardArtifact artifact = artifactTag.getArtifact();
Set<Long> updatedEventIDs = repo.addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag, null);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) {
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
try {
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = repo.deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of content.", ex); //NON-NLS
}
return false;
}
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) {
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
try {
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = repo.deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "unable to determine tagged status of artifact.", ex); //NON-NLS
}
return false;
}
/**
* Get a List of event IDs for the events that are derived from the given
* file.
*
* @param file The AbstractFile to get derived event IDs
* for.
* @param includeDerivedArtifacts If true, also get event IDs for events
 * derived from artifacts derived from this
* file. If false, only gets events derived
* directly from this file (file system
* timestamps).
*
* @return A List of event IDs for the events that are derived from the
* given file.
*/
public List<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) {
return repo.getEventIDsForFile(file, includeDerivedArtifacts);
}
/**
* Get a List of event IDs for the events that are derived from the given
* artifact.
*
* @param artifact The BlackboardArtifact to get derived event IDs for.
*
* @return A List of event IDs for the events that are derived from the
* given artifact.
*/
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) {
return repo.getEventIDsForArtifact(artifact);
}
/**
* Post a TagsAddedEvent to all registered subscribers, if the given set of
* updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsAddedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsAdded(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsAddedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Post a TagsDeletedEvent to all registered subscribers, if the given set
* of updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsDeletedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsDeletedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Register the given object to receive events.
*
* @param o The object to register. Must implement public methods annotated
* with Subscribe.
*/
synchronized public void registerForEvents(Object o) {
eventbus.register(o);
}
/**
* Un-register the given object, so it no longer receives events.
*
* @param o The object to un-register.
*/
synchronized public void unRegisterForEvents(Object o) {
eventbus.unregister(o);
}
/**
* Post a DBUpdatedEvent to all registered subscribers.
*/
public void postDBUpdated() {
eventbus.post(new DBUpdatedEvent());
}
/**
* Post a RefreshRequestedEvent to all registered subscribers.
*/
public void postRefreshRequest() {
eventbus.post(new RefreshRequestedEvent());
}
/**
* (Re)Post an AutopsyEvent received from another event distribution system
* locally to all registered subscribers.
*/
public void postAutopsyEventLocally(AutopsyEvent event) {
eventbus.post(event);
}
}
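FilteredEventsModel distributes tag and refresh events through a Guava EventBus. The register/post/@Subscribe mechanics it relies on can be shown in isolation; the TagsAdded record and View subscriber below are hypothetical stand-ins for the Autopsy event and listener types:

import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.Subscribe;
import java.util.Set;

public class EventBusSketch {
    // Hypothetical stand-in for TagsAddedEvent.
    record TagsAdded(Set<Long> eventIDs) {}

    static class View {
        // Invoked by the bus for every posted TagsAdded instance.
        @Subscribe
        public void onTagsAdded(TagsAdded event) {
            System.out.println("refresh view for events " + event.eventIDs());
        }
    }

    public static void main(String[] args) {
        EventBus eventbus = new EventBus("FilteredEventsModel_EventBus");
        eventbus.register(new View());                // cf. registerForEvents
        eventbus.post(new TagsAdded(Set.of(1L, 2L))); // cf. postTagsAdded
    }
}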

View File

@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2018 Basis Technology Corp.
+ * Copyright 2015-16 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@ -16,22 +16,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;
+package org.sleuthkit.autopsy.timeline.datamodel;
 import java.util.Optional;
 import java.util.SortedSet;
 /**
  * An interface for groups of events that share some attributes in common.
- * @param <ParentType>
  */
-public interface MultiEvent<ParentType extends MultiEvent<?>> extends DetailViewEvent {
-    @Override
+public interface MultiEvent<ParentType extends MultiEvent<?>> extends TimeLineEvent {
     long getEndMillis();
     Optional<ParentType> getParent();
-    @Override
     SortedSet<EventCluster> getClusters();
 }
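The bound `ParentType extends MultiEvent<?>` is a self-referential generic: each level of the hierarchy names the type of its own parent. A toy sketch of that pattern with hypothetical names:

import java.util.Optional;

public class ParentBoundSketch {
    // Hypothetical self-referential hierarchy: each level declares its parent's type.
    interface Grouping<P extends Grouping<?>> {
        Optional<P> getParent();
    }

    static class Stripe implements Grouping<Cluster> {
        public Optional<Cluster> getParent() { return Optional.empty(); }
    }

    static class Cluster implements Grouping<Stripe> {
        public Optional<Stripe> getParent() { return Optional.of(new Stripe()); }
    }

    public static void main(String[] args) {
        System.out.println(new Cluster().getParent().isPresent()); // true
    }
}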

View File

@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2018-2019 Basis Technology Corp.
+ * Copyright 2014-16 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@ -16,7 +16,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;
+package org.sleuthkit.autopsy.timeline.datamodel;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSortedSet;
@ -25,22 +25,24 @@ import java.util.Comparator;
 import java.util.Optional;
 import java.util.Set;
 import java.util.SortedSet;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
 import org.joda.time.Interval;
-import org.sleuthkit.datamodel.DescriptionLoD;
-import org.sleuthkit.datamodel.timeline.EventType;
-import org.sleuthkit.datamodel.timeline.TimelineEvent;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
+import org.sleuthkit.datamodel.TskData;
 /**
  * A single event.
  */
-public class SingleDetailsViewEvent implements DetailViewEvent {
+@Immutable
+public class SingleEvent implements TimeLineEvent {
     private final long eventID;
     /**
-     * The TSK object ID of the file (could be data source) this event is
-     * derived from.
+     * The TSK object ID of the file this event is derived from.
      */
-    private final long fileObjId;
+    private final long objID;
     /**
      * The TSK artifact ID of the file this event is derived from. Null, if this
@ -51,7 +53,7 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
     /**
      * The TSK datasource ID of the datasource this event belongs to.
      */
-    private final long dataSourceObjId;
+    private final long dataSourceID;
     /**
      * The time of this event in seconds from the Unix epoch.
@ -68,6 +70,11 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
      */
     private final ImmutableMap<DescriptionLoD, String> descriptions;
+    /**
+     * The known value for the file this event is derived from.
+     */
+    private final TskData.FileKnown known;
     /**
      * True if the file this event is derived from hits any of the configured
      * hash sets.
@ -85,60 +92,32 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
      */
     private MultiEvent<?> parent = null;
-    /**
-     *
-     * @param eventID
-     * @param dataSourceObjId
-     * @param fileObjId         Object Id of file (could be a data source) that
-     *                          event is associated with
-     * @param artifactID
-     * @param time
-     * @param type
-     * @param fullDescription
-     * @param medDescription
-     * @param shortDescription
-     * @param hashHit
-     * @param tagged
-     */
-    public SingleDetailsViewEvent(long eventID, long dataSourceObjId, long fileObjId, Long artifactID, long time, EventType type, String fullDescription, String medDescription, String shortDescription, boolean hashHit, boolean tagged) {
+    public SingleEvent(long eventID, long dataSourceID, long objID, @Nullable Long artifactID, long time, EventType type, String fullDescription, String medDescription, String shortDescription, TskData.FileKnown known, boolean hashHit, boolean tagged) {
         this.eventID = eventID;
-        this.dataSourceObjId = dataSourceObjId;
-        this.fileObjId = fileObjId;
+        this.dataSourceID = dataSourceID;
+        this.objID = objID;
         this.artifactID = Long.valueOf(0).equals(artifactID) ? null : artifactID;
         this.time = time;
         this.type = type;
         descriptions = ImmutableMap.<DescriptionLoD, String>of(DescriptionLoD.FULL, fullDescription,
                 DescriptionLoD.MEDIUM, medDescription,
                 DescriptionLoD.SHORT, shortDescription);
+        this.known = known;
         this.hashHit = hashHit;
         this.tagged = tagged;
     }
-    public SingleDetailsViewEvent(TimelineEvent singleEvent) {
-        this(singleEvent.getEventID(),
-                singleEvent.getDataSourceObjID(),
-                singleEvent.getFileObjID(),
-                singleEvent.getArtifactID().orElse(null),
-                singleEvent.getTime(),
-                singleEvent.getEventType(),
-                singleEvent.getFullDescription(),
-                singleEvent.getMedDescription(),
-                singleEvent.getShortDescription(),
-                singleEvent.isHashHit(),
-                singleEvent.isTagged());
-    }
     /**
-     * Get a new SingleDetailsViewEvent that is the same as this event, but with
-     * the given parent.
+     * Get a new SingleEvent that is the same as this event, but with the given
+     * parent.
      *
      * @param newParent the parent of the new event object.
      *
-     * @return a new SingleDetailsViewEvent that is the same as this event, but
-     *         with the given parent.
+     * @return a new SingleEvent that is the same as this event, but with the
+     *         given parent.
      */
-    public SingleDetailsViewEvent withParent(MultiEvent<?> newParent) {
-        SingleDetailsViewEvent singleEvent = new SingleDetailsViewEvent(eventID, dataSourceObjId, fileObjId, artifactID, time, type, descriptions.get(DescriptionLoD.FULL), descriptions.get(DescriptionLoD.MEDIUM), descriptions.get(DescriptionLoD.SHORT), hashHit, tagged);
+    public SingleEvent withParent(MultiEvent<?> newParent) {
+        SingleEvent singleEvent = new SingleEvent(eventID, dataSourceID, objID, artifactID, time, type, descriptions.get(DescriptionLoD.FULL), descriptions.get(DescriptionLoD.MEDIUM), descriptions.get(DescriptionLoD.SHORT), known, hashHit, tagged);
         singleEvent.parent = newParent;
         return singleEvent;
     }
@ -184,13 +163,12 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
     }
     /**
-     * Get the obj id of the file (which could be a data source) this event is
-     * derived from.
+     * Get the obj id of the file this event is derived from.
      *
      * @return the object id.
      */
     public long getFileID() {
-        return fileObjId;
+        return objID;
     }
     /**
@ -234,6 +212,15 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
         return getDescription(DescriptionLoD.SHORT);
     }
+    /**
+     * Get the known value of the file this event is derived from.
+     *
+     * @return the known value
+     */
+    public TskData.FileKnown getKnown() {
+        return known;
+    }
     /**
      * Get the description of this event at the given level of detail (LoD).
     *
@ -250,8 +237,8 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
      *
      * @return the datasource id.
      */
-    public long getDataSourceObjID() {
-        return dataSourceObjId;
+    public long getDataSourceID() {
+        return dataSourceID;
     }
     @Override
@ -294,8 +281,11 @@ public class SingleDetailsViewEvent implements DetailViewEvent {
         if (getClass() != obj.getClass()) {
             return false;
         }
-        final SingleDetailsViewEvent other = (SingleDetailsViewEvent) obj;
-        return this.eventID == other.eventID;
+        final SingleEvent other = (SingleEvent) obj;
+        if (this.eventID != other.eventID) {
+            return false;
+        }
+        return true;
     }
     @Override
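SingleEvent keeps its three description strings in a single ImmutableMap keyed by level of detail, so getDescription(lod) is one lookup. A minimal sketch of that pattern with a hypothetical LoD enum, assuming Guava on the classpath:

import com.google.common.collect.ImmutableMap;

public class DescriptionsSketch {
    // Hypothetical stand-in for DescriptionLoD.
    enum LoD { FULL, MEDIUM, SHORT }

    public static void main(String[] args) {
        ImmutableMap<LoD, String> descriptions = ImmutableMap.of(
                LoD.FULL, "/img/docs/report.doc",
                LoD.MEDIUM, "/img/docs",
                LoD.SHORT, "/img");
        // One lookup per level of detail, as in getDescription(lod).
        System.out.println(descriptions.get(LoD.SHORT)); // /img
    }
}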

View File

@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2019 Basis Technology Corp.
+ * Copyright 2016 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@ -16,14 +16,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.sleuthkit.autopsy.timeline.ui.detailview.datamodel;
+package org.sleuthkit.autopsy.timeline.datamodel;
-import java.util.Comparator;
 import java.util.Optional;
 import java.util.Set;
 import java.util.SortedSet;
-import org.sleuthkit.datamodel.DescriptionLoD;
-import org.sleuthkit.datamodel.timeline.EventType;
+import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
+import org.sleuthkit.autopsy.timeline.zooming.DescriptionLoD;
 /**
  * An event of the timeline. Concrete implementations may represent single
@ -32,7 +31,7 @@ import org.sleuthkit.datamodel.timeline.EventType;
  * event types). Note that for SingleEvents or events that are all simultaneous,
  * the start time may be equal to the end time.
  */
-public interface DetailViewEvent {
+public interface TimeLineEvent {
     /**
      * Get a description of this event. Implementations may choose what level of
@ -120,12 +119,4 @@ public interface DetailViewEvent {
      * @return The EventClusters that make up this event.
      */
     SortedSet<EventCluster> getClusters();
-    static class StartComparator implements Comparator<DetailViewEvent> {
-        @Override
-        public int compare(DetailViewEvent o1, DetailViewEvent o2) {
-            return Long.compare(o1.getStartMillis(), o2.getStartMillis());
-        }
-    }
 }
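The StartComparator removed above is the pre-lambda way to order events by start time; Comparator.comparingLong expresses the same ordering as a key extractor. A quick sketch with a hypothetical event record:

import java.util.Comparator;
import java.util.List;

public class StartOrderSketch {
    // Hypothetical stand-in for a timeline event.
    record Ev(String name, long startMillis) {}

    public static void main(String[] args) {
        List<Ev> events = List.of(new Ev("b", 2000), new Ev("a", 1000));

        // Equivalent to the removed StartComparator, in one line.
        Comparator<Ev> byStart = Comparator.comparingLong(Ev::startMillis);
        System.out.println(events.stream().sorted(byStart).toList());
        // [Ev[name=a, startMillis=1000], Ev[name=b, startMillis=2000]]
    }
}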

View File

@ -0,0 +1,207 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014-16 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.text.MessageFormat;
import java.util.Optional;
import java.util.function.Function;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
/**
 * An event type that is derived from a specific type of blackboard artifact,
 * using one of the artifact's attributes as its timestamp.
 */
public interface ArtifactEventType extends EventType {
public static final Logger LOGGER = Logger.getLogger(ArtifactEventType.class.getName());
/**
* Get the artifact type this event type is derived from.
*
* @return The artifact type this event type is derived from.
*/
public BlackboardArtifact.Type getArtifactType();
/**
* The attribute type this event type is derived from.
*
* @return The attribute type this event type is derived from.
*/
public BlackboardAttribute.Type getDateTimeAttributeType();
/**
 * Get the ID of the artifact type that this EventType is derived from.
 *
 * @return the ID of the artifact type that this EventType is derived
* from.
*/
public default int getArtifactTypeID() {
return getArtifactType().getTypeID();
}
/**
 * Given an artifact, pull out the time stamp and compose the descriptions.
 * Each implementation of ArtifactEventType needs to implement
 * parseAttributesHelper() as a hook for buildEventDescription(org.sleuthkit.datamodel.BlackboardArtifact)
 * to invoke. Most subtypes can use this default implementation.
*
* @param artf
*
* @return an AttributeEventDescription containing the timestamp
* and description information
*
* @throws TskCoreException
*/
default AttributeEventDescription parseAttributesHelper(BlackboardArtifact artf) throws TskCoreException {
final BlackboardAttribute dateTimeAttr = artf.getAttribute(getDateTimeAttributeType());
long time = dateTimeAttr.getValueLong();
String shortDescription = getShortExtractor().apply(artf);
String medDescription = shortDescription + " : " + getMedExtractor().apply(artf);
String fullDescription = medDescription + " : " + getFullExtractor().apply(artf);
return new AttributeEventDescription(time, shortDescription, medDescription, fullDescription);
}
/**
* @return a function from an artifact to a String to use as part of the
* full event description
*/
Function<BlackboardArtifact, String> getFullExtractor();
/**
* @return a function from an artifact to a String to use as part of the
* medium event description
*/
Function<BlackboardArtifact, String> getMedExtractor();
/**
* @return a function from an artifact to a String to use as part of the
* short event description
*/
Function<BlackboardArtifact, String> getShortExtractor();
/**
* bundles the per event information derived from a BlackBoard Artifact into
* one object. Primarily used to have a single return value for
* ArtifactEventType#buildEventDescription(ArtifactEventType, BlackboardArtifact).
*/
static class AttributeEventDescription {
final private long time;
public long getTime() {
return time;
}
public String getShortDescription() {
return shortDescription;
}
public String getMedDescription() {
return medDescription;
}
public String getFullDescription() {
return fullDescription;
}
final private String shortDescription;
final private String medDescription;
final private String fullDescription;
public AttributeEventDescription(long time, String shortDescription,
String medDescription,
String fullDescription) {
this.time = time;
this.shortDescription = shortDescription;
this.medDescription = medDescription;
this.fullDescription = fullDescription;
}
}
/**
* Build a AttributeEventDescription derived from a BlackboardArtifact. This
* is a template method that relies on each ArtifactEventType's
* implementation of ArtifactEventType#parseAttributesHelper() to know how
* to go from BlackboardAttributes to the event description.
*
* @param type
* @param artf the BlackboardArtifact to derive the event description from
*
 * @return an AttributeEventDescription derived from the given artifact, or
 *         null if the given artifact has no timestamp
*
 * @throws TskCoreException if there is a problem accessing the blackboard
* data
*/
static public AttributeEventDescription buildEventDescription(ArtifactEventType type, BlackboardArtifact artf) throws TskCoreException {
//if we got passed an artifact that doesn't correspond to the type of the event,
//something went very wrong. throw an exception.
if (type.getArtifactTypeID() != artf.getArtifactTypeID()) {
throw new IllegalArgumentException();
}
if (artf.getAttribute(type.getDateTimeAttributeType()) == null) {
LOGGER.log(Level.WARNING, "Artifact {0} has no date/time attribute, skipping it.", artf.getArtifactID()); // NON-NLS
return null;
}
//use the hook provided by this subtype implementation
return type.parseAttributesHelper(artf);
}
static class AttributeExtractor implements Function<BlackboardArtifact, String> {
public String apply(BlackboardArtifact artf) {
return Optional.ofNullable(getAttributeSafe(artf, attributeType))
.map(BlackboardAttribute::getDisplayString)
.map(StringUtils::defaultString)
.orElse("");
}
private final BlackboardAttribute.Type attributeType;
public AttributeExtractor(BlackboardAttribute.Type attribute) {
this.attributeType = attribute;
}
}
static class EmptyExtractor implements Function<BlackboardArtifact, String> {
@Override
public String apply(BlackboardArtifact t) {
return "";
}
}
static BlackboardAttribute getAttributeSafe(BlackboardArtifact artf, BlackboardAttribute.Type attrType) {
try {
return artf.getAttribute(attrType);
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, MessageFormat.format("Error getting attribute from artifact {0}.", artf.getArtifactID()), ex); // NON-NLS
return null;
}
}
}
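AttributeExtractor above is just a Function<BlackboardArtifact, String> that pulls one attribute's display string null-safely through an Optional chain. The same shape can be sketched with plain JDK types; the Artifact record and attribute name below are hypothetical, not Sleuth Kit API:

import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

public class ExtractorSketch {
    // Hypothetical stand-in for an artifact with named attributes.
    record Artifact(Map<String, String> attributes) {
        String getAttribute(String type) { return attributes.get(type); }
    }

    public static void main(String[] args) {
        // Null-safe extraction, mirroring AttributeExtractor.apply().
        Function<Artifact, String> urlExtractor = artifact ->
                Optional.ofNullable(artifact.getAttribute("TSK_URL"))
                        .orElse("");

        Artifact withUrl = new Artifact(Map.of("TSK_URL", "http://example.com"));
        Artifact withoutUrl = new Artifact(Map.of());
        System.out.println(urlExtractor.apply(withUrl));    // http://example.com
        System.out.println(urlExtractor.apply(withoutUrl)); // "" (empty, never null)
    }
}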

View File

@ -0,0 +1,110 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.datamodel.eventtype;
import java.util.Arrays;
import java.util.List;
import javafx.scene.image.Image;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.timeline.zooming.EventTypeZoomLevel;
/**
 * BaseTypes are the event types one level below the root type in the event
 * type hierarchy.
*/
public enum BaseTypes implements EventType {
FILE_SYSTEM(NbBundle.getMessage(BaseTypes.class, "BaseTypes.fileSystem.name"), "blue-document.png") { // NON-NLS
@Override
public List<? extends EventType> getSubTypes() {
return Arrays.asList(FileSystemTypes.values());
}
@Override
public EventType getSubType(String string) {
return FileSystemTypes.valueOf(string);
}
},
WEB_ACTIVITY(NbBundle.getMessage(BaseTypes.class, "BaseTypes.webActivity.name"), "web-file.png") { // NON-NLS
@Override
public List<? extends EventType> getSubTypes() {
return Arrays.asList(WebTypes.values());
}
@Override
public EventType getSubType(String string) {
return WebTypes.valueOf(string);
}
},
MISC_TYPES(NbBundle.getMessage(BaseTypes.class, "BaseTypes.miscTypes.name"), "block.png") { // NON-NLS
@Override
public List<? extends EventType> getSubTypes() {
return Arrays.asList(MiscTypes.values());
}
@Override
public EventType getSubType(String string) {
return MiscTypes.valueOf(string);
}
};
private final String displayName;
private final String iconBase;
private final Image image;
@Override
public Image getFXImage() {
return image;
}
@Override
public String getIconBase() {
return iconBase;
}
@Override
public EventTypeZoomLevel getZoomLevel() {
return EventTypeZoomLevel.BASE_TYPE;
}
@Override
public String getDisplayName() {
return displayName;
}
private BaseTypes(String displayName, String iconBase) {
this.displayName = displayName;
this.iconBase = iconBase;
this.image = new Image("org/sleuthkit/autopsy/timeline/images/" + iconBase, true); // NON-NLS
}
@Override
public EventType getSuperType() {
return RootEventType.getInstance();
}
@Override
public EventType getSubType(String string) {
return BaseTypes.valueOf(string);
}
}
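Each BaseTypes constant supplies its own sub-type table through a constant-specific class body, and getSubType resolves a string with the sub-enum's valueOf. A minimal sketch of that enum-hierarchy idiom with hypothetical type names:

import java.util.Arrays;
import java.util.List;

public class EnumHierarchySketch {
    // Hypothetical two-level type hierarchy using constant-specific bodies.
    enum SubType { LOGIN, LOGOUT }

    enum BaseType {
        SYSTEM {
            @Override
            List<SubType> getSubTypes() {
                return Arrays.asList(SubType.values());
            }

            @Override
            SubType getSubType(String name) {
                return SubType.valueOf(name); // string-to-enum lookup, as in BaseTypes
            }
        };

        abstract List<SubType> getSubTypes();

        abstract SubType getSubType(String name);
    }

    public static void main(String[] args) {
        System.out.println(BaseType.SYSTEM.getSubType("LOGIN")); // LOGIN
    }
}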

Some files were not shown because too many files have changed in this diff.