Merge branch 'file-search' of https://github.com/sleuthkit/autopsy into 5367-DiscoverPaging

William Schaefer 2019-08-20 14:37:19 -04:00
commit cdd61fb6aa
2988 changed files with 174293 additions and 9615 deletions

View File

@ -39,7 +39,11 @@
<copy todir="${basedir}/release/Tesseract-OCR" >
<fileset dir="${thirdparty.dir}/Tesseract-OCR"/>
</copy>
<!--Copy Plaso to release-->
<copy todir="${basedir}/release/plaso" >
<fileset dir="${thirdparty.dir}/plaso"/>
</copy>
<!--Copy GStreamer to release-->
<copy todir="${basedir}/release/gstreamer" >
<fileset dir="${thirdparty.dir}/gstreamer"/>

View File

@ -2,7 +2,7 @@ Manifest-Version: 1.0
OpenIDE-Module: org.sleuthkit.autopsy.core/10
OpenIDE-Module-Localizing-Bundle: org/sleuthkit/autopsy/core/Bundle.properties
OpenIDE-Module-Layer: org/sleuthkit/autopsy/core/layer.xml
OpenIDE-Module-Implementation-Version: 27
OpenIDE-Module-Implementation-Version: 28
OpenIDE-Module-Requires: org.openide.windows.WindowManager
AutoUpdate-Show-In-Client: true
AutoUpdate-Essential-Module: true

View File

@ -122,5 +122,5 @@ nbm.homepage=http://www.sleuthkit.org/
nbm.module.author=Brian Carrier
nbm.needs.restart=true
source.reference.curator-recipes-2.8.0.jar=release/modules/ext/curator-recipes-2.8.0-sources.jar
spec.version.base=10.15
spec.version.base=10.16

View File

@ -60,7 +60,6 @@ Case.progressMessage.cancelling=Cancelling...
Case.progressMessage.clearingTempDirectory=Clearing case temp directory...
Case.progressMessage.closingApplicationServiceResources=Closing case-specific application service resources...
Case.progressMessage.closingCaseDatabase=Closing case database...
Case.progressMessage.closingCaseLevelServices=Closing case-level services...
Case.progressMessage.connectingToCoordSvc=Connecting to coordination service...
Case.progressMessage.creatingCaseDatabase=Creating case database...
Case.progressMessage.creatingCaseDirectory=Creating case directory...

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Copyright 2012-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,6 +19,7 @@
package org.sleuthkit.autopsy.casemodule;
import com.google.common.annotations.Beta;
import com.google.common.eventbus.Subscribe;
import org.sleuthkit.autopsy.casemodule.multiusercases.CaseNodeData;
import java.awt.Frame;
import java.awt.event.ActionEvent;
@ -26,7 +27,6 @@ import java.awt.event.ActionListener;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.io.File;
import java.io.IOException;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.nio.file.Paths;
@ -35,7 +35,6 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
@ -68,7 +67,6 @@ import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.actions.OpenOutputFolderAction;
import org.sleuthkit.autopsy.appservices.AutopsyService;
import org.sleuthkit.autopsy.appservices.AutopsyService.CaseContext;
import static org.sleuthkit.autopsy.casemodule.Bundle.*;
import org.sleuthkit.autopsy.casemodule.CaseMetadata.CaseMetadataException;
import org.sleuthkit.autopsy.casemodule.datasourcesummary.DataSourceSummaryAction;
import org.sleuthkit.autopsy.casemodule.events.AddingDataSourceEvent;
@ -108,12 +106,17 @@ import org.sleuthkit.autopsy.events.AutopsyEventException;
import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
import org.sleuthkit.autopsy.ingest.IngestJob;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchServiceException;
import org.sleuthkit.autopsy.progress.LoggingProgressIndicator;
import org.sleuthkit.autopsy.progress.ModalDialogProgressIndicator;
import org.sleuthkit.autopsy.progress.ProgressIndicator;
import org.sleuthkit.autopsy.timeline.OpenTimelineAction;
import org.sleuthkit.autopsy.timeline.events.TimelineEventAddedEvent;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.CaseDbConnectionInfo;
import org.sleuthkit.datamodel.Content;
@ -121,6 +124,7 @@ import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Report;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
@ -155,6 +159,7 @@ public class Case {
private CollaborationMonitor collaborationMonitor;
private Services caseServices;
private boolean hasDataSources;
private final TSKCaseRepublisher tskEventForwarder = new TSKCaseRepublisher();
/*
* Get a reference to the main window of the desktop application to use to
@ -388,13 +393,44 @@ public class Case {
*/
TAG_DEFINITION_CHANGED,
/**
* An item in the central repository has had its comment modified. The
* old value is null, the new value is string for current comment.
* A timeline event, such as a MAC time or web activity event, was
* added to the current case. The old value is null and the new value
* is the TimelineEvent that was added.
*/
TIMELINE_EVENT_ADDED,
/* An item in the central repository has had its comment
* modified. The old value is null and the new value is the string
* for the current comment.
*/
CR_COMMENT_CHANGED;
};
private final class TSKCaseRepublisher {
@Subscribe
public void rebroadcastTimelineEventCreated(TimelineManager.TimelineEventAddedEvent event) {
eventPublisher.publish(new TimelineEventAddedEvent(event));
}
@SuppressWarnings("deprecation")
@Subscribe
public void rebroadcastArtifactsPosted(Blackboard.ArtifactsPostedEvent event) {
for (BlackboardArtifact.Type artifactType : event.getArtifactTypes()) {
/*
* fireModuleDataEvent is deprecated so module writers don't use
* it (they should use Blackboard.postArtifact(s) instead), but
* we still need a way to rebroadcast the ArtifactsPostedEvent
* as a ModuleDataEvent.
*/
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
event.getModuleName(),
artifactType,
event.getArtifacts(artifactType)));
}
}
}
/**
* Adds a subscriber to all case events. To subscribe to only specific
* events, use one of the overloads of addEventSubscriber.
@ -499,8 +535,8 @@ public class Case {
*/
public static boolean isValidName(String caseName) {
return !(caseName.contains("\\") || caseName.contains("/") || caseName.contains(":")
|| caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
|| caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
|| caseName.contains("*") || caseName.contains("?") || caseName.contains("\"")
|| caseName.contains("<") || caseName.contains(">") || caseName.contains("|"));
}
/**
@ -2128,7 +2164,7 @@ public class Case {
} else if (UserPreferences.getIsMultiUserModeEnabled()) {
caseDb = SleuthkitCase.openCase(databaseName, UserPreferences.getDatabaseConnectionInfo(), metadata.getCaseDirectory());
} else {
throw new CaseActionException(Case_open_exception_multiUserCaseNotEnabled());
throw new CaseActionException(Bundle.Case_open_exception_multiUserCaseNotEnabled());
}
} catch (TskUnsupportedSchemaVersionException ex) {
throw new CaseActionException(Bundle.Case_exceptionMessage_unsupportedSchemaVersionMessage(ex.getLocalizedMessage()), ex);
@ -2150,6 +2186,8 @@ public class Case {
private void openCaseLevelServices(ProgressIndicator progressIndicator) {
progressIndicator.progress(Bundle.Case_progressMessage_openingCaseLevelServices());
this.caseServices = new Services(caseDb);
caseDb.registerForEvents(tskEventForwarder);
}
/**
@ -2373,7 +2411,6 @@ public class Case {
@Messages({
"Case.progressMessage.shuttingDownNetworkCommunications=Shutting down network communications...",
"Case.progressMessage.closingApplicationServiceResources=Closing case-specific application service resources...",
"Case.progressMessage.closingCaseLevelServices=Closing case-level services...",
"Case.progressMessage.closingCaseDatabase=Closing case database..."
})
private void close(ProgressIndicator progressIndicator) {
@ -2399,22 +2436,11 @@ public class Case {
closeAppServiceCaseResources();
/*
* Close the case-level services.
*/
if (null != caseServices) {
progressIndicator.progress(Bundle.Case_progressMessage_closingCaseLevelServices());
try {
this.caseServices.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, String.format("Error closing internal case services for %s at %s", this.getName(), this.getCaseDirectory()), ex);
}
}
/*
* Close the case database
* Close the case database.
*/
if (null != caseDb) {
progressIndicator.progress(Bundle.Case_progressMessage_closingCaseDatabase());
caseDb.unregisterForEvents(tskEventForwarder);
caseDb.close();
}
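The Case.java hunks above introduce a TIMELINE_EVENT_ADDED case event and a TSKCaseRepublisher that forwards TimelineManager and Blackboard events from the case database into the case event publisher. Below is a minimal sketch (not part of this commit) of how client code could listen for the new event, assuming the String-based addEventSubscriber/removeEventSubscriber overloads referenced in the Case javadoc; the TimelineEventLogger class name is illustrative only.

import java.beans.PropertyChangeListener;
import org.sleuthkit.autopsy.casemodule.Case;

public final class TimelineEventLogger {

    // Invoked whenever the case publishes a TIMELINE_EVENT_ADDED event; per the
    // javadoc above, the old value is null and the new value is the TimelineEvent.
    private static final PropertyChangeListener LISTENER
            = evt -> System.out.println("Timeline event added: " + evt.getNewValue());

    public static void register() {
        Case.addEventSubscriber(Case.Events.TIMELINE_EVENT_ADDED.toString(), LISTENER);
    }

    public static void unregister() {
        Case.removeEventSubscriber(Case.Events.TIMELINE_EVENT_ADDED.toString(), LISTENER);
    }
}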

View File

@ -27,7 +27,7 @@ import javax.swing.table.AbstractTableModel;
import javax.swing.table.DefaultTableCellRenderer;
import javax.swing.table.DefaultTableModel;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.datamodel.utils.FileTypeUtils;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils;
import org.sleuthkit.datamodel.DataSource;
/**

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2015-2018 Basis Technology Corp.
* Copyright 2015-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -15,58 +15,50 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* TODO (AUT-2158): This class should not extend Closeable.
*/
package org.sleuthkit.autopsy.casemodule.services;
import java.io.Closeable;
import java.io.IOException;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskDataException;
/**
* A representation of the blackboard, a place where artifacts and their
* attributes are posted.
*
* NOTE: This API of this class is under development.
* @deprecated Use org.sleuthkit.datamodel.Blackboard instead.
*/
@Deprecated
public final class Blackboard implements Closeable {
private SleuthkitCase caseDb;
/**
* Constructs a representation of the blackboard, a place where artifacts
* and their attributes are posted.
*
* @param casedb The case database.
* @deprecated Do not use.
*/
Blackboard(SleuthkitCase casedb) {
this.caseDb = casedb;
@Deprecated
Blackboard() {
}
/**
* Indexes the text associated with the an artifact.
* Indexes the text associated with an artifact.
*
* @param artifact The artifact to be indexed.
*
* @throws BlackboardException If there is a problem indexing the artifact.
* @deprecated Use org.sleuthkit.datamodel.Blackboard.postArtifact instead.
*/
@Deprecated
public synchronized void indexArtifact(BlackboardArtifact artifact) throws BlackboardException {
if (null == caseDb) {
throw new BlackboardException("Blackboard has been closed");
}
KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
if (null == searchService) {
throw new BlackboardException("Keyword search service not found");
}
try {
searchService.index(artifact);
} catch (TskCoreException ex) {
throw new BlackboardException("Error indexing artifact", ex);
Case.getCurrentCase().getSleuthkitCase().getBlackboard().postArtifact(artifact, "");
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
throw new BlackboardException(ex.getMessage(), ex);
}
}
@ -81,21 +73,15 @@ public final class Blackboard implements Closeable {
*
* @throws BlackboardException If there is a problem getting or adding the
* artifact type.
* @deprecated Use org.sleuthkit.datamodel.Blackboard.getOrAddArtifactType
* instead.
*/
@Deprecated
public synchronized BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
if (null == caseDb) {
throw new BlackboardException("Blackboard has been closed");
}
try {
return caseDb.addBlackboardArtifactType(typeName, displayName);
} catch (TskDataException typeExistsEx) {
try {
return caseDb.getArtifactType(typeName);
} catch (TskCoreException ex) {
throw new BlackboardException("Failed to get or add artifact type", ex);
}
} catch (TskCoreException ex) {
throw new BlackboardException("Failed to get or add artifact type", ex);
return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddArtifactType(typeName, displayName);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
throw new BlackboardException(ex.getMessage(), ex);
}
}
@ -111,37 +97,40 @@ public final class Blackboard implements Closeable {
*
* @throws BlackboardException If there is a problem getting or adding the
* attribute type.
* @deprecated Use org.sleuthkit.datamodel.Blackboard.getOrAddArtifactType
* instead.
*/
@Deprecated
public synchronized BlackboardAttribute.Type getOrAddAttributeType(String typeName, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws BlackboardException {
if (null == caseDb) {
throw new BlackboardException("Blackboard has been closed");
}
try {
return caseDb.addArtifactAttributeType(typeName, valueType, displayName);
} catch (TskDataException typeExistsEx) {
try {
return caseDb.getAttributeType(typeName);
} catch (TskCoreException ex) {
throw new BlackboardException("Failed to get or add attribute type", ex);
}
} catch (TskCoreException ex) {
throw new BlackboardException("Failed to get or add attribute type", ex);
return Case.getCurrentCase().getSleuthkitCase().getBlackboard().getOrAddAttributeType(typeName, valueType, displayName);
} catch (org.sleuthkit.datamodel.Blackboard.BlackboardException ex) {
throw new BlackboardException(ex.getMessage(), ex);
}
}
/**
* Closes the blackboard.
* Closes the artifacts blackboard.
*
* @throws IOException If there is a problem closing the blackboard.
* @throws IOException If there is a problem closing the artifacts
* blackboard.
* @deprecated Do not use.
*/
@Override
public synchronized void close() throws IOException {
caseDb = null;
@Deprecated
public void close() throws IOException {
/*
* No-op maintained for backwards compatibility. Clients should not
* attempt to close case services.
*/
}
/**
* A blackboard exception.
*
* @deprecated Use org.sleuthkit.datamodel.Blackboard.BlackboardException
* instead.
*/
@Deprecated
public static final class BlackboardException extends Exception {
private static final long serialVersionUID = 1L;
@ -150,7 +139,10 @@ public final class Blackboard implements Closeable {
* Constructs a blackboard exception with the specified message.
*
* @param message The message.
*
* @deprecated Do not use.
*/
@Deprecated
public BlackboardException(String message) {
super(message);
}
@ -161,9 +153,13 @@ public final class Blackboard implements Closeable {
*
* @param message The message.
* @param cause The cause.
*
* @deprecated Do not use.
*/
@Deprecated
public BlackboardException(String message, Throwable cause) {
super(message, cause);
}
}
}
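Since org.sleuthkit.autopsy.casemodule.services.Blackboard is now a thin deprecated wrapper, callers are expected to post artifacts through org.sleuthkit.datamodel.Blackboard directly. Here is a small sketch (not part of this commit) of the replacement call path, using only methods shown in the hunks above; the ArtifactPoster class and MODULE_NAME value are illustrative.

import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;

final class ArtifactPoster {

    private static final String MODULE_NAME = "MyModule"; // illustrative module name

    // Replaces the deprecated Services.getBlackboard().indexArtifact(artifact):
    // postArtifact() indexes the artifact and fires an ArtifactsPostedEvent that the
    // new Case.TSKCaseRepublisher rebroadcasts as a legacy ModuleDataEvent.
    static void post(BlackboardArtifact artifact)
            throws NoCurrentCaseException, Blackboard.BlackboardException {
        Blackboard blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
        blackboard.postArtifact(artifact, MODULE_NAME);
    }
}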

View File

@ -2,8 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
*
* Copyright 2012-2019 Basis Technology Corp.
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
@ -19,6 +18,8 @@
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* TODO (AUT-2158): This class should not extend Closeable.
*/
package org.sleuthkit.autopsy.casemodule.services;
@ -87,25 +88,26 @@ public class FileManager implements Closeable {
}
return caseDb.findAllFilesWhere(createFileTypeInCondition(mimeTypes));
}
/**
* Finds all parent_paths that match the specified parentPath and are in the specified data source.
*
* Finds all parent_paths that match the specified parentPath and are in the
* specified data source.
*
* @param dataSourceObjectID - the id of the data source to get files from
* @param parentPath - the parent path that all files should be like
*
* @param parentPath - the parent path that all files should be like
*
* @return The list of files
*
* @throws TskCoreException If there is a problem querying the case
*
* @throws TskCoreException If there is a problem querying the case
* database.
*/
public synchronized List<AbstractFile> findFilesByParentPath(long dataSourceObjectID, String parentPath) throws TskCoreException {
if (null == caseDb) {
throw new TskCoreException("File manager has been closed");
}
return caseDb.findAllFilesWhere(createParentPathCondition(dataSourceObjectID,parentPath));
return caseDb.findAllFilesWhere(createParentPathCondition(dataSourceObjectID, parentPath));
}
/**
* Finds all files in a given data source (image, local/logical files set,
* etc.) with types that match one of a collection of MIME types.
@ -138,18 +140,18 @@ public class FileManager implements Closeable {
}
/**
* Converts a data source object id and a parent path into SQL
* Converts a data source object id and a parent path into SQL
* data_source_obj_id = ? AND parent_path LIKE ?%
*
*
* @param dataSourceObjectID
* @param parentPath
* @return
*
* @return
*/
private static String createParentPathCondition(long dataSourceObjectID, String parentPath){
return "data_source_obj_id = " + dataSourceObjectID +" AND parent_path LIKE '" + parentPath +"%'";
private static String createParentPathCondition(long dataSourceObjectID, String parentPath) {
return "data_source_obj_id = " + dataSourceObjectID + " AND parent_path LIKE '" + parentPath + "%'";
}
/**
* Finds all files and directories with a given file name. The name search
* is for full or partial matches and is case insensitive (a case
@ -180,8 +182,10 @@ public class FileManager implements Closeable {
* case insensitive (a case insensitive SQL LIKE clause is used to query the
* case database).
*
* @param fileName The full name or a pattern to match on part of the name
* @param parentSubString Substring that must exist in parent path. Will be surrounded by % in LIKE query.
* @param fileName The full name or a pattern to match on part of the
* name
* @param parentSubString Substring that must exist in parent path. Will be
* surrounded by % in LIKE query.
*
* @return The matching files and directories.
*
@ -233,7 +237,7 @@ public class FileManager implements Closeable {
* LIKE clause is used to query the case database).
*
* @param dataSource The data source.
* @param fileName The full name or a pattern to match on part of the name
* @param fileName The full name or a pattern to match on part of the name
*
* @return The matching files and directories.
*
@ -254,9 +258,11 @@ public class FileManager implements Closeable {
* insensitive (a case insensitive SQL LIKE clause is used to query the case
* database).
*
* @param dataSource The data source.
* @param fileName The full name or a pattern to match on part of the name
* @param parentSubString Substring that must exist in parent path. Will be surrounded by % in LIKE query.
* @param dataSource The data source.
* @param fileName The full name or a pattern to match on part of the
* name
* @param parentSubString Substring that must exist in parent path. Will be
* surrounded by % in LIKE query.
*
* @return The matching files and directories.
*
@ -278,7 +284,7 @@ public class FileManager implements Closeable {
* database).
*
* @param dataSource The data source.
* @param fileName The full name or a pattern to match on part of the name
* @param fileName The full name or a pattern to match on part of the name
* @param parent The parent file or directory.
*
* @return The matching files and directories.
@ -360,6 +366,7 @@ public class FileManager implements Closeable {
ctime, crtime, atime, mtime,
isFile, parentObj, rederiveDetails, toolName, toolVersion, otherDetails, encodingType);
}
/**
* Update a derived file which already exists in the case.
*
@ -370,7 +377,7 @@ public class FileManager implements Closeable {
* @param ctime The change time of the file.
* @param crtime The create time of the file
* @param atime The accessed time of the file.
* @param mimeType The MIME type the updated file should have, null
* @param mimeType The MIME type the updated file should have, null
* to unset it
* @param mtime The modified time of the file.
* @param isFile True if a file, false if a directory.
@ -614,10 +621,15 @@ public class FileManager implements Closeable {
* Closes the file manager.
*
* @throws IOException If there is a problem closing the file manager.
* @deprecated Do not use.
*/
@Deprecated
@Override
public synchronized void close() throws IOException {
caseDb = null;
/*
* No-op maintained for backwards compatibility. Clients should not
* attempt to close case services.
*/
}
/**
@ -754,7 +766,7 @@ public class FileManager implements Closeable {
* the parent local directory.
* @param localFile The local/logical file or directory.
* @param progressUpdater notifier to receive progress notifications on
* folders added, or null if not used. Called after
* folders added, or null if not used. Called after
* each file/directory is added to the case database.
*
* @return An AbstractFile representation of the local/logical file.
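FileManager gains findFilesByParentPath(long, String), backed by the createParentPathCondition() shown above. A minimal usage sketch (not part of this commit), assuming an open case; the class name and arguments are illustrative. Note that the parent path is concatenated directly into the LIKE clause, so callers should only pass trusted values.

import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;

final class ParentPathQueryExample {

    // Lists the files in one data source whose parent_path starts with the given prefix.
    static List<AbstractFile> filesUnder(long dataSourceObjectId, String parentPath)
            throws NoCurrentCaseException, TskCoreException {
        FileManager fileManager = Case.getCurrentCaseThrows().getServices().getFileManager();
        // Generates: data_source_obj_id = <id> AND parent_path LIKE '<parentPath>%'
        return fileManager.findFilesByParentPath(dataSourceObjectId, parentPath);
    }
}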

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Copyright 2012-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
@ -23,49 +23,35 @@ package org.sleuthkit.autopsy.casemodule.services;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
* A collection of case-level services (e.g., file manager, tags manager,
* keyword search, blackboard).
* A collection of case-level services: file manager, tags manager, keyword
* search service, artifacts blackboard.
*
* TODO (AUT-2158): This class should not extend Closeable.
*/
public class Services implements Closeable {
private final List<Closeable> services = new ArrayList<>();
private final FileManager fileManager;
private final TagsManager tagsManager;
private final KeywordSearchService keywordSearchService;
private final Blackboard blackboard;
/**
* Constructs a collection of case-level services (e.g., file manager, tags
* manager, keyword search, blackboard).
* Constructs a collection of case-level services: file manager, tags
* manager, keyword search service, artifacts blackboard.
*
* @param caseDb The case database for the current case.
*/
public Services(SleuthkitCase caseDb) {
fileManager = new FileManager(caseDb);
services.add(fileManager);
tagsManager = new TagsManager(caseDb);
services.add(tagsManager);
//This lookup fails in the functional test code. See JIRA-4571 for details.
//For the time being, the closing of this service at line 108 will be made
//null safe so that the functional tests run with no issues.
keywordSearchService = Lookup.getDefault().lookup(KeywordSearchService.class);
services.add(keywordSearchService);
blackboard = new Blackboard(caseDb);
services.add(blackboard);
}
/**
* Gets the file manager service for the current case.
* Gets the file manager for the current case.
*
* @return The file manager service for the current case.
*/
@ -74,7 +60,7 @@ public class Services implements Closeable {
}
/**
* Gets the tags manager service for the current case.
* Gets the tags manager for the current case.
*
* @return The tags manager service for the current case.
*/
@ -88,30 +74,45 @@ public class Services implements Closeable {
* @return The keyword search service for the current case.
*/
public KeywordSearchService getKeywordSearchService() {
return keywordSearchService;
return Lookup.getDefault().lookup(KeywordSearchService.class);
}
/**
* Gets the blackboard service for the current case.
* Gets the artifacts blackboard for the current case.
*
* @return The org.sleuthkit.datamodel.Blackboard for the current
* case.
*/
public org.sleuthkit.datamodel.Blackboard getArtifactsBlackboard() {
return Case.getCurrentCase().getSleuthkitCase().getBlackboard();
}
/**
* Gets the artifacts blackboard for the current case.
*
* @return The blackboard service for the current case.
*
* @deprecated Use
* org.sleuthkit.autopsy.casemodule.services.Services.getArtifactsBlackboard
* instead.
*/
@Deprecated
public Blackboard getBlackboard() {
return blackboard;
return new Blackboard();
}
/**
* Closes the services for the current case.
*
* @throws IOException if there is a problem closing the services.
* @deprecated Do not use.
*/
@Deprecated
@Override
public void close() throws IOException {
for (Closeable service : services) {
if(service != null) {
service.close();
}
}
/*
* No-op maintained for backwards compatibility. Clients should not
* attempt to close case services.
*/
}
}
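With Services.close() reduced to a no-op and getArtifactsBlackboard() returning the case database's blackboard, case-level services no longer need explicit cleanup. A short sketch (not part of this commit) of fetching the artifacts blackboard through the new accessor and calling the getOrAddArtifactType method used by the deprecated wrapper above; the type and display names are placeholders.

import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;

final class CustomTypeExample {

    static BlackboardArtifact.Type customType()
            throws NoCurrentCaseException, Blackboard.BlackboardException {
        // Returns org.sleuthkit.datamodel.Blackboard rather than the deprecated
        // org.sleuthkit.autopsy.casemodule.services.Blackboard wrapper.
        Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getArtifactsBlackboard();
        // Placeholder type and display names.
        return blackboard.getOrAddArtifactType("TSK_EXAMPLE_TYPE", "Example Type");
    }
}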

View File

@ -3254,7 +3254,15 @@ abstract class AbstractSqlEamDb implements EamDb {
*
* @throws EamDbException
*/
@Messages({"AbstractSqlEamDb.upgradeSchema.incompatible=The selected Central Repository is not compatible with the current version of the application, please upgrade the application if you wish to use this Central Repository."})
@Messages({"AbstractSqlEamDb.upgradeSchema.incompatible=The selected Central Repository is not compatible with the current version of the application, please upgrade the application if you wish to use this Central Repository.",
"# {0} - minorVersion",
"AbstractSqlEamDb.badMinorSchema.message=Bad value for schema minor version ({0}) - database is corrupt.",
"AbstractSqlEamDb.failedToReadMinorVersion.message=Failed to read schema minor version for Central Repository.",
"# {0} - majorVersion",
"AbstractSqlEamDb.badMajorSchema.message=Bad value for schema version ({0}) - database is corrupt.",
"AbstractSqlEamDb.failedToReadMajorVersion.message=Failed to read schema version for Central Repository.",
"# {0} - platformName",
"AbstractSqlEamDb.cannotUpgrage.message=Currently selected database platform \"{0}\" can not be upgraded."})
@Override
public void upgradeSchema() throws EamDbException, SQLException, IncompatibleCentralRepoException {
@ -3277,10 +3285,10 @@ abstract class AbstractSqlEamDb implements EamDb {
try {
minorVersion = Integer.parseInt(minorVersionStr);
} catch (NumberFormatException ex) {
throw new EamDbException("Bad value for schema minor version (" + minorVersionStr + ") - database is corrupt", ex);
throw new EamDbException(Bundle.AbstractSqlEamDb_badMinorSchema_message(minorVersionStr), ex);
}
} else {
throw new EamDbException("Failed to read schema minor version from db_info table");
throw new EamDbException(Bundle.AbstractSqlEamDb_failedToReadMinorVersion_message());
}
int majorVersion = 0;
@ -3291,10 +3299,10 @@ abstract class AbstractSqlEamDb implements EamDb {
try {
majorVersion = Integer.parseInt(majorVersionStr);
} catch (NumberFormatException ex) {
throw new EamDbException("Bad value for schema version (" + majorVersionStr + ") - database is corrupt", ex);
throw new EamDbException(Bundle.AbstractSqlEamDb_badMajorSchema_message(majorVersionStr), ex);
}
} else {
throw new EamDbException("Failed to read schema major version from db_info table");
throw new EamDbException(Bundle.AbstractSqlEamDb_failedToReadMajorVersion_message());
}
/*
@ -3372,7 +3380,7 @@ abstract class AbstractSqlEamDb implements EamDb {
addObjectIdIndexTemplate = SqliteEamDbSettings.getAddObjectIdIndexTemplate();
break;
default:
throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded.");
throw new EamDbException(Bundle.AbstractSqlEamDb_cannotUpgrage_message(selectedPlatform.name()));
}
final String dataSourcesTableName = "data_sources";
final String dataSourceObjectIdColumnName = "datasource_obj_id";
@ -3532,13 +3540,12 @@ abstract class AbstractSqlEamDb implements EamDb {
statement.execute("DROP TABLE old_data_sources");
break;
default:
throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded.");
throw new EamDbException(Bundle.AbstractSqlEamDb_cannotUpgrage_message(selectedPlatform.name()));
}
}
updateSchemaVersion(conn);
conn.commit();
logger.log(Level.INFO, String.format("Central Repository schema updated to version %s", SOFTWARE_CR_DB_SCHEMA_VERSION));
} catch (SQLException | EamDbException ex) {
try {
if (conn != null) {
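The hardcoded exception strings above are replaced with NetBeans Bundle messages, including parameterized keys whose "# {0} - name" comments document each positional argument. A minimal sketch (not part of this commit) of the same @Messages pattern; the class name and bundle key are illustrative, and the Bundle class is generated by the annotation processor at build time.

import org.openide.util.NbBundle.Messages;

final class LocalizedErrorExample {

    // The "# {0} - value" line documents the positional argument; the processor
    // generates Bundle.LocalizedErrorExample_badValue_message(Object arg) in this package.
    @Messages({
        "# {0} - value",
        "LocalizedErrorExample.badValue.message=Bad value ({0}) - input is corrupt."
    })
    static String describeBadValue(String value) {
        return Bundle.LocalizedErrorExample_badValue_message(value);
    }
}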

View File

@ -1,4 +1,14 @@
# {0} - majorVersion
AbstractSqlEamDb.badMajorSchema.message=Bad value for schema version ({0}) - database is corrupt.
# {0} - minorVersion
AbstractSqlEamDb.badMinorSchema.message=Bad value for schema minor version ({0}) - database is corrupt.
# {0} - platformName
AbstractSqlEamDb.cannotUpgrage.message=Currently selected database platform "{0}" can not be upgraded.
AbstractSqlEamDb.failedToReadMajorVersion.message=Failed to read schema version for Central Repository.
AbstractSqlEamDb.failedToReadMinorVersion.message=Failed to read schema minor version for Central Repository.
AbstractSqlEamDb.upgradeSchema.incompatible=The selected Central Repository is not compatible with the current version of the application, please upgrade the application if you wish to use this Central Repository.
CorrelationAttributeInstance.invalidName.message=Invalid database table name. Name must start with a lowercase letter and can only contain lowercase letters, numbers, and '_'.
CorrelationAttributeInstance.nullName.message=Database name is null.
CorrelationType.DOMAIN.displayName=Domains
CorrelationType.EMAIL.displayName=Email Addresses
CorrelationType.FILES.displayName=Files
@ -23,4 +33,12 @@ EamCase.title.examinerName=Examiner Name:
EamCase.title.examinerPhone=Examiner Phone:
EamCase.title.notes=Notes:
EamCase.title.org=Organization:
EamDbUtil.centralRepoUpgradeFailed.message=Failed to upgrade central repository. It has been disabled.
EamDbUtil.centralRepoConnectionFailed.message=Unable to connect to Central Repository.
EamDbUtil.centralRepoDisabled.message=\ The Central Repository has been disabled.
EamDbUtil.centralRepoUpgradeFailed.message=Failed to upgrade Central Repository.
EamDbUtil.exclusiveLockAquisitionFailure.message=Unable to acquire exclusive lock for Central Repository.
PostgresEamDb.centralRepoDisabled.message=Central Repository module is not enabled.
PostgresEamDb.connectionFailed.message=Error getting connection to database.
SqliteEamDb.centralRepositoryDisabled.message=Central Repository module is not enabled.
SqliteEamDb.connectionFailedMessage.message=Error getting connection to database.
SqliteEamDb.databaseMissing.message=Central repository database missing

View File

@ -276,9 +276,11 @@ public class CorrelationAttributeInstance implements Serializable {
* @param supported Is this Type currently supported
* @param enabled Is this Type currently enabled.
*/
@Messages({"CorrelationAttributeInstance.nullName.message=Database name is null.",
"CorrelationAttributeInstance.invalidName.message=Invalid database table name. Name must start with a lowercase letter and can only contain lowercase letters, numbers, and '_'."})
public Type(int typeId, String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException {
if (dbTableName == null) {
throw new EamDbException("dbTableName is null");
throw new EamDbException(Bundle.CorrelationAttributeInstance_nullName_message());
}
this.typeId = typeId;
this.displayName = displayName;
@ -286,7 +288,7 @@ public class CorrelationAttributeInstance implements Serializable {
this.supported = supported;
this.enabled = enabled;
if (!Pattern.matches(DB_NAMES_REGEX, dbTableName)) {
throw new EamDbException("Invalid database table name. Name must start with a lowercase letter and can only contain lowercase letters, numbers, and '_'."); // NON-NLS
throw new EamDbException(Bundle.CorrelationAttributeInstance_invalidName_message()); // NON-NLS
}
}

View File

@ -171,27 +171,61 @@ public class EamDbUtil {
* upgrade fails, the Central Repository will be disabled and the current
* settings will be cleared.
*/
@Messages({"EamDbUtil.centralRepoUpgradeFailed.message=Failed to upgrade central repository. It has been disabled."})
@Messages({"EamDbUtil.centralRepoDisabled.message= The Central Repository has been disabled.",
"EamDbUtil.centralRepoUpgradeFailed.message=Failed to upgrade Central Repository.",
"EamDbUtil.centralRepoConnectionFailed.message=Unable to connect to Central Repository.",
"EamDbUtil.exclusiveLockAquisitionFailure.message=Unable to acquire exclusive lock for Central Repository."})
public static void upgradeDatabase() throws EamDbException {
if (!EamDb.isEnabled()) {
return;
}
EamDb db = null;
CoordinationService.Lock lock = null;
String messageForDialog = "";
//get connection
try {
EamDb db = EamDb.getInstance();
db = EamDb.getInstance();
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error updating central repository, unable to make connection", ex);
messageForDialog = Bundle.EamDbUtil_centralRepoConnectionFailed_message() + Bundle.EamDbUtil_centralRepoDisabled_message();
}
//get lock necessary for upgrade
if (db != null) {
try {
// This may return null if locking isn't supported, which is fine. It will
// throw an exception if locking is supported but we can't get the lock
// (meaning the database is in use by another user)
lock = db.getExclusiveMultiUserDbLock();
//perform upgrade
try {
db.upgradeSchema();
} catch (EamDbException | SQLException | IncompatibleCentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error updating central repository", ex);
messageForDialog = Bundle.EamDbUtil_centralRepoUpgradeFailed_message() + Bundle.EamDbUtil_centralRepoDisabled_message();
if (ex instanceof IncompatibleCentralRepoException) {
messageForDialog = ex.getMessage() + "\n\n" + messageForDialog;
} else if (ex instanceof EamDbException) {
messageForDialog = ex.getMessage() + Bundle.EamDbUtil_centralRepoDisabled_message();
}
} finally {
if (lock != null) {
try {
lock.release();
} catch (CoordinationServiceException ex) {
LOGGER.log(Level.SEVERE, "Error releasing database lock", ex);
}
}
}
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error updating central repository, unable to acquire exclusive lock", ex);
messageForDialog = Bundle.EamDbUtil_exclusiveLockAquisitionFailure_message() + Bundle.EamDbUtil_centralRepoDisabled_message();
}
// This may return null if locking isn't supported, which is fine. It will
// throw an exception if locking is supported but we can't get the lock
// (meaning the database is in use by another user)
lock = db.getExclusiveMultiUserDbLock();
db.upgradeSchema();
} catch (EamDbException | SQLException | IncompatibleCentralRepoException ex) {
LOGGER.log(Level.SEVERE, "Error updating central repository", ex);
// Disable the central repo and clear the current settings.
} else {
messageForDialog = Bundle.EamDbUtil_centralRepoConnectionFailed_message() + Bundle.EamDbUtil_centralRepoDisabled_message();
}
// Disable the central repo and clear the current settings.
if (!messageForDialog.isEmpty()) {
try {
if (null != EamDb.getInstance()) {
EamDb.getInstance().shutdownConnections();
@ -201,19 +235,8 @@ public class EamDbUtil {
}
EamDbPlatformEnum.setSelectedPlatform(EamDbPlatformEnum.DISABLED.name());
EamDbPlatformEnum.saveSelectedPlatform();
String messageForDialog = Bundle.EamDbUtil_centralRepoUpgradeFailed_message();
if (ex instanceof IncompatibleCentralRepoException) {
messageForDialog = ex.getMessage() + "\n\n" + messageForDialog;
}
throw new EamDbException(messageForDialog);
} finally {
if (lock != null) {
try {
lock.release();
} catch (CoordinationServiceException ex) {
LOGGER.log(Level.SEVERE, "Error releasing database lock", ex);
}
}
}
}
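EamDbUtil.upgradeDatabase() now wraps the schema upgrade in an exclusive multi-user lock and always releases it, collecting user-facing messages instead of throwing midway. A condensed sketch (not part of this commit) of that acquire/upgrade/release shape, with assumed import paths and a simplified throws clause; upgradeSchema() actually declares EamDbException, SQLException, and IncompatibleCentralRepoException.

import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.coordinationservice.CoordinationService;

final class UpgradeLockSketch {

    private static final Logger LOGGER = Logger.getLogger(UpgradeLockSketch.class.getName());

    // The lock may be null when the selected platform does not support locking,
    // which matches the comment in the hunk above.
    static void upgradeWithLock(EamDb db) throws Exception {
        CoordinationService.Lock lock = db.getExclusiveMultiUserDbLock();
        try {
            db.upgradeSchema();
        } finally {
            if (lock != null) {
                try {
                    lock.release();
                } catch (CoordinationService.CoordinationServiceException ex) {
                    LOGGER.log(Level.SEVERE, "Error releasing database lock", ex);
                }
            }
        }
    }
}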

View File

@ -25,6 +25,7 @@ import java.sql.Statement;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import org.apache.commons.dbcp2.BasicDataSource;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coordinationservice.CoordinationService;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -183,11 +184,13 @@ final class PostgresEamDb extends AbstractSqlEamDb {
*
* @throws EamDbException
*/
@Messages({"PostgresEamDb.centralRepoDisabled.message=Central Repository module is not enabled.",
"PostgresEamDb.connectionFailed.message=Error getting connection to database."})
@Override
protected Connection connect() throws EamDbException {
synchronized (this) {
if (!EamDb.isEnabled()) {
throw new EamDbException("Central Repository module is not enabled"); // NON-NLS
throw new EamDbException(Bundle.PostgresEamDb_centralRepoDisabled_message()); // NON-NLS
}
if (connectionPool == null) {
@ -197,7 +200,7 @@ final class PostgresEamDb extends AbstractSqlEamDb {
try {
return connectionPool.getConnection();
} catch (SQLException ex) {
throw new EamDbException("Error getting connection from connection pool.", ex); // NON-NLS
throw new EamDbException(Bundle.PostgresEamDb_connectionFailed_message(), ex); // NON-NLS
}
}

View File

@ -28,6 +28,7 @@ import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import org.apache.commons.dbcp2.BasicDataSource;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.casemodule.Case;
@ -153,10 +154,11 @@ final class SqliteEamDb extends AbstractSqlEamDb {
* Setup a connection pool for db connections.
*
*/
@Messages({"SqliteEamDb.databaseMissing.message=Central repository database missing"})
private void setupConnectionPool(boolean foreignKeysEnabled) throws EamDbException {
if (dbSettings.dbFileExists() == false) {
throw new EamDbException("Central repository database missing");
throw new EamDbException(Bundle.SqliteEamDb_databaseMissing_message());
}
connectionPool = new BasicDataSource();
@ -179,17 +181,20 @@ final class SqliteEamDb extends AbstractSqlEamDb {
/**
* Lazily setup Singleton connection on first request.
*
* @param foreignKeys determines if foreign keys should be enforced during this connection for SQLite
* @param foreignKeys determines if foreign keys should be enforced during
* this connection for SQLite
*
* @return A connection from the connection pool.
*
* @throws EamDbException
*/
@Messages({"SqliteEamDb.connectionFailedMessage.message=Error getting connection to database.",
"SqliteEamDb.centralRepositoryDisabled.message=Central Repository module is not enabled."})
@Override
protected Connection connect(boolean foreignKeys) throws EamDbException {
synchronized (this) {
if (!EamDb.isEnabled()) {
throw new EamDbException("Central Repository module is not enabled"); // NON-NLS
throw new EamDbException(Bundle.SqliteEamDb_centralRepositoryDisabled_message()); // NON-NLS
}
if (connectionPool == null) {
setupConnectionPool(foreignKeys);
@ -197,13 +202,14 @@ final class SqliteEamDb extends AbstractSqlEamDb {
try {
return connectionPool.getConnection();
} catch (SQLException ex) {
throw new EamDbException("Error getting connection from connection pool.", ex); // NON-NLS
throw new EamDbException(Bundle.SqliteEamDb_connectionFailedMessage_message(), ex); // NON-NLS
}
}
}
/**
* Lazily setup Singleton connection on first request with foreign keys enforced.
* Lazily setup Singleton connection on first request with foreign keys
* enforced.
*
* @return A connection from the connection pool.
*
@ -213,7 +219,7 @@ final class SqliteEamDb extends AbstractSqlEamDb {
protected Connection connect() throws EamDbException {
return connect(true);
}
@Override
protected String getConflictClause() {
// For sqlite, our conflict clause is part of the table schema

View File

@ -6,4 +6,4 @@ IngestEventsListener.prevCaseComment.text=Previous Case:
IngestEventsListener.prevCount.text=Number of previous {0}: {1}
IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)
IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)
Installer.centralRepoUpgradeFailed.title=Central repository upgrade failed
Installer.centralRepoUpgradeFailed.title=Central repository disabled

View File

@ -23,6 +23,7 @@ import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import static java.lang.Boolean.FALSE;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
@ -34,35 +35,40 @@ import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.coreutils.ThreadUtils;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.autopsy.ingest.events.DataSourceAnalysisCompletedEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Listen for ingest events and update entries in the Central Repository
* database accordingly
*/
@NbBundle.Messages({"IngestEventsListener.ingestmodule.name=Correlation Engine"})
public class IngestEventsListener {
private static final Logger LOGGER = Logger.getLogger(CorrelationAttributeInstance.class.getName());
private static final String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
private static int correlationModuleInstanceCount;
@ -171,8 +177,7 @@ public class IngestEventsListener {
}
/**
* Configure the listener to flag devices previously seen in other cases or
* not.
* Configure the listener to flag previously seen devices or not.
*
* @param value True to flag seen devices; otherwise false.
*/
@ -189,86 +194,68 @@ public class IngestEventsListener {
createCrProperties = value;
}
/**
* Make an Interesting Item artifact for a new artifact that was previously
* seen as notable.
* @param originalArtifact Original artifact that we want to flag
* @param caseDisplayNames List of case names the artifact was previously seen in
*/
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
"IngestEventsListener.prevCaseComment.text=Previous Case: ",
"IngestEventsListener.ingestmodule.name=Correlation Engine"})
static private void postCorrelatedBadArtifactToBlackboard(BlackboardArtifact bbArtifact, List<String> caseDisplayNames) {
"IngestEventsListener.prevCaseComment.text=Previous Case: "})
static private void makeAndPostPreviousNotableArtifact(BlackboardArtifact originalArtifact, List<String> caseDisplayNames) {
try {
String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevTaggedSet_text()));
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
AbstractFile abstractFile = tskCase.getAbstractFileById(bbArtifact.getObjectID());
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
tifArtifact.addAttributes(attributes);
try {
// index the artifact for keyword search
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard.indexArtifact(tifArtifact);
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
// fire event to notify UI of this new artifact
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
}
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_COMMENT, MODULE_NAME,
Bundle.IngestEventsListener_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))),
new BlackboardAttribute(
TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
originalArtifact.getArtifactID()));
makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
}
/**
* Create an Interesting Aritfact hit for a device which was previously seen
* Create an Interesting Artifact hit for a device which was previously seen
* in the central repository.
*
* @param bbArtifact the artifact to create the interesting item for
* @param originalArtifact the artifact to create the interesting item for
*/
@NbBundle.Messages({"IngestEventsListener.prevExists.text=Previously Seen Devices (Central Repository)",
"# {0} - typeName",
"# {1} - count",
"IngestEventsListener.prevCount.text=Number of previous {0}: {1}"})
static private void postCorrelatedPreviousArtifactToBlackboard(BlackboardArtifact bbArtifact) {
static private void makeAndPostPreviousSeenArtifact(BlackboardArtifact originalArtifact) {
Collection<BlackboardAttribute> attributesForNewArtifact = Arrays.asList(new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevExists_text()),
new BlackboardAttribute(
TSK_ASSOCIATED_ARTIFACT, MODULE_NAME,
originalArtifact.getArtifactID()));
makeAndPostInterestingArtifact(originalArtifact, attributesForNewArtifact);
}
/**
* Make an interesting item artifact to flag the passed-in artifact.
* @param originalArtifact Artifact in the current case that we want to flag
* @param attributesForNewArtifact Attributes to assign to the new Interesting Item artifact
*/
private static void makeAndPostInterestingArtifact(BlackboardArtifact originalArtifact, Collection<BlackboardAttribute> attributesForNewArtifact) {
try {
String MODULE_NAME = Bundle.IngestEventsListener_ingestmodule_name();
Collection<BlackboardAttribute> attributes = new ArrayList<>();
BlackboardAttribute att = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
Bundle.IngestEventsListener_prevExists_text());
attributes.add(att);
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, MODULE_NAME, bbArtifact.getArtifactID()));
SleuthkitCase tskCase = bbArtifact.getSleuthkitCase();
AbstractFile abstractFile = bbArtifact.getSleuthkitCase().getAbstractFileById(bbArtifact.getObjectID());
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
AbstractFile abstractFile = tskCase.getAbstractFileById(originalArtifact.getObjectID());
Blackboard blackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT);
tifArtifact.addAttributes(attributes);
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_ARTIFACT_HIT, attributesForNewArtifact)) {
BlackboardArtifact newInterestingArtifact = abstractFile.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
newInterestingArtifact.addAttributes(attributesForNewArtifact);
try {
// index the artifact for keyword search
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard.indexArtifact(tifArtifact);
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
blackboard.postArtifact(newInterestingArtifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + newInterestingArtifact.getArtifactID(), ex); //NON-NLS
}
// fire event to notify UI of this new artifact
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT));
}
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
@ -283,7 +270,7 @@ public class IngestEventsListener {
public void propertyChange(PropertyChangeEvent evt) {
//if ingest is running we want to check if there is a Correlation Engine module running
//sometimes artifacts are generated by DSPs or other sources while ingest is not running
//in these cases we still want to create correlation attributes for those artifacts when appropriate
//in these cases we still want to create correlation attributesForNewArtifact for those artifacts when appropriate
if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
EamDb dbManager;
try {
@ -319,7 +306,7 @@ public class IngestEventsListener {
LOGGER.log(Level.SEVERE, "Failed to connect to Central Repository database.", ex);
return;
}
switch (IngestManager.IngestJobEvent.valueOf(evt.getPropertyName())) {
case DATA_SOURCE_ANALYSIS_COMPLETED: {
jobProcessingExecutor.submit(new AnalysisCompleteTask(dbManager, evt));
@ -333,10 +320,10 @@ public class IngestEventsListener {
}
private final class AnalysisCompleteTask implements Runnable {
private final EamDb dbManager;
private final PropertyChangeEvent event;
private AnalysisCompleteTask(EamDb db, PropertyChangeEvent evt) {
dbManager = db;
event = evt;
@ -362,15 +349,15 @@ public class IngestEventsListener {
long dataSourceObjectId = -1;
try {
dataSource = ((DataSourceAnalysisCompletedEvent) event).getDataSource();
/*
* We only care about Images for the purpose of updating hash
* values.
* We only care about Images for the purpose of
* updating hash values.
*/
if (!(dataSource instanceof Image)) {
return;
}
dataSourceName = dataSource.getName();
dataSourceObjectId = dataSource.getId();
@ -398,7 +385,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageMd5Hash, crMd5Hash) == false) {
correlationDataSource.setMd5(imageMd5Hash);
}
String imageSha1Hash = image.getSha1();
if (imageSha1Hash == null) {
imageSha1Hash = "";
@ -407,7 +394,7 @@ public class IngestEventsListener {
if (StringUtils.equals(imageSha1Hash, crSha1Hash) == false) {
correlationDataSource.setSha1(imageSha1Hash);
}
String imageSha256Hash = image.getSha256();
if (imageSha256Hash == null) {
imageSha256Hash = "";
@ -441,8 +428,8 @@ public class IngestEventsListener {
private final boolean createCorrelationAttributes;
private DataAddedTask(EamDb db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled, boolean flagPreviousItemsEnabled, boolean createCorrelationAttributes) {
dbManager = db;
event = evt;
this.dbManager = db;
this.event = evt;
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
this.flagPreviousItemsEnabled = flagPreviousItemsEnabled;
this.createCorrelationAttributes = createCorrelationAttributes;
@ -476,7 +463,7 @@ public class IngestEventsListener {
try {
caseDisplayNames = dbManager.getListCasesHavingArtifactInstancesKnownBad(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
if (!caseDisplayNames.isEmpty()) {
postCorrelatedBadArtifactToBlackboard(bbArtifact,
makeAndPostPreviousNotableArtifact(bbArtifact,
caseDisplayNames);
}
} catch (CorrelationAttributeNormalizationException ex) {
@ -484,7 +471,7 @@ public class IngestEventsListener {
}
}
if (flagPreviousItemsEnabled
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
&& (eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.USBID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.ICCID_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMEI_TYPE_ID
|| eamArtifact.getCorrelationType().getId() == CorrelationAttributeInstance.IMSI_TYPE_ID
@ -494,7 +481,7 @@ public class IngestEventsListener {
List<CorrelationAttributeInstance> previousOccurences = dbManager.getArtifactInstancesByTypeValue(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
for (CorrelationAttributeInstance instance : previousOccurences) {
if (!instance.getCorrelationCase().getCaseUUID().equals(eamArtifact.getCorrelationCase().getCaseUUID())) {
postCorrelatedPreviousArtifactToBlackboard(bbArtifact);
makeAndPostPreviousSeenArtifact(bbArtifact);
break;
}
}
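The listener now builds the Interesting Item attributes up front, checks Blackboard.artifactExists(), and posts with Blackboard.postArtifact() instead of indexing and firing a ModuleDataEvent by hand; the TSKCaseRepublisher added in Case.java turns the resulting ArtifactsPostedEvent back into a ModuleDataEvent for legacy listeners. A compact sketch (not part of this commit) of that flow, using only calls shown in the hunks above; the class, method, and set name are illustrative.

import java.util.Arrays;
import java.util.Collection;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

final class InterestingItemSketch {

    private static final String MODULE_NAME = "Correlation Engine"; // illustrative

    // Create the TSK_INTERESTING_ARTIFACT_HIT only if an identical one is not already
    // present, then let postArtifact() handle indexing and event publication.
    static void flag(BlackboardArtifact originalArtifact, String setName)
            throws TskCoreException, Blackboard.BlackboardException {
        SleuthkitCase tskCase = originalArtifact.getSleuthkitCase();
        AbstractFile file = tskCase.getAbstractFileById(originalArtifact.getObjectID());
        Blackboard blackboard = tskCase.getBlackboard();

        Collection<BlackboardAttribute> attributes = Arrays.asList(
                new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, setName));

        if (!blackboard.artifactExists(file, TSK_INTERESTING_ARTIFACT_HIT, attributes)) {
            BlackboardArtifact hit = file.newArtifact(TSK_INTERESTING_ARTIFACT_HIT);
            hit.addAttributes(attributes);
            blackboard.postArtifact(hit, MODULE_NAME);
        }
    }
}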

View File

@ -51,7 +51,7 @@ public class Installer extends ModuleInstall {
super();
}
@NbBundle.Messages({"Installer.centralRepoUpgradeFailed.title=Central repository upgrade failed"})
@NbBundle.Messages({"Installer.centralRepoUpgradeFailed.title=Central repository disabled"})
@Override
public void restored() {
Case.addPropertyChangeListener(pcl);

View File

@ -19,41 +19,44 @@
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import java.util.List;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeNormalizationException;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationCase;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbException;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.autopsy.core.RuntimeProperties;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationAttributeInstance;
import org.sleuthkit.autopsy.centralrepository.datamodel.CorrelationDataSource;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDb;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamDbPlatformEnum;
import org.sleuthkit.autopsy.centralrepository.datamodel.EamArtifactUtil;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.HashUtility;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
import org.sleuthkit.autopsy.healthmonitor.TimingMetric;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
* Ingest module for inserting entries into the Central Repository database on
@ -63,6 +66,8 @@ import org.sleuthkit.datamodel.SleuthkitCase;
"CentralRepoIngestModule.prevCaseComment.text=Previous Case: "})
final class CentralRepoIngestModule implements FileIngestModule {
private static final String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
static final boolean DEFAULT_FLAG_PREVIOUS_DEVICES = true;
static final boolean DEFAULT_CREATE_CR_PROPERTIES = true;
@ -74,10 +79,10 @@ final class CentralRepoIngestModule implements FileIngestModule {
private long jobId;
private CorrelationCase eamCase;
private CorrelationDataSource eamDataSource;
private Blackboard blackboard;
private CorrelationAttributeInstance.Type filesType;
private final boolean flagTaggedNotableItems;
private final boolean flagPreviouslySeenDevices;
private Blackboard blackboard;
private final boolean createCorrelationProperties;
/**
@ -104,7 +109,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
try {
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
return ProcessResult.ERROR;
@ -158,7 +163,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
}
}
// insert this file into the central repository
// insert this file into the central repository
if (createCorrelationProperties) {
try {
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
@ -271,7 +276,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
// Don't allow sqlite central repo databases to be used for multi user cases
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
&& (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
&& (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
}
@ -308,7 +313,7 @@ final class CentralRepoIngestModule implements FileIngestModule {
// if we are the first thread / module for this job, then make sure the case
// and image exist in the DB before we associate artifacts with it.
if (refCounter.incrementAndGet(jobId)
== 1) {
== 1) {
// ensure we have this data source in the EAM DB
try {
if (null == centralRepoDb.getDataSource(eamCase, eamDataSource.getDataSourceObjectID())) {
@ -330,41 +335,32 @@ final class CentralRepoIngestModule implements FileIngestModule {
*/
private void postCorrelatedBadFileToBlackboard(AbstractFile abstractFile, List<String> caseDisplayNames) {
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevTaggedSet_text()),
new BlackboardAttribute(
TSK_COMMENT, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(","))));
try {
String MODULE_NAME = CentralRepoIngestModuleFactory.getModuleName();
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevTaggedSet_text()));
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME,
Bundle.CentralRepoIngestModule_prevCaseComment_text() + caseDisplayNames.stream().distinct().collect(Collectors.joining(",", "", ""))));
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(abstractFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
if (!blackboard.artifactExists(abstractFile, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact tifArtifact = abstractFile.newArtifact(TSK_INTERESTING_FILE_HIT);
tifArtifact.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(tifArtifact);
blackboard.postArtifact(tifArtifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
// send inbox message
sendBadFileInboxMessage(tifArtifact, abstractFile.getName(), abstractFile.getMd5Hash());
// fire event to notify UI of this new artifact
services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); // NON-NLS
}
}

View File

@ -57,7 +57,7 @@ CVTTopComponent.browseVisualizeTabPane.AccessibleContext.accessibleName=Visualiz
CVTTopComponent.vizPanel.TabConstraints.tabTitle_1=Visualize
VisualizationPanel.fitGraphButton.text=
VisualizationPanel.jTextArea1.text=Right-click an account in the Browse Accounts table, and select 'Visualize' to begin.
VisualizationPanel.fitZoomButton.toolTipText=Fit Visualization
VisualizationPanel.fitZoomButton.toolTipText=Fit visualization to available space.
VisualizationPanel.fitZoomButton.text=
# {0} - layout name
VisualizationPanel.layoutFail.text={0} layout failed. Try a different layout.
@ -67,11 +67,11 @@ VisualizationPanel.lockAction.pluralText=Lock Selected Accounts
VisualizationPanel.lockAction.singularText=Lock Selected Account
VisualizationPanel.unlockAction.pluralText=Unlock Selected Accounts
VisualizationPanel.unlockAction.singularText=Unlock Selected Account
VisualizationPanel.zoomActualButton.toolTipText=Reset Zoom
VisualizationPanel.zoomActualButton.toolTipText=Reset the visualization to the default zoom state.
VisualizationPanel.zoomActualButton.text=
VisualizationPanel.zoomInButton.toolTipText=Zoom In
VisualizationPanel.zoomInButton.toolTipText=Zoom visualization in.
VisualizationPanel.zoomInButton.text=
VisualizationPanel.zoomOutButton.toolTipText=Zoom Out
VisualizationPanel.zoomOutButton.toolTipText=Zoom visualization out.
VisualizationPanel.zoomOutButton.text=
VisualizationPanel.fastOrganicLayoutButton.text=
VisualizationPanel.backButton.text_1=
@ -81,17 +81,17 @@ VisualizationPanel.hierarchyLayoutButton.text=Hierarchical
VisualizationPanel.clearVizButton.text_1=
VisualizationPanel.snapshotButton.text_1=Snapshot Report
VisualizationPanel.clearVizButton.actionCommand=
VisualizationPanel.backButton.toolTipText=Click to Go Back
VisualizationPanel.forwardButton.toolTipText=Click to Go Forward
VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to Redraw Chart
VisualizationPanel.clearVizButton.toolTipText=Click to Clear Chart
VisualizationPanel.backButton.toolTipText=Click to go back to the previous state.
VisualizationPanel.forwardButton.toolTipText=Click to go forward to the next state.
VisualizationPanel.fastOrganicLayoutButton.toolTipText=Click to redraw the visualization.
VisualizationPanel.clearVizButton.toolTipText=Click to clear the visualization.
FiltersPanel.limitHeaderLabel.text=Communications Limit:
FiltersPanel.mostRecentLabel.text=Most Recent:
FiltersPanel.limitErrorMsgLabel.text=Invalid integer value.
VisualizationPanel.forwardButton.text=
VisualizationPanel.zoomPercentLabel.text=100%
VisualizationPanel.zoomLabel.text=Zoom:
VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot Report
VisualizationPanel.snapshotButton.toolTipText=Generate Snapshot report.
VisualizationPanel_action_dialogs_title=Communications
VisualizationPanel_action_name_text=Snapshot Report
VisualizationPanel_module_name=Communications

View File

@ -299,6 +299,10 @@ public class Installer extends ModuleInstall {
* initialized later.
*/
private static void addGstreamerPathsToEnv() {
if (System.getProperty("jna.nosys") == null) {
System.setProperty("jna.nosys", "true");
}
Path gstreamerPath = InstalledFileLocator.getDefault().locate("gstreamer", Installer.class.getPackage().getName(), false).toPath();
if (gstreamerPath == null) {

View File

@ -434,7 +434,7 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
}
setColumnWidths();
/*
* Load column sorting information from preferences file and apply it to
* columns.
@ -516,12 +516,22 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
protected void setColumnWidths() {
if (rootNode.getChildren().getNodesCount() != 0) {
final Graphics graphics = outlineView.getGraphics();
if (graphics != null) {
// Current width of the outlineView
double outlineViewWidth = outlineView.getSize().getWidth();
// List of the column widths
List<Integer> columnWidths = new ArrayList<>();
final FontMetrics metrics = graphics.getFontMetrics();
int margin = 4;
int padding = 8;
int totalColumnWidth = 0;
int cntMaxSizeColumns = 0;
// Calculate the width for each column, keeping track of the number
// of columns that were set to columnWidthLimit.
for (int column = 0; column < outline.getModel().getColumnCount(); column++) {
int firstColumnPadding = (column == 0) ? 32 : 0;
int columnWidthLimit = (column == 0) ? 350 : 300;
@ -539,8 +549,43 @@ public class DataResultViewerTable extends AbstractDataResultViewer {
int columnWidth = Math.max(valuesWidth, headerWidth);
columnWidth += 2 * margin + padding; // add margin and regular padding
columnWidth = Math.min(columnWidth, columnWidthLimit);
columnWidth = Math.min(columnWidth, columnWidthLimit);
columnWidths.add(columnWidth);
totalColumnWidth += columnWidth;
if (columnWidth == columnWidthLimit) {
cntMaxSizeColumns++;
}
}
// Figure out how much extra width, if any, can be given to the columns
// so that the table is as wide as outlineViewWidth. If cntMaxSizeColumns
// is greater than 0, divide the extra space among the columns
// that could use more space. Otherwise, divide it evenly among
// all columns.
int extraWidth = 0;
if (totalColumnWidth < outlineViewWidth) {
if (cntMaxSizeColumns > 0) {
extraWidth = (int) ((outlineViewWidth - totalColumnWidth)/cntMaxSizeColumns);
} else {
extraWidth = (int) ((outlineViewWidth - totalColumnWidth)/columnWidths.size());
}
}
for (int column = 0; column < columnWidths.size(); column++) {
int columnWidth = columnWidths.get(column);
if (cntMaxSizeColumns > 0) {
if (columnWidth >= ((column == 0) ? 350 : 300)) {
columnWidth += extraWidth;
}
} else {
columnWidth += extraWidth;
}
outline.getColumnModel().getColumn(column).setPreferredWidth(columnWidth);
}
}
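The width-distribution logic above can be hard to follow in diff form. Below is a minimal standalone sketch of the same idea, with purely illustrative numbers and no Swing/OutlineView dependencies: leftover horizontal space is split among the columns that hit their width limit, or among all columns when none did.
import java.util.Arrays;
import java.util.List;

public class ColumnWidthSketch {
    public static void main(String[] args) {
        double outlineViewWidth = 1000;                            // available width (illustrative)
        List<Integer> columnWidths = Arrays.asList(350, 120, 300); // 350 and 300 hit their limits
        int totalColumnWidth = columnWidths.stream().mapToInt(Integer::intValue).sum(); // 770
        int cntMaxSizeColumns = 2;                                 // columns that reached their limit

        int extraWidth = 0;
        if (totalColumnWidth < outlineViewWidth) {
            extraWidth = (int) ((outlineViewWidth - totalColumnWidth)
                    / (cntMaxSizeColumns > 0 ? cntMaxSizeColumns : columnWidths.size()));
        }
        // Each capped column receives the extra share; uncapped columns keep their width.
        System.out.println("extra width per capped column: " + extraWidth); // prints 115
    }
}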

View File

@ -1,3 +1,10 @@
FileTypeCategory.Audio.displayName=Audio
FileTypeCategory.Documents.displayName=Documents
FileTypeCategory.Executables.displayName=Executables
FileTypeCategory.Image.displayName=Image
FileTypeCategory.Media.displayName=Media
FileTypeCategory.Video.displayName=Video
FileTypeCategory.Visual.displayName=Visual
# {0} - file name
GetOrGenerateThumbnailTask.generatingPreviewFor=Generating preview for {0}
# {0} - file name

View File

@ -196,6 +196,53 @@ public final class ExecUtil {
}
return process.exitValue();
}
/**
* Wait for the given process to finish, using the given ProcessTerminator.
*
* @param command The command that was used to start the process. Used
* only for logging purposes.
* @param process The process to wait for.
* @param terminator The ProcessTerminator used to determine if the process
* should be killed.
*
* @return The exit value of the process.
*
* @throws SecurityException if a security manager exists and vetoes any
* aspect of running the process.
* @throws IOException if an I/O error occurs.
*/
public static int waitForTermination(String command, Process process, ProcessTerminator terminator) throws SecurityException, IOException {
return ExecUtil.waitForTermination(command, process, ExecUtil.DEFAULT_TIMEOUT, ExecUtil.DEFAULT_TIMEOUT_UNITS, terminator);
}
private static int waitForTermination(String command, Process process, long timeOut, TimeUnit units, ProcessTerminator terminator) throws SecurityException, IOException {
try {
do {
process.waitFor(timeOut, units);
if (process.isAlive() && terminator.shouldTerminateProcess()) {
killProcess(process);
try {
process.waitFor(); //waiting to help ensure the process is shut down before calling interrupt() or returning
} catch (InterruptedException exx) {
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
}
}
} while (process.isAlive());
} catch (InterruptedException ex) {
if (process.isAlive()) {
killProcess(process);
}
try {
process.waitFor(); //waiting to help ensure the process is shut down before calling interrupt() or returning
} catch (InterruptedException exx) {
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, String.format("Wait for process termination following killProcess was interrupted for command %s", command));
}
Logger.getLogger(ExecUtil.class.getName()).log(Level.INFO, "Thread interrupted while running {0}", command); // NON-NLS
Thread.currentThread().interrupt();
}
return process.exitValue();
}
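A hedged usage sketch of the new public waitForTermination overload above. The command and terminator are hypothetical, the lambda assumes ProcessTerminator is a single-method interface (which the shouldTerminateProcess() call above suggests but this diff does not show), and exception handling is omitted.
// Hypothetical caller; assumes ExecUtil and ProcessTerminator from
// org.sleuthkit.autopsy.coreutils are on the classpath.
Process process = new ProcessBuilder("some_tool", "--some-arg").start(); // hypothetical command
int exitValue = ExecUtil.waitForTermination(
        "some_tool --some-arg",   // used only for logging
        process,
        () -> false);             // hypothetical terminator that never requests a kill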
/**
* Kills a process and its children

View File

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel.utils;
package org.sleuthkit.autopsy.coreutils;
import com.google.common.collect.ImmutableSet;
import static java.util.Arrays.asList;

View File

@ -1,19 +1,19 @@
/*
/*
*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
*
*
* Copyright 2012-2018 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
* Project Contact/Architect: carrier <at> sleuthkit <dot> org
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -27,12 +27,12 @@ import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.logging.Level;
/**
* Database connection class & utilities *
* Database connection class & utilities.
*/
public class SQLiteDBConnect {
public class SQLiteDBConnect implements AutoCloseable {
public String sDriver = "";
public String sUrl = null;
@ -52,7 +52,7 @@ public class SQLiteDBConnect {
* quick and dirty constructor to test the database passing the
* DriverManager name and the fully loaded url to handle
*/
/*
/*
* NB this will typically be available if you make this class concrete and
* not abstract
*/
@ -104,9 +104,13 @@ public class SQLiteDBConnect {
statement.executeUpdate(instruction);
}
// processes an array of instructions e.g. a set of SQL command strings passed from a file
//NB you should ensure you either handle empty lines in files by either removing them or parsing them out
// since they will generate spurious SQLExceptions when they are encountered during the iteration....
/** Processes an array of instructions, e.g. a set of SQL command strings
* passed from a file.
*
* NB: ensure you handle empty lines in files, either by removing them or
* parsing them out, since they will generate spurious SQLExceptions when
* they are encountered during the iteration.
*/
public void executeStmt(String[] instructionSet) throws SQLException {
for (int i = 0; i < instructionSet.length; i++) {
executeStmt(instructionSet[i]);
@ -120,7 +124,14 @@ public class SQLiteDBConnect {
public void closeConnection() {
try {
conn.close();
} catch (Exception ignore) {
} catch (SQLException ex) {
logger.log(Level.WARNING, "Unable to close connection to SQLite DB at " + sUrl, ex);
}
//Implementing AutoCloseable.close() allows this class to be used in try-with-resources.
}
@Override
public void close() {
closeConnection();
}
}
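With the class now implementing AutoCloseable, callers can manage the connection with try-with-resources. In the sketch below, the two-argument constructor is an assumption based on the comment above about passing the DriverManager name and the fully loaded URL, the driver/URL strings are illustrative only, and exception handling is omitted.
// Hedged sketch: constructor signature and connection strings are assumptions.
try (SQLiteDBConnect db = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:example.db")) {
    db.executeStmt("CREATE TABLE IF NOT EXISTS example (id INTEGER PRIMARY KEY)");
} // close() delegates to closeConnection(), so the connection is released even on error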

View File

@ -160,6 +160,7 @@ KeywordHits.createSheet.numChildren.name=Number of Children
KeywordHits.kwHits.text=Keyword Hits
KeywordHits.simpleLiteralSearch.text=Single Literal Keyword Search
KeywordHits.singleRegexSearch.text=Single Regular Expression Search
LayoutFileNode.getActions.viewFileInDir.text=View File in Directory
OpenIDE-Module-Name=DataModel
AbstractContentChildren.CreateTSKNodeVisitor.exception.noNodeMsg=No Node defined for the given SleuthkitItem
AbstractContentChildren.createAutopsyNodeVisitor.exception.noNodeMsg=No Node defined for the given DisplayableItem

View File

@ -51,7 +51,7 @@ public abstract class DisplayableItemNode extends AbstractNode {
*
* @throws TskCoreException
*/
static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
protected static AbstractFile findLinked(BlackboardArtifact artifact) throws TskCoreException {
BlackboardAttribute pathIDAttribute = artifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH_ID));
if (pathIDAttribute != null) {
long contentID = pathIDAttribute.getValueLong();

View File

@ -1,7 +0,0 @@
FileTypeCategory.Audio.displayName=Audio
FileTypeCategory.Documents.displayName=Documents
FileTypeCategory.Executables.displayName=Executables
FileTypeCategory.Image.displayName=Image
FileTypeCategory.Media.displayName=Media
FileTypeCategory.Video.displayName=Video
FileTypeCategory.Visual.displayName=Visual

View File

@ -34,13 +34,10 @@ import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;

View File

@ -1,16 +1,16 @@
/*
* Sample module in the public domain. Feel free to use this as a template
* for your modules.
*
*
* Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
*
* This is free and unencumbered software released into the public domain.
*
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
@ -18,34 +18,31 @@
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.sleuthkit.autopsy.examples;
import java.util.HashMap;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskData;
/**
@ -56,7 +53,7 @@ import org.sleuthkit.datamodel.TskData;
class SampleFileIngestModule implements FileIngestModule {
private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
private static BlackboardAttribute.ATTRIBUTE_TYPE attrType = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
private static final BlackboardAttribute.ATTRIBUTE_TYPE ATTR_TYPE = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT;
private final boolean skipKnownFiles;
private IngestJobContext context = null;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
@ -76,8 +73,8 @@ class SampleFileIngestModule implements FileIngestModule {
// Skip anything other than actual file system files.
if ((file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (file.isFile() == false)) {
|| (file.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (file.isFile() == false)) {
return IngestModule.ProcessResult.OK;
}
@ -101,7 +98,7 @@ class SampleFileIngestModule implements FileIngestModule {
// Make an attribute using the ID for the attribute attrType that
// was previously created.
BlackboardAttribute attr = new BlackboardAttribute(attrType, SampleIngestModuleFactory.getModuleName(), count);
BlackboardAttribute attr = new BlackboardAttribute(ATTR_TYPE, SampleIngestModuleFactory.getModuleName(), count);
// Add the attribute to the general info artifact for the file. In a
// real module, you would likely have more complex data types
@ -113,13 +110,15 @@ class SampleFileIngestModule implements FileIngestModule {
// management of shared data.
addToBlackboardPostCount(context.getJobId(), 1L);
// Fire an event to notify any listeners for blackboard postings.
ModuleDataEvent event = new ModuleDataEvent(SampleIngestModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_GEN_INFO);
IngestServices.getInstance().fireModuleDataEvent(event);
/*
* Post the artifact, which will index it for keyword search and
* fire an event to notify the UI of the new artifact.
*/
file.getSleuthkitCase().getBlackboard().postArtifact(art, SampleIngestModuleFactory.getModuleName());
return IngestModule.ProcessResult.OK;
} catch (TskCoreException ex) {
} catch (TskCoreException | Blackboard.BlackboardException ex) {
IngestServices ingestServices = IngestServices.getInstance();
Logger logger = ingestServices.getLogger(SampleIngestModuleFactory.getModuleName());
logger.log(Level.SEVERE, "Error processing file (id = " + file.getId() + ")", ex);

View File

@ -329,14 +329,38 @@ public final class IngestJobSettings {
for (IngestModuleFactory moduleFactory : moduleFactories) {
loadedModuleNames.add(moduleFactory.getModuleDisplayName());
}
/**
* Hard code Plaso to be disabled by default. loadedModuleNames is
* passed below as the default list of enabled modules, so briefly remove
* Plaso from the loaded modules to get the lists of enabled and disabled
* module names. Then put Plaso back into loadedModuleNames to let the
* rest of the code continue as before.
*/
final String plasoModuleName = "Plaso";
boolean plasoLoaded = loadedModuleNames.contains(plasoModuleName);
if (plasoLoaded) {
loadedModuleNames.remove(plasoModuleName);
}
/**
* Get the enabled/disabled ingest modules settings for this context. By
* default, all loaded modules are enabled.
* default, all loaded modules except Plaso are enabled.
*/
HashSet<String> enabledModuleNames = getModulesNames(executionContext, IngestJobSettings.ENABLED_MODULES_PROPERTY, makeCsvList(loadedModuleNames));
HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, ""); //NON-NLS
HashSet<String> disabledModuleNames = getModulesNames(executionContext, IngestJobSettings.DISABLED_MODULES_PROPERTY, plasoModuleName); //NON-NLS
// If Plaso was loaded but appears in neither the enabled nor the
// disabled list, add it to the disabled list.
if (!enabledModuleNames.contains(plasoModuleName) && !disabledModuleNames.contains(plasoModuleName)) {
disabledModuleNames.add(plasoModuleName);
}
//Put Plaso back into loadedModuleNames
if (plasoLoaded) {
loadedModuleNames.add(plasoModuleName);
}
/**
* Check for missing modules and create warnings if any are found.
*/

View File

@ -104,9 +104,13 @@ public final class IngestServices {
*
* @param moduleDataEvent A module data event, i.e., an event that
* encapsulates artifact data.
*
* @deprecated use org.sleuthkit.datamodel.Blackboard.postArtifact instead.
*/
@Deprecated
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
IngestManager.getInstance().fireIngestModuleDataEvent(moduleDataEvent);
}
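A minimal migration sketch for callers of the now-deprecated method above, using the replacement named in the deprecation note. The artifact and moduleName variables are hypothetical, exception handling (NoCurrentCaseException, TskCoreException, Blackboard.BlackboardException) is omitted, and the same pattern appears in the ingest-module chunks elsewhere in this commit.
// Before (deprecated):
// IngestServices.getInstance().fireModuleDataEvent(
//         new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
// After: posting the artifact indexes it for keyword search and notifies the UI.
Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard()
        .postArtifact(artifact, moduleName); // artifact and moduleName are hypothetical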
/**
@ -171,7 +175,6 @@ public final class IngestServices {
*
* @param moduleName A unique identifier for the module.
* @param settings A mapping of setting names to setting values.
*
*/
public void setConfigSettings(String moduleName, Map<String, String> settings) {
ModuleSettings.setConfigSettings(moduleName, settings);

View File

@ -53,7 +53,7 @@ public class ModuleDataEvent extends ChangeEvent {
private Collection<BlackboardArtifact> artifacts;
/**
* @param moduleName Module name
* @param moduleName Module name
* @param artifactType Type of artifact that was posted to blackboard
*/
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType) {
@ -63,9 +63,9 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
* @param moduleName Module Name
* @param moduleName Module Name
* @param blackboardArtifactType Type of the blackboard artifact posted to
* the blackboard
* the blackboard
*/
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType) {
super(blackboardArtifactType);
@ -74,10 +74,10 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
* @param moduleName Module name
* @param moduleName Module name
* @param blackboardArtifactType Type of artifact posted to the blackboard
* @param artifacts List of specific artifact ID values that were added to
* blackboard
* @param artifacts List of specific artifact ID values that
* were added to blackboard
*/
public ModuleDataEvent(String moduleName, BlackboardArtifact.Type blackboardArtifactType, Collection<BlackboardArtifact> artifacts) {
this(moduleName, blackboardArtifactType);
@ -85,10 +85,10 @@ public class ModuleDataEvent extends ChangeEvent {
}
/**
* @param moduleName Module name
* @param moduleName Module name
* @param artifactType Type of artifact that was posted to blackboard
* @param artifacts List of specific artifact values that were added to
* blackboard
* @param artifacts List of specific artifact values that were added to
* blackboard
*/
public ModuleDataEvent(String moduleName, ARTIFACT_TYPE artifactType, Collection<BlackboardArtifact> artifacts) {
this(moduleName, artifactType);

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2015-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,17 +19,23 @@
package org.sleuthkit.autopsy.keywordsearchservice;
import java.io.Closeable;
import java.io.IOException;
import org.sleuthkit.autopsy.casemodule.CaseMetadata;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
* An interface for implementations of a keyword search service.
* You can find the implementations by using Lookup, such as:
* Lookup.getDefault().lookup(KeywordSearchService.class)
* An interface for implementations of a keyword search service. You can find
* the implementations by using Lookup, such as:
*
* Lookup.getDefault().lookup(KeywordSearchService.class)
*
* although most clients should obtain a keyword search service by calling:
*
* Case.getCurrentCase().getServices().getKeywordSearchService()
*
* TODO (AUT-2158: This interface should not extend Closeable.
* TODO (AUT-2158): This interface should not extend Closeable.
*/
public interface KeywordSearchService extends Closeable {
@ -49,12 +55,18 @@ public interface KeywordSearchService extends Closeable {
*
* @param artifact The artifact to index.
*
* @deprecated Call org.sleuthkit.datamodel.Blackboard.postArtifact instead.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
@Deprecated
public void indexArtifact(BlackboardArtifact artifact) throws TskCoreException;
/**
* Add the given Content object to the text index.
* Add the given Content object to the text index. This method should only
* be used in atypical cases, such as indexing a report. Artifacts are
* indexed when org.sleuthkit.datamodel.Blackboard.postArtifact is called
* and files are indexed during ingest.
*
* @param content The content to index.
*
@ -70,5 +82,19 @@ public interface KeywordSearchService extends Closeable {
* @throws KeywordSearchServiceException if unable to delete.
*/
public void deleteTextIndex(CaseMetadata metadata) throws KeywordSearchServiceException;
/**
* Closes the keyword search service.
*
* @throws IOException If there is a problem closing the keyword search service.
* @deprecated Do not use.
*/
@Deprecated
default public void close() throws IOException {
/*
* No-op maintained for backwards compatibility. Clients should not
* attempt to close case services.
*/
}
}
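A one-line sketch of the accessor recommended by the updated javadoc above; exception handling is omitted and the variable name is hypothetical.
// Preferred over a raw Lookup for most clients, per the interface javadoc.
KeywordSearchService searchService =
        Case.getCurrentCase().getServices().getKeywordSearchService();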

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013-2018 Basis Technology Corp.
* Copyright 2013-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -32,12 +32,12 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.TskCoreException;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskDataException;
@ -297,11 +297,12 @@ public class DataSourceIntegrityIngestModule implements DataSourceIngestModule {
BlackboardArtifact verificationFailedArtifact = Case.getCurrentCase().getSleuthkitCase().newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED, img.getId());
verificationFailedArtifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
DataSourceIntegrityModuleFactory.getModuleName(), artifactComment));
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(DataSourceIntegrityModuleFactory.getModuleName(),
BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED));
Case.getCurrentCase().getServices().getArtifactsBlackboard().postArtifact(verificationFailedArtifact, DataSourceIntegrityModuleFactory.getModuleName());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating verification failed artifact", ex);
}
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting verification failed artifact", ex);
}
}
services.postMessage(IngestMessage.createMessage(messageType, DataSourceIntegrityModuleFactory.getModuleName(),

View File

@ -34,21 +34,20 @@ import java.util.logging.Level;
import net.sf.sevenzipjbinding.ArchiveFormat;
import static net.sf.sevenzipjbinding.ArchiveFormat.RAR;
import net.sf.sevenzipjbinding.ExtractAskMode;
import net.sf.sevenzipjbinding.ISequentialOutStream;
import net.sf.sevenzipjbinding.ISevenZipInArchive;
import net.sf.sevenzipjbinding.SevenZip;
import net.sf.sevenzipjbinding.SevenZipException;
import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
import net.sf.sevenzipjbinding.ExtractOperationResult;
import net.sf.sevenzipjbinding.IArchiveExtractCallback;
import net.sf.sevenzipjbinding.ICryptoGetTextPassword;
import net.sf.sevenzipjbinding.ISequentialOutStream;
import net.sf.sevenzipjbinding.ISevenZipInArchive;
import net.sf.sevenzipjbinding.PropID;
import net.sf.sevenzipjbinding.SevenZip;
import net.sf.sevenzipjbinding.SevenZipException;
import net.sf.sevenzipjbinding.SevenZipNativeInitializationException;
import org.netbeans.api.progress.ProgressHandle;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.FileUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -58,36 +57,43 @@ import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.EncodedFileOutputStream;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
class SevenZipExtractor {
private static final Logger logger = Logger.getLogger(SevenZipExtractor.class.getName());
private IngestServices services = IngestServices.getInstance();
private final IngestJobContext context;
private final FileTypeDetector fileTypeDetector;
private static final String MODULE_NAME = EmbeddedFileExtractorModuleFactory.getModuleName();
//encryption type strings
private static final String ENCRYPTION_FILE_LEVEL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFileLevel");
private static final String ENCRYPTION_FULL = NbBundle.getMessage(EmbeddedFileExtractorIngestModule.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.encryptionFull");
//zip bomb detection
private static final int MAX_DEPTH = 4;
private static final int MAX_COMPRESSION_RATIO = 600;
private static final long MIN_COMPRESSION_RATIO_SIZE = 500 * 1000000L;
private static final long MIN_FREE_DISK_SPACE = 1 * 1000 * 1000000L; //1GB
private IngestServices services = IngestServices.getInstance();
private final IngestJobContext context;
private final FileTypeDetector fileTypeDetector;
private String moduleDirRelative;
private String moduleDirAbsolute;
@ -244,44 +250,43 @@ class SevenZipExtractor {
*/
private void flagRootArchiveAsZipBomb(Archive rootArchive, AbstractFile archiveFile, String details, String escapedFilePath) {
rootArchive.flagAsZipBomb();
logger.log(Level.INFO, details); //NON-NLS
String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);
logger.log(Level.INFO, details);
try {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, EmbeddedFileExtractorModuleFactory.getModuleName(),
"Possible Zip Bomb"));
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION,
EmbeddedFileExtractorModuleFactory.getModuleName(),
Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())));
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT,
EmbeddedFileExtractorModuleFactory.getModuleName(),
details));
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
"Possible Zip Bomb"),
new BlackboardAttribute(
TSK_DESCRIPTION, MODULE_NAME,
Bundle.SevenZipExtractor_zipBombArtifactCreation_text(archiveFile.getName())),
new BlackboardAttribute(
TSK_COMMENT, MODULE_NAME,
details));
SleuthkitCase tskCase = Case.getCurrentCaseThrows().getSleuthkitCase();
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(archiveFile, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
if (!blackboard.artifactExists(archiveFile, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = rootArchive.getArchiveFile().newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(artifact);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of this new artifact.
*/
blackboard.postArtifact(artifact, MODULE_NAME);
String msg = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.isZipBombCheck.warnMsg", archiveFile.getName(), escapedFilePath);//NON-NLS
services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
}
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for Zip Bomb Detection for file: " + escapedFilePath, ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
}
}
@ -467,13 +472,11 @@ class SevenZipExtractor {
}
/**
* Unpack the file to local folder and return a list of derived files
* Unpack the file to a local folder.
*
* @param archiveFile file to unpack
* @param depthMap A concurrent hash map that keeps track of the depth
* of all nested archives, keyed by object ID.
*
* @return true if unpacking is complete
*/
void unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthMap) {
unpack(archiveFile, depthMap, null);
@ -510,7 +513,7 @@ class SevenZipExtractor {
//recursion depth check for zip bomb
Archive parentAr;
try {
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
unpackSuccessful = false;
@ -626,7 +629,7 @@ class SevenZipExtractor {
escapedArchiveFilePath, archiveItemPath);
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.notEnoughDiskSpace.details");
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
logger.log(Level.INFO, "Skipping archive item due to insufficient disk space: {0}, {1}", new String[]{escapedArchiveFilePath, archiveItemPath}); //NON-NLS
logger.log(Level.INFO, "Available disk space: {0}", new Object[]{freeDiskSpace}); //NON-NLS
unpackSuccessful = false;
@ -654,7 +657,7 @@ class SevenZipExtractor {
localFile.createNewFile();
} catch (IOException e) {
logger.log(Level.SEVERE, "Error creating extracted file: "//NON-NLS
+ localFile.getAbsolutePath(), e);
+ localFile.getAbsolutePath(), e);
}
}
} catch (SecurityException e) {
@ -689,7 +692,7 @@ class SevenZipExtractor {
//inArchiveItemIndex. False indicates non-test mode
inArchive.extract(extractionIndices, false, archiveCallBack);
unpackSuccessful = unpackSuccessful & archiveCallBack.wasSuccessful();
unpackSuccessful &= archiveCallBack.wasSuccessful();
archiveDetailsMap = null;
@ -730,7 +733,7 @@ class SevenZipExtractor {
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.errUnpacking.details",
escapedArchiveFilePath, ex.getMessage());
services.postMessage(IngestMessage.createErrorMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
services.postMessage(IngestMessage.createErrorMessage(MODULE_NAME, msg, details));
}
} finally {
if (inArchive != null) {
@ -760,18 +763,21 @@ class SevenZipExtractor {
String encryptionType = fullEncryption ? ENCRYPTION_FULL : ENCRYPTION_FILE_LEVEL;
try {
BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, EmbeddedFileExtractorModuleFactory.getModuleName(), encryptionType));
artifact.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, MODULE_NAME, encryptionType));
try {
// index the artifact for keyword search
blackboard.indexArtifact(artifact);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of this new artifact.
*/
blackboard.postArtifact(artifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
logger.log(Level.SEVERE, "Unable to post blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.SevenZipExtractor_indexError_message(), artifact.getDisplayName());
}
services.fireModuleDataEvent(new ModuleDataEvent(EmbeddedFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + escapedArchiveFilePath, ex); //NON-NLS
}
@ -780,8 +786,8 @@ class SevenZipExtractor {
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.msg");
String details = NbBundle.getMessage(SevenZipExtractor.class,
"EmbeddedFileExtractorIngestModule.ArchiveExtractor.unpack.encrFileDetected.details",
currentArchiveName, EmbeddedFileExtractorModuleFactory.getModuleName());
services.postMessage(IngestMessage.createWarningMessage(EmbeddedFileExtractorModuleFactory.getModuleName(), msg, details));
currentArchiveName, MODULE_NAME);
services.postMessage(IngestMessage.createWarningMessage(MODULE_NAME, msg, details));
}
// adding unpacked extracted derived files to the job after closing relevant resources.
@ -871,7 +877,7 @@ class SevenZipExtractor {
private final String localAbsPath;
private final String localRelPath;
public InArchiveItemDetails(
InArchiveItemDetails(
SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode,
String localAbsPath, String localRelPath) {
this.unpackedNode = unpackedNode;
@ -916,10 +922,10 @@ class SevenZipExtractor {
private boolean unpackSuccessful = true;
public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
AbstractFile archiveFile, ProgressHandle progressHandle,
Map<Integer, InArchiveItemDetails> archiveDetailsMap,
String password, long freeDiskSpace) {
StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
AbstractFile archiveFile, ProgressHandle progressHandle,
Map<Integer, InArchiveItemDetails> archiveDetailsMap,
String password, long freeDiskSpace) {
this.inArchive = inArchive;
this.progressHandle = progressHandle;
@ -944,7 +950,7 @@ class SevenZipExtractor {
*/
@Override
public ISequentialOutStream getStream(int inArchiveItemIndex,
ExtractAskMode mode) throws SevenZipException {
ExtractAskMode mode) throws SevenZipException {
this.inArchiveItemIndex = inArchiveItemIndex;
@ -970,7 +976,7 @@ class SevenZipExtractor {
}
} catch (IOException ex) {
logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
+ "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
+ "for archive file at %s", localAbsPath), ex.getMessage()); //NON-NLS
return null;
}
@ -1002,7 +1008,7 @@ class SevenZipExtractor {
: accessTime.getTime() / 1000;
progressHandle.progress(archiveFile.getName() + ": "
+ (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
+ (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
inArchiveItemIndex);
}
@ -1017,6 +1023,7 @@ class SevenZipExtractor {
*/
@Override
public void setOperationResult(ExtractOperationResult result) throws SevenZipException {
final SevenZipExtractor.UnpackedTree.UnpackedNode unpackedNode
= archiveDetailsMap.get(inArchiveItemIndex).getUnpackedNode();
final String localRelPath = archiveDetailsMap.get(
@ -1218,7 +1225,7 @@ class SevenZipExtractor {
if (existingFile == null) {
df = fileManager.addDerivedFile(node.getFileName(), node.getLocalRelPath(), node.getSize(),
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
node.isIsFile(), node.getParent().getFile(), "", EmbeddedFileExtractorModuleFactory.getModuleName(),
node.isIsFile(), node.getParent().getFile(), "", MODULE_NAME,
"", "", TskData.EncodingType.XOR1);
statusMap.put(getKeyAbstractFile(df), new ZipFileStatusWrapper(df, ZipFileStatus.EXISTS));
} else {
@ -1232,7 +1239,7 @@ class SevenZipExtractor {
String mimeType = existingFile.getFile().getMIMEType().equalsIgnoreCase("application/octet-stream") ? null : existingFile.getFile().getMIMEType();
df = fileManager.updateDerivedFile((DerivedFile) existingFile.getFile(), node.getLocalRelPath(), node.getSize(),
node.getCtime(), node.getCrtime(), node.getAtime(), node.getMtime(),
node.isIsFile(), mimeType, "", EmbeddedFileExtractorModuleFactory.getModuleName(),
node.isIsFile(), mimeType, "", MODULE_NAME,
"", "", TskData.EncodingType.XOR1);
} else {
//ALREADY CURRENT - SKIP
@ -1327,8 +1334,8 @@ class SevenZipExtractor {
}
void addDerivedInfo(long size,
boolean isFile,
long ctime, long crtime, long atime, long mtime, String relLocalPath) {
boolean isFile,
long ctime, long crtime, long atime, long mtime, String relLocalPath) {
this.size = size;
this.isFile = isFile;
this.ctime = ctime;

View File

@ -19,26 +19,24 @@
package org.sleuthkit.autopsy.modules.encryptiondetection;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
@ -57,8 +55,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/**
* Create an EncryptionDetectionDataSourceIngestModule object that will
* detect volumes that are encrypted and create blackboard artifacts as
* appropriate. The supplied EncryptionDetectionIngestJobSettings object is
* used to configure the module.
* appropriate.
*
* @param settings The Settings used to configure the module.
*/
EncryptionDetectionDataSourceIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy();
@ -67,7 +66,7 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
@Override
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
validateSettings();
blackboard = Case.getCurrentCase().getServices().getBlackboard();
blackboard = Case.getCurrentCase().getSleuthkitCase().getBlackboard();
this.context = context;
}
@ -144,9 +143,9 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
/**
* Create a blackboard artifact.
*
* @param volume The volume to be processed.
* @param volume The volume to be processed.
* @param artifactType The type of artifact to create.
* @param comment A comment to be attached to the artifact.
* @param comment A comment to be attached to the artifact.
*
* @return 'OK' if the volume was processed successfully, or 'ERROR' if
* there was a problem.
@ -163,18 +162,14 @@ final class EncryptionDetectionDataSourceIngestModule implements DataSourceInges
try {
/*
* Index the artifact for keyword search.
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of this new artifact.
*/
blackboard.indexArtifact(artifact);
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
/*
* Send an event to update the view with the new result.
*/
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
/*
* Make an ingest inbox message.
*/

View File

@ -25,13 +25,11 @@ import com.healthmarketscience.jackcess.InvalidCredentialsException;
import com.healthmarketscience.jackcess.impl.CodecProvider;
import com.healthmarketscience.jackcess.impl.UnsupportedCodecException;
import com.healthmarketscience.jackcess.util.MemFileChannel;
import java.io.IOException;
import java.util.Collections;
import java.util.logging.Level;
import org.sleuthkit.datamodel.ReadContentInputStream;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.BufferUnderflowException;
import java.util.logging.Level;
import org.apache.tika.exception.EncryptedDocumentException;
import org.apache.tika.exception.TikaException;
import org.apache.tika.metadata.Metadata;
@ -41,18 +39,18 @@ import org.apache.tika.sax.BodyContentHandler;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -93,9 +91,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
/**
* Create a EncryptionDetectionFileIngestModule object that will detect
* files that are either encrypted or password protected and create
* blackboard artifacts as appropriate. The supplied
* EncryptionDetectionIngestJobSettings object is used to configure the
* module.
* blackboard artifacts as appropriate.
*
* @param settings The settings used to configure the module.
*/
EncryptionDetectionFileIngestModule(EncryptionDetectionIngestJobSettings settings) {
minimumEntropy = settings.getMinimumEntropy();
@ -108,8 +106,9 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
public void startUp(IngestJobContext context) throws IngestModule.IngestModuleException {
try {
validateSettings();
this.context = context;
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
this.context = context;
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
fileTypeDetector = new FileTypeDetector();
} catch (FileTypeDetector.FileTypeDetectorInitException ex) {
throw new IngestModule.IngestModuleException("Failed to create file type detector", ex);
@ -131,12 +130,12 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
* verify the file hasn't been deleted.
*/
if (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)
&& !file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL_DIR)
&& (!file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK) || slackFilesAllowed)
&& !file.getKnown().equals(TskData.FileKnown.KNOWN)
&& !file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) {
/*
* Is the file in FILE_IGNORE_LIST?
*/
@ -206,18 +205,14 @@ final class EncryptionDetectionFileIngestModule extends FileIngestModuleAdapter
try {
/*
* Index the artifact for keyword search.
* Post the artifact, which will index it for keyword search and
* fire an event to notify the UI of the new artifact.
*/
blackboard.indexArtifact(artifact);
blackboard.postArtifact(artifact, EncryptionDetectionModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
}
/*
* Send an event to update the view with the new result.
*/
services.fireModuleDataEvent(new ModuleDataEvent(EncryptionDetectionModuleFactory.getModuleName(), artifactType, Collections.singletonList(artifact)));
/*
* Make an ingest inbox message.
*/

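The file-level module is configured with a minimum-entropy setting (settings.getMinimumEntropy() above), which is how it flags likely encrypted content. Its actual entropy calculation is not part of this diff, so the following is only a generic sketch of the kind of Shannon-entropy check such a detector could use; the buffer handling and any threshold comparison are assumptions.

final class EntropySketch {

    /**
     * Shannon entropy, in bits per byte, over the first {@code length} bytes of
     * {@code data}. Values close to 8.0 are typical of encrypted or compressed
     * content; the real module's buffer size and threshold are not shown here.
     */
    static double shannonEntropy(byte[] data, int length) {
        int[] counts = new int[256];
        for (int i = 0; i < length; i++) {
            counts[data[i] & 0xFF]++;
        }
        double entropy = 0.0;
        for (int count : counts) {
            if (count > 0) {
                double p = (double) count / length;
                entropy -= p * (Math.log(p) / Math.log(2)); // log base 2
            }
        }
        return entropy;
    }
}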
View File

@ -1,5 +1,5 @@
CannotRunFileTypeDetection=Cannot run file type detection.
ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search.
ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s).
OpenIDE-Module-Display-Category=Ingest Module
OpenIDE-Module-Long-Description=\
Exif metadata ingest module. \n\n\

View File

@ -28,39 +28,38 @@ import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MAKE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_MODEL;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
@ -70,20 +69,16 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
* files. Ingests an image file and, if available, adds its date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact.
*/
@NbBundle.Messages({
"CannotRunFileTypeDetection=Cannot run file type detection."
})
@NbBundle.Messages({"CannotRunFileTypeDetection=Cannot run file type detection."})
public final class ExifParserFileIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private final AtomicInteger filesProcessed = new AtomicInteger(0);
private static final String MODULE_NAME = ExifParserModuleFactory.getModuleName();
private long jobId;
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private FileTypeDetector fileTypeDetector;
private final HashSet<String> supportedMimeTypes = new HashSet<>();
private TimeZone timeZone = null;
private Case currentCase;
private Blackboard blackboard;
ExifParserFileIngestModule() {
@ -103,18 +98,18 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
}
@Messages({"ExifParserFileIngestModule.indexError.message=Failed to post EXIF Metadata artifact(s)."})
@Override
public ProcessResult process(AbstractFile content) {
try {
currentCase = Case.getCurrentCaseThrows();
blackboard = currentCase.getServices().getBlackboard();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.INFO, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
//skip unalloc
if ((content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
|| (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.SLACK)))) {
return ProcessResult.OK;
}
@ -135,14 +130,9 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
return processFile(content);
}
@Messages({"ExifParserFileIngestModule.indexError.message=Failed to index EXIF Metadata artifact for keyword search."})
ProcessResult processFile(AbstractFile file) {
InputStream in = null;
BufferedInputStream bin = null;
private ProcessResult processFile(AbstractFile file) {
try {
in = new ReadContentInputStream(file);
bin = new BufferedInputStream(in);
try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file));) {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
Metadata metadata = ImageMetadataReader.readMetadata(bin);
@ -165,7 +155,7 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
}
Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
if (date != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ExifParserModuleFactory.getModuleName(), date.getTime() / 1000));
attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, MODULE_NAME, date.getTime() / 1000));
}
}
@ -174,15 +164,13 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (gpsDir != null) {
GeoLocation loc = gpsDir.getGeoLocation();
if (loc != null) {
double latitude = loc.getLatitude();
double longitude = loc.getLongitude();
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, ExifParserModuleFactory.getModuleName(), latitude));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, ExifParserModuleFactory.getModuleName(), longitude));
attributes.add(new BlackboardAttribute(TSK_GEO_LATITUDE, MODULE_NAME, loc.getLatitude()));
attributes.add(new BlackboardAttribute(TSK_GEO_LONGITUDE, MODULE_NAME, loc.getLongitude()));
}
Rational altitude = gpsDir.getRational(GpsDirectory.TAG_ALTITUDE);
if (altitude != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE, ExifParserModuleFactory.getModuleName(), altitude.doubleValue()));
attributes.add(new BlackboardAttribute(TSK_GEO_ALTITUDE, MODULE_NAME, altitude.doubleValue()));
}
}
@ -191,36 +179,30 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
if (StringUtils.isNotBlank(model)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL, ExifParserModuleFactory.getModuleName(), model));
attributes.add(new BlackboardAttribute(TSK_DEVICE_MODEL, MODULE_NAME, model));
}
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
if (StringUtils.isNotBlank(make)) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE, ExifParserModuleFactory.getModuleName(), make));
attributes.add(new BlackboardAttribute(TSK_DEVICE_MAKE, MODULE_NAME, make));
}
}
// Add the attributes, if there are any, to a new artifact
if (!attributes.isEmpty()) {
SleuthkitCase tskCase = currentCase.getSleuthkitCase();
org.sleuthkit.datamodel.Blackboard tskBlackboard = tskCase.getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF, attributes)) {
BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
if (!blackboard.artifactExists(file, TSK_METADATA_EXIF, attributes)) {
BlackboardArtifact bba = file.newArtifact(TSK_METADATA_EXIF);
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(bba);
blackboard.postArtifact(bba, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
Bundle.ExifParserFileIngestModule_indexError_message(), bba.getDisplayName());
}
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(),
BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF,
Collections.singletonList(bba)));
}
}
@ -237,24 +219,12 @@ public final class ExifParserFileIngestModule implements FileIngestModule {
} catch (IOException ex) {
logger.log(Level.WARNING, String.format("IOException when parsing image file '%s/%s' (id=%d).", file.getParentPath(), file.getName(), file.getId()), ex); //NON-NLS
return ProcessResult.ERROR;
} finally {
try {
if (in != null) {
in.close();
}
if (bin != null) {
bin.close();
}
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to close InputStream.", ex); //NON-NLS
return ProcessResult.ERROR;
}
}
}
/**
* Checks if should try to attempt to extract exif. Currently checks if JPEG
* image (by signature)
* Checks whether we should attempt to extract EXIF metadata. Currently
* checks whether the file is a JPEG, TIFF, or X-WAV (by signature).
*
* @param f file to be checked
*

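The refactored processFile() reads EXIF data through a try-with-resources stream instead of the old manual finally block, and builds its attributes with static imports and the MODULE_NAME constant. A condensed sketch of that flow is below; getFirstDirectoryOfType() is the standard metadata-extractor lookup and is an assumption here, since the hunks only show the directories being consumed.

import com.drew.imaging.ImageMetadataReader;
import com.drew.lang.GeoLocation;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.TimeZone;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE;
import org.sleuthkit.datamodel.ReadContentInputStream;

final class ExifReadSketch {

    /** Reads the EXIF created date and GPS position into blackboard attributes. */
    static Collection<BlackboardAttribute> readExifAttributes(AbstractFile file, String moduleName,
            TimeZone timeZone) throws Exception {
        Collection<BlackboardAttribute> attributes = new ArrayList<>();
        // try-with-resources closes the stream, replacing the old finally block
        try (BufferedInputStream bin = new BufferedInputStream(new ReadContentInputStream(file))) {
            Metadata metadata = ImageMetadataReader.readMetadata(bin);
            ExifSubIFDDirectory exifDir = metadata.getFirstDirectoryOfType(ExifSubIFDDirectory.class);
            if (exifDir != null) {
                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL, timeZone);
                if (date != null) {
                    attributes.add(new BlackboardAttribute(TSK_DATETIME_CREATED, moduleName, date.getTime() / 1000));
                }
            }
            GpsDirectory gpsDir = metadata.getFirstDirectoryOfType(GpsDirectory.class);
            if (gpsDir != null) {
                GeoLocation loc = gpsDir.getGeoLocation();
                if (loc != null) {
                    attributes.add(new BlackboardAttribute(TSK_GEO_LATITUDE, moduleName, loc.getLatitude()));
                    attributes.add(new BlackboardAttribute(TSK_GEO_LONGITUDE, moduleName, loc.getLongitude()));
                }
            }
        }
        return attributes;
    }
}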
View File

@ -18,7 +18,6 @@
*/
package org.sleuthkit.autopsy.modules.fileextmismatch;
import java.util.Collections;
import java.util.HashMap;
import java.util.Set;
import java.util.logging.Level;
@ -26,7 +25,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
@ -34,10 +32,10 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.fileextmismatch.FileExtMismatchDetectorModuleSettings.CHECK_TYPE;
import org.sleuthkit.autopsy.modules.filetypeid.FileTypeDetector;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.TskData;
@ -110,7 +108,7 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
@Messages({"FileExtMismatchIngestModule.indexError.message=Failed to index file extension mismatch artifact for keyword search."})
public ProcessResult process(AbstractFile abstractFile) {
try {
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.WARNING, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
@ -121,15 +119,15 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|| (abstractFile.isFile() == false)) {
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)
|| (abstractFile.isFile() == false)) {
return ProcessResult.OK;
}
// deleted files often have content that was not theirs and therefore causes mismatches
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC))
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
|| (abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
return ProcessResult.OK;
}
@ -145,14 +143,17 @@ public class FileExtMismatchIngestModule implements FileIngestModule {
BlackboardArtifact bart = abstractFile.newArtifact(ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED);
try {
// index the artifact for keyword search
blackboard.indexArtifact(bart);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of the new artifact.
*/
blackboard.postArtifact(bart, FileExtMismatchDetectorModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bart.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(FileExtMismatchDetectorModuleFactory.getModuleName(), Bundle.FileExtMismatchIngestModule_indexError_message());
}
services.fireModuleDataEvent(new ModuleDataEvent(FileExtMismatchDetectorModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart)));
}
return ProcessResult.OK;
} catch (TskException ex) {

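Conceptually, this module compares a file's detected MIME type with its extension before creating a TSK_EXT_MISMATCH_DETECTED artifact. The module's real rules come from its settings (not shown in this diff), so the mapping and helper below are purely illustrative; AbstractFile.getMIMEType() and getNameExtension() are the accessors assumed here.

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.sleuthkit.datamodel.AbstractFile;

final class MismatchCheckSketch {

    /** Illustrative MIME-type-to-allowed-extensions table; the real module loads its own settings. */
    private static final Map<String, Set<String>> ALLOWED_EXTENSIONS = new HashMap<>();
    static {
        ALLOWED_EXTENSIONS.put("image/jpeg", new HashSet<>(Arrays.asList("jpg", "jpeg")));
        ALLOWED_EXTENSIONS.put("application/pdf", new HashSet<>(Arrays.asList("pdf")));
    }

    /** Returns true when the file's recorded MIME type does not match its extension. */
    static boolean isMismatch(AbstractFile file) {
        String mimeType = file.getMIMEType();       // type recorded earlier by the file type ID module
        String extension = file.getNameExtension(); // extension without the dot
        if (mimeType == null || extension == null || extension.isEmpty()) {
            return false; // nothing to compare
        }
        Set<String> allowed = ALLOWED_EXTENSIONS.get(mimeType);
        return allowed != null && !allowed.contains(extension.toLowerCase());
    }
}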
View File

@ -18,40 +18,42 @@
*/
package org.sleuthkit.autopsy.modules.filetypeid;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.modules.filetypeid.CustomFileTypesManager.CustomFileTypesException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Detects the type of a file based on signature (magic) values. Posts results
* to the blackboard.
*/
@NbBundle.Messages({
"CannotRunFileTypeDetection=Unable to run file type detection."
})
@NbBundle.Messages({"CannotRunFileTypeDetection=Unable to run file type detection."})
public class FileTypeIdIngestModule implements FileIngestModule {
private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
private long jobId;
private static final HashMap<Long, IngestJobTotals> totalsForIngestJobs = new HashMap<>();
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private long jobId;
private FileTypeDetector fileTypeDetector;
/**
@ -146,26 +148,34 @@ public class FileTypeIdIngestModule implements FileIngestModule {
* @param fileType The file type rule for categorizing the hit.
*/
private void createInterestingFileHit(AbstractFile file, FileType fileType) {
List<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(
TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(),
fileType.getInterestingFilesSetName()),
new BlackboardAttribute(
TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(),
fileType.getMimeType()));
try {
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, FileTypeIdModuleFactory.getModuleName(), fileType.getInterestingFilesSetName()));
attributes.add(new BlackboardAttribute(
BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, FileTypeIdModuleFactory.getModuleName(), fileType.getMimeType()));
Case currentCase = Case.getCurrentCaseThrows();
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
if (!tskBlackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
try {
currentCase.getServices().getBlackboard().indexArtifact(artifact);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of the new artifact.
*/
tskBlackboard.postArtifact(artifact, FileTypeIdModuleFactory.getModuleName());
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, String.format("Unable to index TSK_INTERESTING_FILE_HIT blackboard artifact %d (file obj_id=%d)", artifact.getArtifactID(), file.getId()), ex); //NON-NLS
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Unable to create TSK_INTERESTING_FILE_HIT artifact for file (obj_id=%d)", file.getId()), ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
@ -227,5 +237,4 @@ public class FileTypeIdIngestModule implements FileIngestModule {
long matchTime = 0;
long numFiles = 0;
}
}

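createInterestingFileHit() now checks Blackboard.artifactExists() before adding a TSK_INTERESTING_FILE_HIT, so re-running the module does not duplicate hits. A condensed sketch of that dedup-then-post flow; the helper class and method names are illustrative.

import java.util.Arrays;
import java.util.List;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;

final class InterestingHitSketch {

    /** Creates an interesting-file hit only if an identical artifact is not already present. */
    static void addHitOnce(Blackboard blackboard, AbstractFile file, String setName,
            String category, String moduleName) throws TskCoreException, Blackboard.BlackboardException {
        List<BlackboardAttribute> attributes = Arrays.asList(
                new BlackboardAttribute(TSK_SET_NAME, moduleName, setName),
                new BlackboardAttribute(TSK_CATEGORY, moduleName, category));
        // artifactExists() makes the operation idempotent across re-runs of the module
        if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
            BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
            artifact.addAttributes(attributes);
            blackboard.postArtifact(artifact, moduleName); // index + notify UI
        }
    }
}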
View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.modules.hashdatabase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
@ -30,7 +29,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.healthmonitor.HealthMonitor;
@ -39,9 +37,9 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.modules.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
@ -102,7 +100,7 @@ public class HashDbIngestModule implements FileIngestModule {
* object is used to configure the module.
*
* @param settings The module settings.
*
*
* @throws NoCurrentCaseException If there is no open case.
*/
HashDbIngestModule(HashLookupModuleSettings settings) throws NoCurrentCaseException {
@ -170,7 +168,7 @@ public class HashDbIngestModule implements FileIngestModule {
@Override
public ProcessResult process(AbstractFile file) {
try {
blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
@ -178,7 +176,7 @@ public class HashDbIngestModule implements FileIngestModule {
// Skip unallocated space files.
if ((file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
|| file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK))) {
return ProcessResult.OK;
}
@ -356,8 +354,11 @@ public class HashDbIngestModule implements FileIngestModule {
badFile.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(badFile);
/*
* Post the artifact, which will index it for keyword search and
* fire an event to notify the UI of the new artifact.
*/
blackboard.postArtifact(badFile, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + badFile.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -400,7 +401,6 @@ public class HashDbIngestModule implements FileIngestModule {
abstractFile.getName() + md5Hash,
badFile));
}
services.fireModuleDataEvent(new ModuleDataEvent(moduleName, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
} catch (TskException ex) {
logger.log(Level.WARNING, "Error creating blackboard artifact", ex); //NON-NLS
}
@ -414,7 +414,7 @@ public class HashDbIngestModule implements FileIngestModule {
* @param knownHashSets The list of hash sets for "known" files.
*/
private static synchronized void postSummary(long jobId,
List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
List<HashDb> knownBadHashSets, List<HashDb> knownHashSets) {
IngestJobTotals jobTotals = getTotalsForIngestJobs(jobId);
totalsForIngestJobs.remove(jobId);

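The hash lookup module creates a TSK_HASHSET_HIT artifact for each notable match and now posts it in one step. Below is a hedged sketch of that artifact construction; the TSK_HASH_MD5 attribute and the null guard are illustrative additions, not necessarily what the module records.

import java.util.ArrayList;
import java.util.List;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_HASH_MD5;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;

final class HashHitSketch {

    /** Flags a file that matched a notable hash set and posts the hit. */
    static BlackboardArtifact addHashHit(Blackboard blackboard, AbstractFile file, String hashSetName,
            String moduleName) throws TskCoreException, Blackboard.BlackboardException {
        List<BlackboardAttribute> attributes = new ArrayList<>();
        attributes.add(new BlackboardAttribute(TSK_SET_NAME, moduleName, hashSetName));
        String md5 = file.getMd5Hash(); // may be null if no hash was calculated for the file
        if (md5 != null) {
            attributes.add(new BlackboardAttribute(TSK_HASH_MD5, moduleName, md5));
        }
        BlackboardArtifact badFile = file.newArtifact(TSK_HASHSET_HIT);
        badFile.addAttributes(attributes);
        // One postArtifact() call now covers both keyword-search indexing and the
        // UI event that fireModuleDataEvent() used to send.
        blackboard.postArtifact(badFile, moduleName);
        return badFile;
    }
}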
View File

@ -30,12 +30,12 @@ import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream.ReadContentInputStreamException;
@ -58,7 +58,7 @@ final class CallLogAnalyzer {
/**
* Find call logs given an ingest job context and index the results.
*
*
* @param context The ingest job context.
*/
public void findCallLogs(IngestJobContext context) {
@ -69,7 +69,7 @@ final class CallLogAnalyzer {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
blackboard = openCase.getServices().getBlackboard();
blackboard = openCase.getSleuthkitCase().getBlackboard();
List<AbstractFile> absFiles;
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
@ -98,7 +98,7 @@ final class CallLogAnalyzer {
/**
* Index results for call logs found in the database.
*
*
* @param DatabasePath The path to the database.
* @param fileId The ID of the file associated with artifacts.
*/
@ -162,8 +162,12 @@ final class CallLogAnalyzer {
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(bba);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of the new artifact.
*/
blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -184,7 +188,5 @@ final class CallLogAnalyzer {
} catch (Exception e) {
logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e); //NON-NLS
}
}
}

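The analyzer locates candidate SQLite databases in the image and parses them outside the case database. A sketch of that find-and-extract step follows; the exact WHERE clause and the temp-directory destination are assumptions, since the hunks above only show the query and the artifact posting.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.List;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

final class DbExtractSketch {

    /** Finds candidate SQLite databases by name and copies them to the case temp directory. */
    static void extractDatabases(String dbFileName) throws NoCurrentCaseException, TskCoreException, IOException {
        Case openCase = Case.getCurrentCaseThrows();
        SleuthkitCase skCase = openCase.getSleuthkitCase();
        // The analyzer's real WHERE clause is not shown in this hunk; an exact-name match is used here.
        List<AbstractFile> absFiles = skCase.findAllFilesWhere("name = '" + dbFileName + "'");
        for (AbstractFile dbFile : absFiles) {
            Path localCopy = Paths.get(openCase.getTempDirectory(), dbFile.getId() + "_" + dbFile.getName());
            try (ReadContentInputStream in = new ReadContentInputStream(dbFile)) {
                Files.copy(in, localCopy, StandardCopyOption.REPLACE_EXISTING);
            }
        }
    }
}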
View File

@ -35,12 +35,12 @@ import java.util.logging.Level;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
@ -75,7 +75,7 @@ final class ContactAnalyzer {
return;
}
blackboard = openCase.getServices().getBlackboard();
blackboard = openCase.getSleuthkitCase().getBlackboard();
List<AbstractFile> absFiles;
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
@ -183,7 +183,7 @@ final class ContactAnalyzer {
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(bba);
blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(

View File

@ -31,12 +31,12 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.ReadContentInputStream;
@ -73,7 +73,7 @@ class TextMessageAnalyzer {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
blackboard = openCase.getServices().getBlackboard();
blackboard = openCase.getSleuthkitCase().getBlackboard();
try {
SleuthkitCase skCase = openCase.getSleuthkitCase();
absFiles = skCase.findAllFilesWhere("name ='mmssms.db'"); //NON-NLS //get exact file name
@ -168,8 +168,11 @@ class TextMessageAnalyzer {
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(bba);
/*
* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of the new artifact.
*/
blackboard.postArtifact(bba, moduleName);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(
@ -191,7 +194,5 @@ class TextMessageAnalyzer {
} catch (Exception e) {
logger.log(Level.SEVERE, "Error parsing text messages to Blackboard", e); //NON-NLS
}
}
}

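Once mmssms.db has been copied out of the image, the analyzer reads its rows with plain JDBC. The sketch below uses the SQLiteDBConnect helper that appears later in this commit (in the Plaso module); the sms table and column names follow the stock Android SMS schema and are assumptions rather than content taken from these hunks.

import java.sql.ResultSet;
import java.sql.SQLException;
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;

final class SmsQuerySketch {

    /** Reads rows from a local copy of mmssms.db; column names assume the stock Android SMS schema. */
    static void readMessages(String localDbPath) throws SQLException {
        String query = "SELECT address, date, body FROM sms"; // assumed schema, not shown in the hunks above
        try (SQLiteDBConnect dbConnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + localDbPath);
                ResultSet resultSet = dbConnect.executeQry(query)) {
            while (resultSet.next()) {
                String address = resultSet.getString("address");
                long dateSeconds = resultSet.getLong("date") / 1000; // Android stores milliseconds
                String body = resultSet.getString("body");
                // The analyzer turns values like these into TSK_MESSAGE attributes and posts the artifact.
            }
        }
    }
}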
View File

@ -18,10 +18,8 @@
*/
package org.sleuthkit.autopsy.modules.iOS;
import org.openide.util.lookup.ServiceProvider;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;

View File

@ -19,8 +19,8 @@
package org.sleuthkit.autopsy.modules.interestingitems;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ -29,7 +29,6 @@ import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
@ -37,10 +36,13 @@ import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleReferenceCounter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -48,15 +50,15 @@ import org.sleuthkit.datamodel.TskData;
* A file ingest module that generates interesting files set hit artifacts for
* files that match interesting files set definitions.
*/
@NbBundle.Messages({
"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."
})
@NbBundle.Messages({"FilesIdentifierIngestModule.getFilesError=Error getting interesting files sets from file."})
final class FilesIdentifierIngestModule implements FileIngestModule {
private static final Object sharedResourcesLock = new Object();
private static final Logger logger = Logger.getLogger(FilesIdentifierIngestModule.class.getName());
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private static final Map<Long, List<FilesSet>> interestingFileSetsByJob = new ConcurrentHashMap<>();
private static final String MODULE_NAME = InterestingItemsIngestModuleFactory.getModuleName();
private final FilesIdentifierIngestJobSettings settings;
private final IngestServices services = IngestServices.getInstance();
private IngestJobContext context;
@ -72,9 +74,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
this.settings = settings;
}
/**
* @inheritDoc
*/
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
@ -100,21 +99,16 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
}
}
/**
* @inheritDoc
*/
@Override
@Messages({"FilesIdentifierIngestModule.indexError.message=Failed to index interesting file hit artifact for keyword search."})
public ProcessResult process(AbstractFile file) {
Case currentCase;
try {
currentCase = Case.getCurrentCaseThrows();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
blackboard = currentCase.getServices().getBlackboard();
// Skip slack space files.
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.SLACK)) {
return ProcessResult.OK;
@ -126,48 +120,46 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
String ruleSatisfied = filesSet.fileIsMemberOf(file);
if (ruleSatisfied != null) {
try {
// Post an interesting files set hit artifact to the
// blackboard.
String moduleName = InterestingItemsIngestModuleFactory.getModuleName();
Collection<BlackboardAttribute> attributes = new ArrayList<>();
// Add a set name attribute to the artifact. This adds a
// fair amount of redundant data to the attributes table
// (i.e., rows that differ only in artifact id), but doing
// otherwise would requires reworking the interesting files
// set hit artifact.
BlackboardAttribute setNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, moduleName, filesSet.getName());
attributes.add(setNameAttribute);
Collection<BlackboardAttribute> attributes = Arrays.asList(
/*
* Add a set name attribute to the artifact. This
* adds a fair amount of redundant data to the
* attributes table (i.e., rows that differ only in
* artifact id), but doing otherwise would require
* reworking the interesting files set hit artifact. */
new BlackboardAttribute(
TSK_SET_NAME, MODULE_NAME,
filesSet.getName()),
/*
* Add a category attribute to the artifact to
* record the interesting files set membership rule
* that was satisfied. */
new BlackboardAttribute(
TSK_CATEGORY, MODULE_NAME,
ruleSatisfied)
);
// Add a category attribute to the artifact to record the
// interesting files set membership rule that was satisfied.
BlackboardAttribute ruleNameAttribute = new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, moduleName, ruleSatisfied);
attributes.add(ruleNameAttribute);
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact artifact = file.newArtifact(TSK_INTERESTING_FILE_HIT);
artifact.addAttributes(attributes);
try {
// index the artifact for keyword search
blackboard.indexArtifact(artifact);
// Post the artifact to the blackboard.
blackboard.postArtifact(artifact, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + artifact.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.FilesIdentifierIngestModule_indexError_message(), artifact.getDisplayName());
}
services.fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, Collections.singletonList(artifact)));
// make an ingest inbox message
StringBuilder detailsSb = new StringBuilder();
detailsSb.append("File: " + file.getParentPath() + file.getName() + "<br/>\n");
detailsSb.append("Rule Set: " + filesSet.getName());
detailsSb.append("File: ").append(file.getParentPath()).append(file.getName()).append("<br/>\n");
detailsSb.append("Rule Set: ").append(filesSet.getName());
services.postMessage(IngestMessage.createDataMessage(InterestingItemsIngestModuleFactory.getModuleName(),
"Interesting File Match: " + filesSet.getName() + "(" + file.getName() +")",
"Interesting File Match: " + filesSet.getName() + "(" + file.getName() + ")",
detailsSb.toString(),
file.getName(),
artifact));
@ -180,9 +172,6 @@ final class FilesIdentifierIngestModule implements FileIngestModule {
return ProcessResult.OK;
}
/**
* @inheritDoc
*/
@Override
public void shutDown() {
if (context != null) {

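Besides posting the artifact, the module still sends an ingest inbox message so the analyst sees the hit immediately, using the chained StringBuilder appends shown above. A minimal sketch of that message construction, with an illustrative helper wrapper around the same IngestMessage.createDataMessage() call:

import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;

final class InboxMessageSketch {

    /** Posts an ingest inbox message describing an interesting-file hit. */
    static void postHitMessage(AbstractFile file, String setName, BlackboardArtifact artifact, String moduleName) {
        StringBuilder detailsSb = new StringBuilder();
        detailsSb.append("File: ").append(file.getParentPath()).append(file.getName()).append("<br/>\n");
        detailsSb.append("Rule Set: ").append(setName);
        IngestServices.getInstance().postMessage(IngestMessage.createDataMessage(
                moduleName,
                "Interesting File Match: " + setName + " (" + file.getName() + ")",
                detailsSb.toString(),
                file.getName(), // unique key used to group messages in the inbox
                artifact));
    }
}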
View File

@ -0,0 +1,5 @@
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All parsers except chrome_cache* and those listed below are enabled. Enabling these will cause Plaso to run slower.
PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.

View File

@ -0,0 +1,29 @@
# {0} - file that events are from
PlasoIngestModule.artifact.progress=Adding events to case: {0}
PlasoIngestModule.bad.imageFile=Cannot find image file name and path
PlasoIngestModule.completed=Plaso Processing Completed
PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation
PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image.
PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.
PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.
PlasoIngestModule.error.running.psort=Error running Psort, see log file.
PlasoIngestModule.event.datetime=Event Date Time
PlasoIngestModule.event.description=Event Description
PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.
PlasoIngestModule.executable.not.found=Plaso Executable Not Found.
PlasoIngestModule.has.run=Plaso Plugin has been run.
PlasoIngestModule.info.empty.database=Plaso database was empty.
PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled
PlasoIngestModule.psort.cancelled=psort run was canceled
PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete.
PlasoIngestModule.requires.windows=Plaso module requires windows.
PlasoIngestModule.running.psort=Running Psort
PlasoIngestModule.starting.log2timeline=Starting Log2timeline
PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings
PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source.
PlasoModuleFactory_moduleName=Plaso
PlasoModuleSettingsPanel.winRegCheckBox.text=winreg: Parser for Windows NT Registry (REGF) files.
PlasoModuleSettingsPanel.peCheckBox.text=pe: Parser for Portable Executable (PE) files.
PlasoModuleSettingsPanel.plasoParserInfoTextArea.text=All parsers except chrome_cache* and those listed below are enabled. Enabling these will cause Plaso to run slower.
PlasoModuleSettingsPanel.noteLabel.text=NOTE: This module can take a long time to run.
PlasoModuleSettingsPanel.disabledNoteLabel.text=* Disabled because it duplicates existing Autopsy modules.

View File

@ -0,0 +1,478 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import static java.util.Objects.nonNull;
import java.util.logging.Level;
import java.util.stream.Collectors;
import org.openide.modules.InstalledFileLocator;
import org.openide.util.Cancellable;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.SQLiteDBConnect;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType;
/**
* Data source ingest module that runs Plaso against the image.
*/
public class PlasoIngestModule implements DataSourceIngestModule {
private static final Logger logger = Logger.getLogger(PlasoIngestModule.class.getName());
private static final String MODULE_NAME = PlasoModuleFactory.getModuleName();
private static final String PLASO = "plaso"; //NON-NLS
private static final String PLASO64 = "plaso-20180818-amd64";//NON-NLS
private static final String PLASO32 = "plaso-20180818-win32";//NON-NLS
private static final String LOG2TIMELINE_EXECUTABLE = "Log2timeline.exe";//NON-NLS
private static final String PSORT_EXECUTABLE = "psort.exe";//NON-NLS
private static final String COOKIE = "cookie";//NON-NLS
private static final int LOG2TIMELINE_WORKERS = 2;
private File log2TimeLineExecutable;
private File psortExecutable;
private final PlasoModuleSettings settings;
private IngestJobContext context;
private Case currentCase;
private FileManager fileManager;
private Image image;
private AbstractFile previousFile = null; // cache used when looking up files in Autopsy DB
PlasoIngestModule(PlasoModuleSettings settings) {
this.settings = settings;
}
@NbBundle.Messages({
"PlasoIngestModule.executable.not.found=Plaso Executable Not Found.",
"PlasoIngestModule.requires.windows=Plaso module requires windows.",
"PlasoIngestModule.dataSource.not.an.image=Datasource is not an Image."})
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
if (false == PlatformUtil.isWindowsOS()) {
throw new IngestModuleException(Bundle.PlasoIngestModule_requires_windows());
}
try {
log2TimeLineExecutable = locateExecutable(LOG2TIMELINE_EXECUTABLE);
psortExecutable = locateExecutable(PSORT_EXECUTABLE);
} catch (FileNotFoundException exception) {
logger.log(Level.WARNING, "Plaso executable not found.", exception); //NON-NLS
throw new IngestModuleException(Bundle.PlasoIngestModule_executable_not_found(), exception);
}
Content dataSource = context.getDataSource();
if (!(dataSource instanceof Image)) {
throw new IngestModuleException(Bundle.PlasoIngestModule_dataSource_not_an_image());
}
image = (Image) dataSource;
}
@NbBundle.Messages({
"PlasoIngestModule.error.running.log2timeline=Error running log2timeline, see log file.",
"PlasoIngestModule.error.running.psort=Error running Psort, see log file.",
"PlasoIngestModule.error.creating.output.dir=Error creating Plaso module output directory.",
"PlasoIngestModule.starting.log2timeline=Starting Log2timeline",
"PlasoIngestModule.running.psort=Running Psort",
"PlasoIngestModule.log2timeline.cancelled=Log2timeline run was canceled",
"PlasoIngestModule.psort.cancelled=psort run was canceled",
"PlasoIngestModule.bad.imageFile=Cannot find image file name and path",
"PlasoIngestModule.completed=Plaso Processing Completed",
"PlasoIngestModule.has.run=Plaso Plugin has been run.",
"PlasoIngestModule.psort.fail=Plaso returned an error when sorting events. Results are not complete."})
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
assert dataSource.equals(image);
statusHelper.switchToDeterminate(100);
currentCase = Case.getCurrentCase();
fileManager = currentCase.getServices().getFileManager();
String currentTime = new SimpleDateFormat("yyyy-MM-dd HH-mm-ss z", Locale.US).format(System.currentTimeMillis());//NON-NLS
Path moduleOutputPath = Paths.get(currentCase.getModuleDirectory(), PLASO, currentTime);
try {
Files.createDirectories(moduleOutputPath);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error creating Plaso module output directory.", ex); //NON-NLS
return ProcessResult.ERROR;
}
// Run log2timeline
logger.log(Level.INFO, "Starting Plaso Run.");//NON-NLS
statusHelper.progress(Bundle.PlasoIngestModule_starting_log2timeline(), 0);
ProcessBuilder log2TimeLineCommand = buildLog2TimeLineCommand(moduleOutputPath, image);
try {
Process log2TimeLineProcess = log2TimeLineCommand.start();
try (BufferedReader log2TimeLineOutpout = new BufferedReader(new InputStreamReader(log2TimeLineProcess.getInputStream()))) {
L2TStatusProcessor statusReader = new L2TStatusProcessor(log2TimeLineOutpout, statusHelper, moduleOutputPath);
new Thread(statusReader, "log2timeline status reader").start(); //NON-NLS
ExecUtil.waitForTermination(LOG2TIMELINE_EXECUTABLE, log2TimeLineProcess, new DataSourceIngestModuleProcessTerminator(context));
statusReader.cancel();
}
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Log2timeline run was canceled"); //NON-NLS
return ProcessResult.OK;
}
if (Files.notExists(moduleOutputPath.resolve(PLASO))) {
logger.log(Level.WARNING, "Error running log2timeline: there was no storage file."); //NON-NLS
return ProcessResult.ERROR;
}
// sort the output
statusHelper.progress(Bundle.PlasoIngestModule_running_psort(), 33);
ProcessBuilder psortCommand = buildPsortCommand(moduleOutputPath);
int result = ExecUtil.execute(psortCommand, new DataSourceIngestModuleProcessTerminator(context));
if (result != 0) {
logger.log(Level.SEVERE, String.format("Error running Psort, error code returned %d", result)); //NON-NLS
MessageNotifyUtil.Notify.error(MODULE_NAME, Bundle.PlasoIngestModule_psort_fail());
return ProcessResult.ERROR;
}
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "psort run was canceled"); //NON-NLS
return ProcessResult.OK;
}
Path plasoFile = moduleOutputPath.resolve("plasodb.db3"); //NON-NLS
if (Files.notExists(plasoFile)) {
logger.log(Level.SEVERE, "Error running Psort: there was no sqlite db file."); //NON-NLS
return ProcessResult.ERROR;
}
// parse the output and make artifacts
createPlasoArtifacts(plasoFile.toString(), statusHelper);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error running Plaso.", ex);//NON-NLS
return ProcessResult.ERROR;
}
IngestMessage message = IngestMessage.createMessage(IngestMessage.MessageType.DATA,
Bundle.PlasoIngestModule_has_run(),
Bundle.PlasoIngestModule_completed());
IngestServices.getInstance().postMessage(message);
return ProcessResult.OK;
}
private ProcessBuilder buildLog2TimeLineCommand(Path moduleOutputPath, Image image) {
//make a csv list of disabled parsers.
String parsersString = settings.getParsers().entrySet().stream()
.filter(entry -> entry.getValue() == false)
.map(entry -> "!" + entry.getKey()) // '!' prepended to parsername disables it. //NON-NLS
.collect(Collectors.joining(","));//NON-NLS
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
"\"" + log2TimeLineExecutable + "\"", //NON-NLS
"--vss-stores", "all", //NON-NLS
"-z", image.getTimeZone(), //NON-NLS
"--partitions", "all", //NON-NLS
"--hasher_file_size_limit", "1", //NON-NLS
"--hashers", "none", //NON-NLS
"--parsers", "\"" + parsersString + "\"",//NON-NLS
"--no_dependencies_check", //NON-NLS
"--workers", String.valueOf(LOG2TIMELINE_WORKERS),//NON-NLS
moduleOutputPath.resolve(PLASO).toString(),
image.getPaths()[0]
);
processBuilder.redirectError(moduleOutputPath.resolve("log2timeline_err.txt").toFile()); //NON-NLS
return processBuilder;
}
static private ProcessBuilder buildProcessWithRunAsInvoker(String... commandLine) {
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
/* Add an environment variable to force log2timeline/psort to run with
* the same permissions Autopsy uses. */
processBuilder.environment().put("__COMPAT_LAYER", "RunAsInvoker"); //NON-NLS
return processBuilder;
}
private ProcessBuilder buildPsortCommand(Path moduleOutputPath) {
ProcessBuilder processBuilder = buildProcessWithRunAsInvoker(
"\"" + psortExecutable + "\"", //NON-NLS
"-o", "4n6time_sqlite", //NON-NLS
"-w", moduleOutputPath.resolve("plasodb.db3").toString(), //NON-NLS
moduleOutputPath.resolve(PLASO).toString()
);
processBuilder.redirectOutput(moduleOutputPath.resolve("psort_output.txt").toFile()); //NON-NLS
processBuilder.redirectError(moduleOutputPath.resolve("psort_err.txt").toFile()); //NON-NLS
return processBuilder;
}
private static File locateExecutable(String executableName) throws FileNotFoundException {
String architectureFolder = PlatformUtil.is64BitOS() ? PLASO64 : PLASO32;
String executableToFindName = Paths.get(PLASO, architectureFolder, executableName).toString();
File exeFile = InstalledFileLocator.getDefault().locate(executableToFindName, PlasoIngestModule.class.getPackage().getName(), false);
if (null == exeFile || exeFile.canExecute() == false) {
throw new FileNotFoundException(executableName + " executable not found.");
}
return exeFile;
}
@NbBundle.Messages({
"PlasoIngestModule.exception.posting.artifact=Exception Posting artifact.",
"PlasoIngestModule.event.datetime=Event Date Time",
"PlasoIngestModule.event.description=Event Description",
"PlasoIngestModule.create.artifacts.cancelled=Cancelled Plaso Artifact Creation ",
"# {0} - file that events are from",
"PlasoIngestModule.artifact.progress=Adding events to case: {0}",
"PlasoIngestModule.info.empty.database=Plaso database was empty.",
})
private void createPlasoArtifacts(String plasoDb, DataSourceIngestModuleProgress statusHelper) {
Blackboard blackboard = currentCase.getSleuthkitCase().getBlackboard();
String sqlStatement = "SELECT substr(filename,1) AS filename, "
+ " strftime('%s', datetime) AS epoch_date, "
+ " description, "
+ " source, "
+ " type, "
+ " sourcetype "
+ " FROM log2timeline "
+ " WHERE source NOT IN ('FILE', "
+ " 'WEBHIST') " // bad dates and duplicates with what we have.
+ " AND sourcetype NOT IN ('UNKNOWN', "
+ " 'PE Import Time');"; // lots of bad dates //NON-NLS
try (SQLiteDBConnect tempdbconnect = new SQLiteDBConnect("org.sqlite.JDBC", "jdbc:sqlite:" + plasoDb); //NON-NLS
ResultSet resultSet = tempdbconnect.executeQry(sqlStatement)) {
boolean dbHasData = false;
while (resultSet.next()) {
dbHasData = true;
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Cancelled Plaso Artifact Creation."); //NON-NLS
return;
}
String currentFileName = resultSet.getString("filename"); //NON-NLS
statusHelper.progress(Bundle.PlasoIngestModule_artifact_progress(currentFileName), 66);
Content resolvedFile = getAbstractFile(currentFileName);
if (resolvedFile == null) {
logger.log(Level.INFO, "File {0} from Plaso output not found in case. Associating it with the data source instead.", currentFileName);//NON-NLS
resolvedFile = image;
}
String description = resultSet.getString("description");
TimelineEventType eventType = findEventSubtype(currentFileName, resultSet);
// If the description is empty use the event type display name
// as the description.
if ( description == null || description.isEmpty() ) {
if (eventType != TimelineEventType.OTHER) {
description = eventType.getDisplayName();
} else {
continue;
}
}
Collection<BlackboardAttribute> bbattributes = Arrays.asList(
new BlackboardAttribute(
TSK_DATETIME, MODULE_NAME,
resultSet.getLong("epoch_date")), //NON-NLS
new BlackboardAttribute(
TSK_DESCRIPTION, MODULE_NAME,
description),//NON-NLS
new BlackboardAttribute(
TSK_TL_EVENT_TYPE, MODULE_NAME,
eventType.getTypeID()));
try {
BlackboardArtifact bbart = resolvedFile.newArtifact(TSK_TL_EVENT);
bbart.addAttributes(bbattributes);
try {
/* Post the artifact, which will index it for keyword search
* and fire an event to notify the UI of the new artifact. */
blackboard.postArtifact(bbart, MODULE_NAME);
} catch (BlackboardException ex) {
logger.log(Level.SEVERE, "Error Posting Artifact.", ex);//NON-NLS
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception Adding Artifact.", ex);//NON-NLS
}
}
// Check if there is data in the db
if( !dbHasData ) {
logger.log(Level.INFO, String.format("PlasoDB was empty: %s", plasoDb));
MessageNotifyUtil.Notify.info(MODULE_NAME, Bundle.PlasoIngestModule_info_empty_database());
}
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error while trying to read into a sqlite db.", ex);//NON-NLS
}
}
private AbstractFile getAbstractFile(String file) {
Path path = Paths.get(file);
String fileName = path.getFileName().toString();
String filePath = path.getParent().toString().replaceAll("\\\\", "/");//NON-NLS
if (filePath.endsWith("/") == false) {//NON-NLS
filePath += "/";//NON-NLS
}
// check the cached file
//TODO: would we reduce 'cache misses' if we retrieved the events sorted by file? Is that overhead worth it?
if (previousFile != null
&& previousFile.getName().equalsIgnoreCase(fileName)
&& previousFile.getParentPath().equalsIgnoreCase(filePath)) {
return previousFile;
}
try {
List<AbstractFile> abstractFiles = fileManager.findFiles(fileName, filePath);
if (abstractFiles.size() == 1) {// TODO: why do we bother with this check. also we don't cache the file...
return abstractFiles.get(0);
}
for (AbstractFile resolvedFile : abstractFiles) {
// double check it's an exact match
if (filePath.equalsIgnoreCase(resolvedFile.getParentPath())) {
// cache it for next time
previousFile = resolvedFile;
return resolvedFile;
}
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception finding file.", ex);
}
return null;
}
/**
* Determine the TimelineEventType of the event from the plaso information.
*
* @param fileName The name of the file this event is from.
* @param row      The row returned from the log2timeline table of the plaso
* output.
*
* @return The TimelineEventType of the given event.
*
* @throws SQLException
*/
private TimelineEventType findEventSubtype(String fileName, ResultSet row) throws SQLException {
switch (row.getString("source")) {
case "WEBHIST": //These shouldn't actually be present, but keeping the logic just in case...
if (fileName.toLowerCase().contains(COOKIE)
|| row.getString("type").toLowerCase().contains(COOKIE)) {//NON-NLS
return TimelineEventType.WEB_COOKIE;
} else {
return TimelineEventType.WEB_HISTORY;
}
case "EVT":
case "LOG":
return TimelineEventType.LOG_ENTRY;
case "REG":
switch (row.getString("sourcetype").toLowerCase()) {//NON-NLS
case "unknown : usb entries":
case "unknown : usbstor entries":
return TimelineEventType.DEVICES_ATTACHED;
default:
return TimelineEventType.REGISTRY;
}
default:
return TimelineEventType.OTHER;
}
}
/**
* Runs in a thread and reads the output of log2timeline. It redirects the
* output both to a log file, and to the status message of the Plaso ingest
* module progress bar.
*/
private static class L2TStatusProcessor implements Runnable, Cancellable {
private final BufferedReader log2TimeLineOutpout;
private final DataSourceIngestModuleProgress statusHelper;
volatile private boolean cancelled = false;
private final Path outputPath;
private L2TStatusProcessor(BufferedReader log2TimeLineOutpout, DataSourceIngestModuleProgress statusHelper, Path outputPath) throws IOException {
this.log2TimeLineOutpout = log2TimeLineOutpout;
this.statusHelper = statusHelper;
this.outputPath = outputPath;
}
@Override
public void run() {
try (BufferedWriter writer = Files.newBufferedWriter(outputPath.resolve("log2timeline_output.txt"));) {//NON-NLS
String line = log2TimeLineOutpout.readLine();
while (cancelled == false && nonNull(line)) {
statusHelper.progress(line);
writer.write(line);
writer.newLine();
line = log2TimeLineOutpout.readLine();
}
writer.flush();
} catch (IOException ex) {
logger.log(Level.WARNING, "Error reading log2timeline output stream.", ex);//NON-NLS
}
}
@Override
public boolean cancel() {
cancelled = true;
return true;
}
}
}
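A minimal usage sketch for L2TStatusProcessor follows (an assumption about how the module wires it to the log2timeline process, not code taken from this diff; the log2TimeLineCommand, statusHelper, and moduleOutputPath names are hypothetical placeholders):

Process l2tProcess = new ProcessBuilder(log2TimeLineCommand)
        .redirectErrorStream(true)
        .start();
try (BufferedReader l2tOutput = new BufferedReader(
        new InputStreamReader(l2tProcess.getInputStream(), StandardCharsets.UTF_8))) {
    L2TStatusProcessor statusReader = new L2TStatusProcessor(l2tOutput, statusHelper, moduleOutputPath);
    new Thread(statusReader, "log2timeline-output-reader").start();   // stream output while the process runs
    l2tProcess.waitFor();
    statusReader.cancel();   // stop the reader once log2timeline has exited
}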

View File

@ -0,0 +1,112 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* A factory that creates data source ingest modules that run Plaso against an
* image and save the Plaso storage file to the module output folder.
*/
@ServiceProvider(service = IngestModuleFactory.class)
@NbBundle.Messages({"PlasoModuleFactory.ingestJobSettings.exception.msg=Expected settings argument to be instanceof PlasoModuleSettings"})
public class PlasoModuleFactory implements IngestModuleFactory {
@NbBundle.Messages({"PlasoModuleFactory_moduleName=Plaso"})
static String getModuleName() {
return Bundle.PlasoModuleFactory_moduleName();
}
@Override
public String getModuleDisplayName() {
return getModuleName();
}
@NbBundle.Messages({"PlasoModuleFactory_moduleDesc=Runs Plaso against a Data Source."})
@Override
public String getModuleDescription() {
return Bundle.PlasoModuleFactory_moduleDesc();
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public boolean isDataSourceIngestModuleFactory() {
return true;
}
@Override
public DataSourceIngestModule createDataSourceIngestModule(IngestModuleIngestJobSettings settings) {
assert settings instanceof PlasoModuleSettings;
if (settings instanceof PlasoModuleSettings) {
return new PlasoIngestModule((PlasoModuleSettings) settings);
}
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
}
@Override
public boolean hasGlobalSettingsPanel() {
return false;
}
@Override
public IngestModuleGlobalSettingsPanel getGlobalSettingsPanel() {
throw new UnsupportedOperationException();
}
@Override
public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
return new PlasoModuleSettings();
}
@Override
public boolean hasIngestJobSettingsPanel() {
return true;
}
@Override
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
assert settings instanceof PlasoModuleSettings;
if (settings instanceof PlasoModuleSettings) {
return new PlasoModuleSettingsPanel((PlasoModuleSettings) settings);
}
throw new IllegalArgumentException(Bundle.PlasoModuleFactory_ingestJobSettings_exception_msg());
}
@Override
public boolean isFileIngestModuleFactory() {
return false;
}
@Override
public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) {
throw new UnsupportedOperationException();
}
}

View File

@ -0,0 +1,92 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import com.google.common.collect.ImmutableMap;
import java.util.HashMap;
import java.util.Map;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
/**
* Settings for the Plaso Ingest Module.
*/
public class PlasoModuleSettings implements IngestModuleIngestJobSettings {
private static final long serialVersionUID = 1L;
/** Map from parser name (or match pattern) to its enabled state. */
final Map<String, Boolean> parsers = new HashMap<>();
/**
* Get an immutable map from parser name to its enabled state. Parsers
* mapped to true or with no entry will be enabled. Parsers mapped to false
* will be disabled.
*/
Map<String, Boolean> getParsers() {
return ImmutableMap.copyOf(parsers);
}
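/* A minimal illustrative sketch (an assumption, not code from this module): assuming
 * log2timeline's usual "--parsers" syntax, where a leading "!" excludes a parser, a
 * caller could turn this map into a command-line argument like so:
 *
 *   String parsersArg = "--parsers=" + getParsers().entrySet().stream()
 *           .filter(entry -> entry.getValue() == false)
 *           .map(entry -> "!" + entry.getKey())
 *           .collect(java.util.stream.Collectors.joining(","));
 *
 * For the defaults set in the constructor below, this would yield something like
 * "--parsers=!winreg,!pe,!chrome_cache,...".
 */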
/**
* Constructor. The PlasoModuleSettings will have the default parsers
* (winreg, pe, chrome, firefox, internet explorer) disabled.
*/
public PlasoModuleSettings() {
parsers.put("winreg", false);
parsers.put("pe", false);
//chrome
parsers.put("chrome_preferences", false);
parsers.put("chrome_cache", false);
parsers.put("chrome_27_history", false);
parsers.put("chrome_8_history", false);
parsers.put("chrome_cookies", false);
parsers.put("chrome_extension_activity", false);
//firefox
parsers.put("firefox_cache", false);
parsers.put("firefox_cache2", false);
parsers.put("firefox_cookies", false);
parsers.put("firefox_downloads", false);
parsers.put("firefox_history", false);
//Internet Explorer
parsers.put("msiecf", false);
parsers.put("msie_webcache", false);
}
/**
* Gets the serialization version number.
*
* @return A serialization version number.
*/
@Override
public long getVersionNumber() {
return serialVersionUID;
}
/**
* Set the given parser enabled/disabled
*
* @param parserName The name of the parser to enable/disable
* @param selected The new state (enabled/disabled) for the given parser.
*/
void setParserEnabled(String parserName, boolean selected) {
parsers.put(parserName, selected);
}
}

View File

@ -0,0 +1,104 @@
<?xml version="1.0" encoding="UTF-8" ?>
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
<NonVisualComponents>
<Component class="javax.swing.JFileChooser" name="jFileChooser1">
</Component>
</NonVisualComponents>
<AuxValues>
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
<AuxValue name="designerSize" type="java.awt.Dimension" value="-84,-19,0,5,115,114,0,18,106,97,118,97,46,97,119,116,46,68,105,109,101,110,115,105,111,110,65,-114,-39,-41,-84,95,68,20,2,0,2,73,0,6,104,101,105,103,104,116,73,0,5,119,105,100,116,104,120,112,0,0,1,-68,0,0,1,-36"/>
</AuxValues>
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout"/>
<SubComponents>
<Component class="javax.swing.JCheckBox" name="winRegCheckBox">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.winRegCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="winRegCheckBoxActionPerformed"/>
</Events>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="2" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="15" insetsBottom="5" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JCheckBox" name="peCheckBox">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.peCheckBox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Events>
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="peCheckBoxActionPerformed"/>
</Events>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="3" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="0" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JTextArea" name="plasoParserInfoTextArea">
<Properties>
<Property name="editable" type="boolean" value="false"/>
<Property name="background" type="java.awt.Color" editor="org.netbeans.beaninfo.editors.ColorEditor">
<Color blue="f0" green="f0" id="Panel.background" palette="3" red="f0" type="palette"/>
</Property>
<Property name="columns" type="int" value="20"/>
<Property name="lineWrap" type="boolean" value="true"/>
<Property name="rows" type="int" value="1"/>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.plasoParserInfoTextArea.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="wrapStyleWord" type="boolean" value="true"/>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[160, 50]"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="1" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="5" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JLabel" name="noteLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.noteLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="0" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="9" insetsLeft="15" insetsBottom="9" insetsRight="15" anchor="18" weightX="1.0" weightY="0.0"/>
</Constraint>
</Constraints>
</Component>
<Component class="javax.swing.JLabel" name="disabledNoteLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/modules/plaso/Bundle.properties" key="PlasoModuleSettingsPanel.disabledNoteLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout" value="org.netbeans.modules.form.compat2.layouts.DesignGridBagLayout$GridBagConstraintsDescription">
<GridBagConstraints gridX="0" gridY="4" gridWidth="1" gridHeight="1" fill="2" ipadX="0" ipadY="0" insetsTop="5" insetsLeft="15" insetsBottom="0" insetsRight="15" anchor="18" weightX="1.0" weightY="1.0"/>
</Constraint>
</Constraints>
</Component>
</SubComponents>
</Form>

View File

@ -0,0 +1,146 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.modules.plaso;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* Settings panel for the PlasoIngestModule.
*/
public class PlasoModuleSettingsPanel extends IngestModuleIngestJobSettingsPanel {
private final PlasoModuleSettings settings;
public PlasoModuleSettingsPanel(PlasoModuleSettings settings) {
this.settings = settings;
initComponents();
}
/** This method is called from within the constructor to initialize the
* form. WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
java.awt.GridBagConstraints gridBagConstraints;
jFileChooser1 = new javax.swing.JFileChooser();
winRegCheckBox = new javax.swing.JCheckBox();
peCheckBox = new javax.swing.JCheckBox();
plasoParserInfoTextArea = new javax.swing.JTextArea();
noteLabel = new javax.swing.JLabel();
disabledNoteLabel = new javax.swing.JLabel();
setLayout(new java.awt.GridBagLayout());
org.openide.awt.Mnemonics.setLocalizedText(winRegCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.winRegCheckBox.text")); // NOI18N
winRegCheckBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
winRegCheckBoxActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 2;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 15, 5, 15);
add(winRegCheckBox, gridBagConstraints);
org.openide.awt.Mnemonics.setLocalizedText(peCheckBox, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.peCheckBox.text")); // NOI18N
peCheckBox.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
peCheckBoxActionPerformed(evt);
}
});
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 3;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(0, 15, 9, 15);
add(peCheckBox, gridBagConstraints);
plasoParserInfoTextArea.setEditable(false);
plasoParserInfoTextArea.setBackground(javax.swing.UIManager.getDefaults().getColor("Panel.background"));
plasoParserInfoTextArea.setColumns(20);
plasoParserInfoTextArea.setLineWrap(true);
plasoParserInfoTextArea.setRows(1);
plasoParserInfoTextArea.setText(org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.plasoParserInfoTextArea.text")); // NOI18N
plasoParserInfoTextArea.setWrapStyleWord(true);
plasoParserInfoTextArea.setBorder(null);
plasoParserInfoTextArea.setPreferredSize(new java.awt.Dimension(160, 50));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 1;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 15, 9, 15);
add(plasoParserInfoTextArea, gridBagConstraints);
org.openide.awt.Mnemonics.setLocalizedText(noteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.noteLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 0;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.insets = new java.awt.Insets(9, 15, 9, 15);
add(noteLabel, gridBagConstraints);
org.openide.awt.Mnemonics.setLocalizedText(disabledNoteLabel, org.openide.util.NbBundle.getMessage(PlasoModuleSettingsPanel.class, "PlasoModuleSettingsPanel.disabledNoteLabel.text")); // NOI18N
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 0;
gridBagConstraints.gridy = 4;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
gridBagConstraints.weightx = 1.0;
gridBagConstraints.weighty = 1.0;
gridBagConstraints.insets = new java.awt.Insets(5, 15, 0, 15);
add(disabledNoteLabel, gridBagConstraints);
}// </editor-fold>//GEN-END:initComponents
private void winRegCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_winRegCheckBoxActionPerformed
settings.setParserEnabled("winreg", winRegCheckBox.isSelected());
}//GEN-LAST:event_winRegCheckBoxActionPerformed
private void peCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_peCheckBoxActionPerformed
settings.setParserEnabled("pe", peCheckBox.isSelected());
}//GEN-LAST:event_peCheckBoxActionPerformed
@Override
public IngestModuleIngestJobSettings getSettings() {
return settings;
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel disabledNoteLabel;
private javax.swing.JFileChooser jFileChooser1;
private javax.swing.JLabel noteLabel;
private javax.swing.JCheckBox peCheckBox;
private javax.swing.JTextArea plasoParserInfoTextArea;
private javax.swing.JCheckBox winRegCheckBox;
// End of variables declaration//GEN-END:variables
}

View File

@ -1,15 +1,15 @@
/*
* Autopsy Forensic Browser
*
*
* Copyright 2013-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@ -18,18 +18,23 @@
*/
package org.sleuthkit.autopsy.modules.stix;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.logging.Level;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
@ -38,18 +43,20 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
class StixArtifactData {
private static final String MODULE_NAME = "Stix";
private AbstractFile file;
private final String observableId;
private final String objType;
private static final Logger logger = Logger.getLogger(StixArtifactData.class.getName());
public StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
StixArtifactData(AbstractFile a_file, String a_observableId, String a_objType) {
file = a_file;
observableId = a_observableId;
objType = a_objType;
}
public StixArtifactData(long a_objId, String a_observableId, String a_objType) {
StixArtifactData(long a_objId, String a_observableId, String a_objType) {
try {
Case case1 = Case.getCurrentCaseThrows();
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
@ -62,39 +69,35 @@ class StixArtifactData {
}
@Messages({"StixArtifactData.indexError.message=Failed to index STIX interesting file hit artifact for keyword search.",
"StixArtifactData.noOpenCase.errMsg=No open case available."})
"StixArtifactData.noOpenCase.errMsg=No open case available."})
public void createArtifact(String a_title) throws TskCoreException {
Case currentCase;
Blackboard blackboard;
try {
currentCase = Case.getCurrentCaseThrows();
blackboard = Case.getCurrentCaseThrows().getSleuthkitCase().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_noOpenCase_errMsg(), ex.getLocalizedMessage());
return;
}
String setName;
if (a_title != null) {
setName = "STIX Indicator - " + a_title; //NON-NLS
} else {
setName = "STIX Indicator - (no title)"; //NON-NLS
}
Collection<BlackboardAttribute> attributes = new ArrayList<>();
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME, "Stix", setName)); //NON-NLS
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, "Stix", observableId)); //NON-NLS
attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CATEGORY, "Stix", objType)); //NON-NLS
org.sleuthkit.datamodel.Blackboard tskBlackboard = currentCase.getSleuthkitCase().getBlackboard();
String setName = "STIX Indicator - " + StringUtils.defaultIfBlank(a_title, "(no title)"); //NON-NLS
Collection<BlackboardAttribute> attributes = Arrays.asList(
new BlackboardAttribute(TSK_SET_NAME, MODULE_NAME, setName),
new BlackboardAttribute(TSK_TITLE, MODULE_NAME, observableId),
new BlackboardAttribute(TSK_CATEGORY, MODULE_NAME, objType));
// Create artifact if it doesn't already exist.
if (!tskBlackboard.artifactExists(file, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact bba = file.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
if (!blackboard.artifactExists(file, TSK_INTERESTING_FILE_HIT, attributes)) {
BlackboardArtifact bba = file.newArtifact(TSK_INTERESTING_FILE_HIT);
bba.addAttributes(attributes);
try {
// index the artifact for keyword search
Blackboard blackboard = currentCase.getServices().getBlackboard();
blackboard.indexArtifact(bba);
/*
* post the artifact which will index the artifact for keyword
* search, and fire an event to notify UI of this new artifact
*/
blackboard.postArtifact(bba, MODULE_NAME);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + bba.getArtifactID(), ex); //NON-NLS
MessageNotifyUtil.Notify.error(Bundle.StixArtifactData_indexError_message(), bba.getDisplayName());

View File

@ -41,7 +41,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.datamodel.utils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.autopsy.coreutils.FileTypeUtils.FileTypeCategory;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;

View File

@ -23,7 +23,7 @@ import java.util.List;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
@ -65,7 +65,7 @@ final class CustomArtifactType {
* @throws BlackboardException If there is an error adding any of the types.
*/
static void addToCaseDatabase() throws Blackboard.BlackboardException, NoCurrentCaseException {
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getArtifactsBlackboard();
artifactType = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAME, ARTIFACT_DISPLAY_NAME);
intAttrType = blackboard.getOrAddAttributeType(INT_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER, INT_ATTR_DISPLAY_NAME);
doubleAttrType = blackboard.getOrAddAttributeType(DOUBLE_ATTR_TYPE_NAME, BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE, DOUBLE_ATTR_DISPLAY_NAME);

View File

@ -21,11 +21,11 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

View File

@ -21,10 +21,10 @@ package org.sleuthkit.autopsy.test;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.TskCoreException;

View File

@ -26,17 +26,17 @@ import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
/**
* A file ingest module that creates some interestng artifacts
* A file ingest module that creates some interesting artifacts
* with attributes based on files for test purposes.
*/
@NbBundle.Messages({
@ -55,7 +55,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
try {
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getArtifactsBlackboard();
artifactType = blackboard.getOrAddArtifactType(INT_ARTIFACT_TYPE_NAME, INT_ARTIFACT_DISPLAY_NAME);
} catch (Blackboard.BlackboardException | NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.InterestingArtifactCreatorIngestModule_exceptionMessage_errorCreatingCustomType(), ex);
@ -77,7 +77,7 @@ final class InterestingArtifactCreatorIngestModule extends FileIngestModuleAdapt
* type.
*/
int randomArtIndex = (int) (Math.random() * 3);
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getBlackboard();
Blackboard blackboard = Case.getCurrentCaseThrows().getServices().getArtifactsBlackboard();
BlackboardArtifact.Type artifactTypeBase = blackboard.getOrAddArtifactType(ARTIFACT_TYPE_NAMES[randomArtIndex], ARTIFACT_DISPLAY_NAMES[randomArtIndex]);
BlackboardArtifact artifactBase = file.newArtifact(artifactTypeBase.getTypeID());
Collection<BlackboardAttribute> baseAttributes = new ArrayList<>();

View File

@ -0,0 +1,8 @@
OptionsCategory_Name_Machine_Translation=Machine Translation
OptionsCategory_Keywords_Machine_Translation_Settings=Machine Translation Settings
TranslationContentPanel.ocrLabel.text=OCR:
TranslationOptionsPanelController.moduleErr=Module Error
TranslationOptionsPanelController.moduleErr.msg=A module caused an error listening to TranslationSettingsPanelController updates. See log to determine which module. Some data could be incomplete.
TranslationContentPanel.showLabel.text=Show:
TranslationOptionsPanel.translationServiceLabel.text=Text translator:
TranslationOptionsPanel.translationOptionsDescription.text=Configure a 3rd party text translation service to enable text and file name translation.

View File

@ -0,0 +1,12 @@
OptionsCategory_Name_Machine_Translation=Machine Translation
OptionsCategory_Keywords_Machine_Translation_Settings=Machine Translation Settings
TranslationContentPanel.ocrLabel.text=OCR:
TranslationOptionsPanel.noTextTranslators.text=No text translators exist, translation is disabled.
TranslationOptionsPanel.noTextTranslatorSelected.text=No text translator selected, translation is disabled.
TranslationOptionsPanel.textTranslatorsUnavailable.text=Unable to get selected text translator, translation is disabled.
TranslationOptionsPanel.translationDisabled.text=Translation disabled
TranslationOptionsPanelController.moduleErr=Module Error
TranslationOptionsPanelController.moduleErr.msg=A module caused an error listening to TranslationSettingsPanelController updates. See log to determine which module. Some data could be incomplete.
TranslationContentPanel.showLabel.text=Show:
TranslationOptionsPanel.translationServiceLabel.text=Text translator:
TranslationOptionsPanel.translationOptionsDescription.text=Configure a 3rd party text translation service to enable text and file name translation.

View File

@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.Optional;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.core.UserPreferences;
import javax.annotation.concurrent.GuardedBy;
/**
* Performs a lookup for a TextTranslator service provider and if present, will
@ -33,6 +34,7 @@ public final class TextTranslationService {
private final static TextTranslationService tts = new TextTranslationService();
private final Collection<? extends TextTranslator> translators;
@GuardedBy("this")
private Optional<TextTranslator> selectedTranslator;
private TextTranslationService() {
@ -50,7 +52,7 @@ public final class TextTranslationService {
* Update the translator currently in use to match the one saved to the user
* preferences
*/
public void updateSelectedTranslator() {
synchronized void updateSelectedTranslator() {
String translatorName = UserPreferences.getTextTranslatorName();
for (TextTranslator translator : translators) {
if (translator.getName().equals(translatorName)) {
@ -75,7 +77,7 @@ public final class TextTranslationService {
* when specific translation
* implementations fail
*/
public String translate(String input) throws NoServiceProviderException, TranslationException {
public synchronized String translate(String input) throws NoServiceProviderException, TranslationException {
if (hasProvider()) {
return selectedTranslator.get().translate(input);
}
@ -92,7 +94,7 @@ public final class TextTranslationService {
*
* @throws NoServiceProviderException
*/
public TextTranslator getTranslatorByName(String translatorName) throws NoServiceProviderException {
TextTranslator getTranslatorByName(String translatorName) throws NoServiceProviderException {
for (TextTranslator translator : translators) {
if (translator.getName().equals(translatorName)) {
return translator;
@ -107,7 +109,7 @@ public final class TextTranslationService {
*
* @return an unmodifiable collection of TextTranslators
*/
public Collection<? extends TextTranslator> getTranslators() {
Collection<? extends TextTranslator> getTranslators() {
return Collections.unmodifiableCollection(translators);
}
@ -117,16 +119,16 @@ public final class TextTranslationService {
*
* @return
*/
public boolean hasProvider() {
public synchronized boolean hasProvider() {
return selectedTranslator.isPresent();
}
/**
* Returns the hard limit for translation request sizes.
*
* @return
* Gets the maximum number of characters allowed in a translation request.
*
* @return The maximum character count.
*/
public int getMaxPayloadSize() {
return selectedTranslator.get().getMaxPayloadSize();
public synchronized int getMaxTextChars() {
return selectedTranslator.get().getMaxTextChars();
}
}
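A minimal caller-side sketch of the revised service API (an assumption based only on the methods visible in this diff; getInstance() is referenced elsewhere in this commit, and the sample string is illustrative):

TextTranslationService translationService = TextTranslationService.getInstance();
if (translationService.hasProvider()) {
    try {
        String text = "bonjour tout le monde";
        // Respect the provider's character limit before submitting the request.
        if (text.length() <= translationService.getMaxTextChars()) {
            String translated = translationService.translate(text);
        }
    } catch (NoServiceProviderException | TranslationException ex) {
        // Fall back to the untranslated text.
    }
}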

View File

@ -18,7 +18,7 @@
*/
package org.sleuthkit.autopsy.texttranslation;
import java.awt.Component;
import javax.swing.JPanel;
/**
* Interface for creating text translators. Implementing classes will be picked
@ -45,22 +45,24 @@ public interface TextTranslator {
String getName();
/**
* Get the component to display on the settings options panel when this
* Get the JPanel to display on the settings options panel when this
* TextTranslator is selected
*
* @return the component which displays the settings options
* @return the panel which displays the settings options
*/
Component getComponent();
JPanel getSettingsPanel();
/**
* Save the settings as they have been modified in the component.
*/
void saveSettings();
/**
* Returns the hard limit for translation request sizes.
* Saves the current state of the settings in the settings panel.
*
* @return
* @throws TranslationConfigException
*/
int getMaxPayloadSize();
void saveSettings() throws TranslationConfigException;
/**
* Gets the maximum number of characters allowed in a translation request.
*
* @return The maximum character count.
*/
int getMaxTextChars();
}
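For reference, a bare-bones implementation of the revised interface might look like the sketch below (a hypothetical no-op translator; the translate() signature is assumed from the way TextTranslationService calls it in this commit):

@ServiceProvider(service = TextTranslator.class)
public final class NoOpTranslator implements TextTranslator {

    @Override
    public String translate(String input) throws TranslationException {
        return input;   // no-op: return the text unchanged
    }

    @Override
    public String getName() {
        return "No-op translator";
    }

    @Override
    public JPanel getSettingsPanel() {
        return new JPanel();   // nothing to configure
    }

    @Override
    public void saveSettings() throws TranslationConfigException {
        // nothing to persist
    }

    @Override
    public int getMaxTextChars() {
        return Integer.MAX_VALUE;
    }
}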

View File

@ -0,0 +1,48 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.texttranslation;
/**
* Instances of this exception class are thrown when there is an error
* configuring text translation.
*/
public class TranslationConfigException extends Exception {
private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with the specified message.
*
* @param message The message.
*/
public TranslationConfigException(String message) {
super(message);
}
/**
* Constructs a new exception with the specified message and cause.
*
* @param message The message.
* @param cause The cause.
*/
public TranslationConfigException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@ -22,13 +22,8 @@ package org.sleuthkit.autopsy.texttranslation;
* Provides a system exception for Text Translation errors
*/
public class TranslationException extends Exception {
/**
* Constructs a new exception with null as its message.
*/
public TranslationException() {
super();
}
private static final long serialVersionUID = 1L;
/**
* Constructs a new exception with the specified message.

View File

@ -61,7 +61,7 @@
<Component class="javax.swing.JLabel" name="translationServiceLabel">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/texttranslation/ui/Bundle.properties" key="TranslationOptionsPanel.translationServiceLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
<ResourceString bundle="org/sleuthkit/autopsy/texttranslation/Bundle.properties" key="TranslationOptionsPanel.translationServiceLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
</Component>
@ -72,7 +72,7 @@
<Component class="javax.swing.JLabel" name="translationOptionsDescription">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/texttranslation/ui/Bundle.properties" key="TranslationOptionsPanel.translationOptionsDescription.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
<ResourceString bundle="org/sleuthkit/autopsy/texttranslation/Bundle.properties" key="TranslationOptionsPanel.translationOptionsDescription.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
</Component>

View File

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.texttranslation.ui;
package org.sleuthkit.autopsy.texttranslation;
import java.awt.BorderLayout;
import java.awt.Color;
@ -27,14 +27,12 @@ import java.util.logging.Level;
import javax.swing.JLabel;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.core.UserPreferences;
import org.sleuthkit.autopsy.texttranslation.NoServiceProviderException;
import org.sleuthkit.autopsy.texttranslation.TextTranslationService;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Options panel to display translation options
*/
public class TranslationOptionsPanel extends javax.swing.JPanel {
final class TranslationOptionsPanel extends javax.swing.JPanel {
private final static Logger logger = Logger.getLogger(TranslationOptionsPanel.class.getName());
private static final long serialVersionUID = 1L;
@ -45,7 +43,7 @@ public class TranslationOptionsPanel extends javax.swing.JPanel {
* Creates new form TranslationOptionsPanel
*/
@Messages({"TranslationOptionsPanel.translationDisabled.text=Translation disabled"})
public TranslationOptionsPanel(TranslationOptionsPanelController theController) {
TranslationOptionsPanel(TranslationOptionsPanelController theController) {
initComponents();
controller = theController;
translatorComboBox.addItem(Bundle.TranslationOptionsPanel_translationDisabled_text());
@ -78,7 +76,7 @@ public class TranslationOptionsPanel extends javax.swing.JPanel {
translationServicePanel.removeAll();
if (translatorComboBox.getSelectedItem() != null && !translatorComboBox.getSelectedItem().toString().equals(Bundle.TranslationOptionsPanel_translationDisabled_text())) {
try {
Component panel = TextTranslationService.getInstance().getTranslatorByName(translatorComboBox.getSelectedItem().toString()).getComponent();
Component panel = TextTranslationService.getInstance().getTranslatorByName(translatorComboBox.getSelectedItem().toString()).getSettingsPanel();
panel.addPropertyChangeListener(new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
@ -126,7 +124,7 @@ public class TranslationOptionsPanel extends javax.swing.JPanel {
if (currentSelection != null && !currentSelection.equals(Bundle.TranslationOptionsPanel_translationDisabled_text())) {
try {
TextTranslationService.getInstance().getTranslatorByName(currentSelection).saveSettings();
} catch (NoServiceProviderException ex) {
} catch (NoServiceProviderException | TranslationConfigException ex) {
logger.log(Level.WARNING, "Unable to save settings for TextTranslator named: " + currentSelection, ex);
}
}
@ -172,7 +170,7 @@ public class TranslationOptionsPanel extends javax.swing.JPanel {
.addGap(10, 10, 10)
.addComponent(translatorComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, 214, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addComponent(translationOptionsDescription, javax.swing.GroupLayout.DEFAULT_SIZE, 462, Short.MAX_VALUE))
.addComponent(translationOptionsDescription, javax.swing.GroupLayout.PREFERRED_SIZE, 462, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(

View File

@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.texttranslation.ui;
package org.sleuthkit.autopsy.texttranslation;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;

View File

@ -26,11 +26,12 @@ import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.RequestBody;
import com.squareup.okhttp.Response;
import java.awt.Component;
import java.io.IOException;
import javax.swing.JPanel;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.texttranslation.TextTranslator;
import org.sleuthkit.autopsy.texttranslation.TranslationConfigException;
import org.sleuthkit.autopsy.texttranslation.TranslationException;
/**
@ -132,12 +133,12 @@ public class BingTranslator implements TextTranslator {
}
@Override
public Component getComponent() {
public JPanel getSettingsPanel() {
return settingsPanel;
}
@Override
public void saveSettings() {
public void saveSettings() throws TranslationConfigException {
settings.setAuthenticationKey(settingsPanel.getAuthenticationKey());
settings.setTargetLanguageCode(settingsPanel.getTargetLanguageCode());
settings.saveSettings();
@ -173,7 +174,7 @@ public class BingTranslator implements TextTranslator {
}
@Override
public int getMaxPayloadSize() {
public int getMaxTextChars() {
return MAX_STRING_LENGTH;
}
}

View File

@ -23,7 +23,6 @@ import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.translate.Translate;
import com.google.cloud.translate.TranslateOptions;
import com.google.cloud.translate.Translation;
import java.awt.Component;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
@ -31,12 +30,14 @@ import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.logging.Level;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.apache.commons.lang3.StringUtils;
import org.openide.util.NbBundle.Messages;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.texttranslation.TextTranslator;
import org.sleuthkit.autopsy.texttranslation.TranslationConfigException;
import org.sleuthkit.autopsy.texttranslation.TranslationException;
/**
@ -133,7 +134,7 @@ public final class GoogleTranslator implements TextTranslator {
}
@Override
public Component getComponent() {
public JPanel getSettingsPanel() {
return settingsPanel;
}
@ -172,7 +173,7 @@ public final class GoogleTranslator implements TextTranslator {
}
@Override
public void saveSettings() {
public void saveSettings() throws TranslationConfigException {
settings.setTargetLanguageCode(settingsPanel.getTargetLanguageCode());
settings.setCredentialPath(settingsPanel.getCredentialsPath());
settings.saveSettings();
@ -180,7 +181,7 @@ public final class GoogleTranslator implements TextTranslator {
}
@Override
public int getMaxPayloadSize() {
public int getMaxTextChars() {
return MAX_PAYLOAD_SIZE;
}
}

View File

@ -1,8 +1,6 @@
OptionsCategory_Name_Machine_Translation=Machine Translation
OptionsCategory_Keywords_Machine_Translation_Settings=Machine Translation Settings
TranslationContentPanel.ocrLabel.text=OCR:
TranslationOptionsPanel.translationServiceLabel.text=Text translator:
TranslationOptionsPanelController.moduleErr=Module Error
TranslationOptionsPanelController.moduleErr.msg=A module caused an error listening to TranslationSettingsPanelController updates. See log to determine which module. Some data could be incomplete.
TranslationOptionsPanel.translationOptionsDescription.text=Configure a 3rd party text translation service to enable text and file name translation.
TranslationContentPanel.showLabel.text=Show:

View File

@ -17,12 +17,6 @@ TranslatedTextViewer.title=Translation
TranslatedTextViewer.toolTip=Displays translated file text.
TranslationContentPanel.autoDetectOCR=Autodetect language
TranslationContentPanel.ocrLabel.text=OCR:
TranslationOptionsPanel.noTextTranslators.text=No text translators exist, translation is disabled.
TranslationOptionsPanel.noTextTranslatorSelected.text=No text translator selected, translation is disabled.
TranslationOptionsPanel.textTranslatorsUnavailable.text=Unable to get selected text translator, translation is disabled.
TranslationOptionsPanel.translationDisabled.text=Translation disabled
TranslationOptionsPanel.translationServiceLabel.text=Text translator:
TranslationOptionsPanelController.moduleErr=Module Error
TranslationOptionsPanelController.moduleErr.msg=A module caused an error listening to TranslationSettingsPanelController updates. See log to determine which module. Some data could be incomplete.
TranslationOptionsPanel.translationOptionsDescription.text=Configure a 3rd party text translation service to enable text and file name translation.
TranslationContentPanel.showLabel.text=Show:

View File

@ -96,7 +96,7 @@ public final class TranslatedTextViewer implements TextViewer {
}
}
int payloadMaxInKB = TextTranslationService.getInstance().getMaxPayloadSize() / 1000;
int payloadMaxInKB = TextTranslationService.getInstance().getMaxTextChars() / 1000;
panel.setWarningLabelMsg(String.format(Bundle.TranslatedTextViewer_maxPayloadSize(), payloadMaxInKB));
//Force a background task.

View File

View File

@ -2,6 +2,8 @@ CTL_MakeTimeline=Timeline
CTL_TimeLineTopComponentAction=TimeLineTopComponent
CTL_TimeLineTopComponent=Timeline
FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval.
FilteredEventsModel.timeRangeProperty.errorTitle=Timeline
OpenTimelineAction.displayName=Timeline
OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources.
OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.
@ -9,7 +11,7 @@ PrompDialogManager.buttonType.continueNoUpdate=Continue Without Updating
PrompDialogManager.buttonType.showTimeline=Continue
PrompDialogManager.buttonType.update=Update DB
PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?
PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.
PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.
PromptDialogManager.progressDialog.title=Populating Timeline Data
PromptDialogManager.rebuildPrompt.details=Details
PromptDialogManager.rebuildPrompt.headerText=The Timeline DB is incomplete and/or out of date. Some events may be missing or inaccurate and some features may be unavailable.
@ -25,6 +27,7 @@ ShowInTimelineDialog.fileTitle=View {0} in timeline.
ShowInTimelineDialog.showTimelineButtonType.text=Show Timeline
Timeline.dialogs.title=\ Timeline
Timeline.frameName.text={0} - Autopsy Timeline
Timeline.old.version=\ This Case was created with an older version of Autopsy.\nThe Timeline will not show events from data sources added with the older version of Autopsy
Timeline.resultsPanel.title=Timeline Results
Timeline.runJavaFxThread.progress.creating=Creating timeline . . .
Timeline.zoomOutButton.text=Zoom Out
@ -32,16 +35,6 @@ Timeline.goToButton.text=Go To:
Timeline.yearBarChart.x.years=Years
Timeline.resultPanel.loading=Loading...
TimeLineController.errorTitle=Timeline error.
TimeLineController.outOfDate.errorMessage=Error determining if the timeline is out of date. We will assume it should be updated. See the logs for more details.
TimeLineController.rebuildReasons.incompleteOldSchema=The Timeline events database was previously populated with incomplete information: Some features may be unavailable or non-functional unless you update the events database.
TimeLineController.rebuildReasons.ingestWasRunning=The Timeline events database was previously populated while ingest was running: Some events may be missing, incomplete, or inaccurate.
TimeLineController.rebuildReasons.outOfDate=The event data is out of date: Not all events will be visible.
TimeLineController.rebuildReasons.outOfDateError=Could not determine if the timeline data is out of date.
TimeLineController.setEventsDBStale.errMsgNotStale=Failed to mark the timeline db as not stale. Some results may be out of date or missing.
TimeLineController.setEventsDBStale.errMsgStale=Failed to mark the timeline db as stale. Some results may be out of date or missing.
TimeLinecontroller.setIngestRunning.errMsgNotRunning=Failed to mark the timeline db as populated while ingest was not running. Some results may be out of date or missing.
TimeLineController.setIngestRunning.errMsgRunning=Failed to mark the timeline db as populated while ingest was running. Some results may be out of date or missing.
TimeLinecontroller.updateNowQuestion=Do you want to update the events database now?
TimelineFrame.title=Timeline
TimelinePanel.jButton1.text=6m

View File

View File

View File

View File

View File

@ -0,0 +1,699 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import com.google.common.eventbus.EventBus;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
import javafx.beans.InvalidationListener;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.collections.ObservableSet;
import static org.apache.commons.collections4.CollectionUtils.emptyIfNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.BlackBoardArtifactTagDeletedEvent.DeletedBlackboardArtifactTagInfo;
import org.sleuthkit.autopsy.casemodule.events.ContentTagAddedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent;
import org.sleuthkit.autopsy.casemodule.events.ContentTagDeletedEvent.DeletedContentTagInfo;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.events.AutopsyEvent;
import org.sleuthkit.autopsy.timeline.events.RefreshRequestedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.events.TagsDeletedEvent;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.FilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.RootFilterState;
import org.sleuthkit.autopsy.timeline.ui.filtering.datamodel.TagsFilterState;
import org.sleuthkit.autopsy.timeline.utils.CacheLoaderImpl;
import org.sleuthkit.autopsy.timeline.utils.FilterUtils;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifactTag;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.ContentTag;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.Tag;
import org.sleuthkit.datamodel.TagName;
import org.sleuthkit.datamodel.TimelineManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEvent;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourceFilter;
import org.sleuthkit.datamodel.TimelineFilter.DataSourcesFilter;
import org.sleuthkit.datamodel.TimelineFilter.EventTypeFilter;
import org.sleuthkit.datamodel.TimelineFilter.FileTypesFilter;
import org.sleuthkit.datamodel.TimelineFilter.HashHitsFilter;
import org.sleuthkit.datamodel.TimelineFilter.HashSetFilter;
import org.sleuthkit.datamodel.TimelineFilter.HideKnownFilter;
import org.sleuthkit.datamodel.TimelineFilter.RootFilter;
import org.sleuthkit.datamodel.TimelineFilter.TagNameFilter;
import org.sleuthkit.datamodel.TimelineFilter.TagsFilter;
import org.sleuthkit.datamodel.TimelineFilter.TextFilter;
/**
* This class acts as the model for a TimelineView
*
* Views can register listeners on properties returned by methods.
*
* This class is implemented as a filtered view into an underlying
* TimelineManager.
*
* Maintainers, NOTE: as many methods as possible should cache their results so
* as to avoid unnecessary db calls through the TimelineManager -jm
*
* Concurrency Policy: TimelineManager is internally synchronized, so methods
* that only access the TimelineManager atomically do not need further
* synchronization. All other member state variables should only be accessed
with the intrinsic lock of the containing FilteredEventsModel held.
*
*/
public final class FilteredEventsModel {
private static final Logger logger = Logger.getLogger(FilteredEventsModel.class.getName());
private final TimelineManager eventManager;
private final Case autoCase;
private final EventBus eventbus = new EventBus("FilteredEventsModel_EventBus"); //NON-NLS
//Filter and zoom state
private final ReadOnlyObjectWrapper<RootFilterState> requestedFilter = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper<Interval> requestedTimeRange = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper<ZoomState> requestedZoomState = new ReadOnlyObjectWrapper<>();
private final ReadOnlyObjectWrapper< TimelineEventType.TypeLevel> requestedTypeZoom = new ReadOnlyObjectWrapper<>(TimelineEventType.TypeLevel.BASE_TYPE);
private final ReadOnlyObjectWrapper< TimelineEvent.DescriptionLevel> requestedLOD = new ReadOnlyObjectWrapper<>(TimelineEvent.DescriptionLevel.SHORT);
// end Filter and zoom state
//caches
private final LoadingCache<Object, Long> maxCache;
private final LoadingCache<Object, Long> minCache;
private final LoadingCache<Long, TimelineEvent> idToEventCache;
private final LoadingCache<ZoomState, Map<TimelineEventType, Long>> eventCountsCache;
/** Map from datasource id to datasource name. */
private final ObservableMap<Long, String> datasourcesMap = FXCollections.observableHashMap();
private final ObservableSet< String> hashSets = FXCollections.observableSet();
private final ObservableList<TagName> tagNames = FXCollections.observableArrayList();
// end caches
/**
* Make a DataSourceFilter from an entry from the datasourcesMap.
*
* @param dataSourceEntry A map entry from datasource id to datasource name.
*
* @return A new DataSourceFilter for the given datasourcesMap entry.
*/
private static DataSourceFilter newDataSourceFromMapEntry(Map.Entry<Long, String> dataSourceEntry) {
return new DataSourceFilter(dataSourceEntry.getValue(), dataSourceEntry.getKey());
}
public FilteredEventsModel(Case autoCase, ReadOnlyObjectProperty<ZoomState> currentStateProperty) throws TskCoreException {
this.autoCase = autoCase;
this.eventManager = autoCase.getSleuthkitCase().getTimelineManager();
populateFilterData();
//caches
idToEventCache = CacheBuilder.newBuilder()
.maximumSize(5000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(new CacheLoaderImpl<>(eventManager::getEventById));
eventCountsCache = CacheBuilder.newBuilder()
.maximumSize(1000L)
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(new CacheLoaderImpl<>(this::countEventsByType));
maxCache = CacheBuilder.newBuilder()
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMaxTime()));
minCache = CacheBuilder.newBuilder()
.build(new CacheLoaderImpl<>(ignored -> eventManager.getMinTime()));
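// When the set of data sources, hash sets, or tag names changes, rebuild the
// available filters and re-apply them to the current filter state.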
InvalidationListener filterSyncListener = observable -> {
RootFilterState rootFilter = filterProperty().get();
syncFilters(rootFilter);
requestedFilter.set(rootFilter.copyOf());
};
datasourcesMap.addListener(filterSyncListener);
hashSets.addListener(filterSyncListener);
tagNames.addListener(filterSyncListener);
requestedFilter.set(getDefaultFilter());
requestedZoomState.addListener(observable -> {
final ZoomState zoomState = requestedZoomState.get();
if (zoomState != null) {
synchronized (FilteredEventsModel.this) {
requestedTypeZoom.set(zoomState.getTypeZoomLevel());
requestedFilter.set(zoomState.getFilterState());
requestedTimeRange.set(zoomState.getTimeRange());
requestedLOD.set(zoomState.getDescriptionLOD());
}
}
});
requestedZoomState.bind(currentStateProperty);
}
/**
* Get the count of all events that fit the given zoom params, organized by
the EventType of the level specified in the zoomState
*
* @param zoomState The params that control what events to count and how to
* organize the returned map
*
* @return a map from event type (of the requested level) to event counts
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
private Map<TimelineEventType, Long> countEventsByType(ZoomState zoomState) throws TskCoreException {
if (zoomState.getTimeRange() == null) {
return Collections.emptyMap();
} else {
return eventManager.countEventsByType(zoomState.getTimeRange().getStartMillis() / 1000,
zoomState.getTimeRange().getEndMillis() / 1000,
zoomState.getFilterState().getActiveFilter(), zoomState.getTypeZoomLevel());
}
}
public TimelineManager getEventManager() {
return eventManager;
}
public SleuthkitCase getSleuthkitCase() {
return autoCase.getSleuthkitCase();
}
public Interval getBoundingEventsInterval(Interval timeRange, RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
return eventManager.getSpanningInterval(timeRange, filter, timeZone);
}
/**
* Readonly observable property for the current ZoomState
*
* @return A readonly observable property for the current ZoomState.
*/
synchronized public ReadOnlyObjectProperty<ZoomState> zoomStateProperty() {
return requestedZoomState.getReadOnlyProperty();
}
/**
* Get the current ZoomState
*
* @return The current ZoomState
*/
synchronized public ZoomState getZoomState() {
return requestedZoomState.get();
}
/**
* Update the data used to determine the available filters.
*/
synchronized private void populateFilterData() throws TskCoreException {
SleuthkitCase skCase = autoCase.getSleuthkitCase();
hashSets.addAll(eventManager.getHashSetNames());
//because there is no way to remove a datasource, we only add to this map.
for (DataSource ds : skCase.getDataSources()) {
datasourcesMap.putIfAbsent(ds.getId(), ds.getName());
}
//should this only be tags applied to files or event bearing artifacts?
tagNames.setAll(skCase.getTagNamesInUse());
}
/**
* "sync" the given root filter with the state of the casee: Disable filters
* for tags that are not in use in the case, and add new filters for tags,
* hashsets, and datasources, that don't have them. New filters are selected
* by default.
*
* @param rootFilterState the filter state to modify so it is consistent
* with the tags in use in the case
*/
public void syncFilters(RootFilterState rootFilterState) {
TagsFilterState tagsFilterState = rootFilterState.getTagsFilterState();
for (TagName tagName : tagNames) {
tagsFilterState.getFilter().addSubFilter(new TagNameFilter(tagName));
}
for (FilterState<? extends TagNameFilter> tagFilterState : rootFilterState.getTagsFilterState().getSubFilterStates()) {
// disable states for tag names that don't exist in case.
tagFilterState.setDisabled(tagNames.contains(tagFilterState.getFilter().getTagName()) == false);
}
DataSourcesFilter dataSourcesFilter = rootFilterState.getDataSourcesFilterState().getFilter();
datasourcesMap.entrySet().forEach(entry -> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(entry)));
HashHitsFilter hashSetsFilter = rootFilterState.getHashHitsFilterState().getFilter();
for (String hashSet : hashSets) {
hashSetsFilter.addSubFilter(new HashSetFilter(hashSet));
}
}
/**
* Get a read only view of the time range currently in view.
*
* @return A read only view of the time range currently in view.
*/
@NbBundle.Messages({
"FilteredEventsModel.timeRangeProperty.errorTitle=Timeline",
"FilteredEventsModel.timeRangeProperty.errorMessage=Error getting spanning interval."})
synchronized public ReadOnlyObjectProperty<Interval> timeRangeProperty() {
if (requestedTimeRange.get() == null) {
try {
requestedTimeRange.set(getSpanningInterval());
} catch (TskCoreException timelineCacheException) {
MessageNotifyUtil.Notify.error(Bundle.FilteredEventsModel_timeRangeProperty_errorTitle(),
Bundle.FilteredEventsModel_timeRangeProperty_errorMessage());
logger.log(Level.SEVERE, "Error getting spanning interval.", timelineCacheException);
}
}
return requestedTimeRange.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<TimelineEvent.DescriptionLevel> descriptionLODProperty() {
return requestedLOD.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<RootFilterState> filterProperty() {
return requestedFilter.getReadOnlyProperty();
}
synchronized public ReadOnlyObjectProperty<TimelineEventType.TypeLevel> eventTypeZoomProperty() {
return requestedTypeZoom.getReadOnlyProperty();
}
/**
* The time range currently in view.
*
* @return The time range currently in view.
*/
synchronized public Interval getTimeRange() {
return getZoomState().getTimeRange();
}
synchronized public TimelineEvent.DescriptionLevel getDescriptionLOD() {
return getZoomState().getDescriptionLOD();
}
synchronized public RootFilterState getFilterState() {
return getZoomState().getFilterState();
}
synchronized public TimelineEventType.TypeLevel getEventTypeZoom() {
return getZoomState().getTypeZoomLevel();
}
/** Get the default filter used at startup.
*
* @return the default filter used at startup
*/
public synchronized RootFilterState getDefaultFilter() {
DataSourcesFilter dataSourcesFilter = new DataSourcesFilter();
datasourcesMap.entrySet().forEach(dataSourceEntry
-> dataSourcesFilter.addSubFilter(newDataSourceFromMapEntry(dataSourceEntry)));
HashHitsFilter hashHitsFilter = new HashHitsFilter();
hashSets.stream().map(HashSetFilter::new).forEach(hashHitsFilter::addSubFilter);
TagsFilter tagsFilter = new TagsFilter();
tagNames.stream().map(TagNameFilter::new).forEach(tagsFilter::addSubFilter);
FileTypesFilter fileTypesFilter = FilterUtils.createDefaultFileTypesFilter();
return new RootFilterState(new RootFilter(new HideKnownFilter(),
tagsFilter,
hashHitsFilter,
new TextFilter(),
new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE),
dataSourcesFilter,
fileTypesFilter,
Collections.emptySet()));
}
public Interval getBoundingEventsInterval(DateTimeZone timeZone) throws TskCoreException {
return eventManager.getSpanningInterval(zoomStateProperty().get().getTimeRange(), getFilterState().getActiveFilter(), timeZone);
}
public TimelineEvent getEventById(Long eventID) throws TskCoreException {
try {
return idToEventCache.get(eventID);
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached event from ID", ex);
}
}
public Set<TimelineEvent> getEventsById(Collection<Long> eventIDs) throws TskCoreException {
Set<TimelineEvent> events = new HashSet<>();
for (Long id : eventIDs) {
events.add(getEventById(id));
}
return events;
}
/**
* Get a count of tag names applied to the given event IDs, as a map from
tag name display name to count of tag applications
*
* @param eventIDsWithTags the event ids to get the tag counts map for
*
* @return a map from tag name display name to count of applications
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
return eventManager.getTagCountsByTagName(eventIDsWithTags);
}
public List<Long> getEventIDs(Interval timeRange, FilterState<? extends TimelineFilter> filter) throws TskCoreException {
final Interval overlap;
RootFilter intersection;
synchronized (this) {
overlap = getSpanningInterval().overlap(timeRange);
intersection = getFilterState().intersect(filter).getActiveFilter();
}
return eventManager.getEventIDs(overlap, intersection);
}
/**
* Return the counts of events, per event type, that pass the requested filter
and are within the given time range.
*
* NOTE: this method does not change the requested time range
*
* @param timeRange
*
* @return
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Map<TimelineEventType, Long> getEventCounts(Interval timeRange) throws TskCoreException {
final RootFilterState filter;
final TimelineEventType.TypeLevel typeZoom;
synchronized (this) {
filter = getFilterState();
typeZoom = getEventTypeZoom();
}
try {
return eventCountsCache.get(new ZoomState(timeRange, typeZoom, filter, null));
} catch (ExecutionException executionException) {
throw new TskCoreException("Error getting cached event counts.`1", executionException);
}
}
/**
* @return The smallest interval spanning all the events from the case,
* ignoring any filters or requested ranges.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Interval getSpanningInterval() throws TskCoreException {
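// Event times are stored in seconds; convert to millis for the Joda Interval and
// pad the end by one second so events in the last recorded second fall inside it.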
return new Interval(getMinTime() * 1000, 1000 + getMaxTime() * 1000);
}
/**
* Get the smallest interval spanning all the given events.
*
* @param eventIDs The IDs of the events to get a spanning interval around.
*
* @return the smallest interval spanning all the given events
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
return eventManager.getSpanningInterval(eventIDs);
}
/**
* @return the time (in seconds from the Unix epoch) of the first
event available from the repository, ignoring any filters or
requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMinTime() throws TskCoreException {
try {
return minCache.get("min"); // NON-NLS
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached min time.", ex);
}
}
/**
* @return the time (in seconds from the Unix epoch) of the last
event available from the repository, ignoring any filters or
requested ranges
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Long getMaxTime() throws TskCoreException {
try {
return maxCache.get("max"); // NON-NLS
} catch (ExecutionException ex) {
throw new TskCoreException("Error getting cached max time.", ex);
}
}
synchronized public boolean handleContentTagAdded(ContentTagAddedEvent evt) throws TskCoreException {
ContentTag contentTag = evt.getAddedTag();
Content content = contentTag.getContent();
Set<Long> updatedEventIDs = addTag(content.getId(), null, contentTag);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleArtifactTagAdded(BlackBoardArtifactTagAddedEvent evt) throws TskCoreException {
BlackboardArtifactTag artifactTag = evt.getAddedTag();
BlackboardArtifact artifact = artifactTag.getArtifact();
Set<Long> updatedEventIDs = addTag(artifact.getObjectID(), artifact.getArtifactID(), artifactTag);
return postTagsAdded(updatedEventIDs);
}
synchronized public boolean handleContentTagDeleted(ContentTagDeletedEvent evt) throws TskCoreException {
DeletedContentTagInfo deletedTagInfo = evt.getDeletedTagInfo();
Content content = autoCase.getSleuthkitCase().getContentById(deletedTagInfo.getContentID());
boolean tagged = autoCase.getServices().getTagsManager().getContentTagsByContent(content).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(content.getId(), null, deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
synchronized public boolean handleArtifactTagDeleted(BlackBoardArtifactTagDeletedEvent evt) throws TskCoreException {
DeletedBlackboardArtifactTagInfo deletedTagInfo = evt.getDeletedTagInfo();
BlackboardArtifact artifact = autoCase.getSleuthkitCase().getBlackboardArtifact(deletedTagInfo.getArtifactID());
boolean tagged = autoCase.getServices().getTagsManager().getBlackboardArtifactTagsByArtifact(artifact).isEmpty() == false;
Set<Long> updatedEventIDs = deleteTag(artifact.getObjectID(), artifact.getArtifactID(), deletedTagInfo.getTagID(), tagged);
return postTagsDeleted(updatedEventIDs);
}
/**
* Get a Set of event IDs for the events that are derived from the given
* file.
*
* @param file The AbstractFile to get derived event IDs
* for.
* @param includeDerivedArtifacts If true, also get event IDs for events
derived from artifacts derived from this
* file. If false, only gets events derived
* directly from this file (file system
* timestamps).
*
* @return A Set of event IDs for the events that are derived from the given
* file.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public Set<Long> getEventIDsForFile(AbstractFile file, boolean includeDerivedArtifacts) throws TskCoreException {
return eventManager.getEventIDsForFile(file, includeDerivedArtifacts);
}
/**
* Get a List of event IDs for the events that are derived from the given
* artifact.
*
* @param artifact The BlackboardArtifact to get derived event IDs for.
*
* @return A List of event IDs for the events that are derived from the
* given artifact.
*
* @throws org.sleuthkit.datamodel.TskCoreException
*/
public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
return eventManager.getEventIDsForArtifact(artifact);
}
/**
* Post a TagsAddedEvent to all registered subscribers, if the given set of
* updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsAddedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsAdded(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsAddedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Post a TagsDeletedEvent to all registered subscribers, if the given set
* of updated event IDs is not empty.
*
* @param updatedEventIDs The set of event ids to be included in the
* TagsDeletedEvent.
*
* @return True if an event was posted.
*/
private boolean postTagsDeleted(Set<Long> updatedEventIDs) {
boolean tagsUpdated = !updatedEventIDs.isEmpty();
if (tagsUpdated) {
eventbus.post(new TagsDeletedEvent(updatedEventIDs));
}
return tagsUpdated;
}
/**
* Register the given object to receive events.
*
* @param subscriber The object to register. Must implement public methods
* annotated with Subscribe.
*/
synchronized public void registerForEvents(Object subscriber) {
eventbus.register(subscriber);
}
/**
* Un-register the given object, so it no longer receives events.
*
* @param subscriber The object to un-register.
*/
synchronized public void unRegisterForEvents(Object subscriber) {
eventbus.unregister(subscriber);
}
/**
* Post a RefreshRequestedEvent to all registered subscribers.
*/
public void postRefreshRequest() {
eventbus.post(new RefreshRequestedEvent());
}
/**
* (Re)Post an AutopsyEvent received from another event distribution system
* locally to all registered subscribers.
*
* @param event The event to re-post.
*/
public void postAutopsyEventLocally(AutopsyEvent event) {
eventbus.post(event);
}
public ImmutableList<TimelineEventType> getEventTypes() {
return eventManager.getEventTypes();
}
synchronized public Set<Long> addTag(long objID, Long artifactID, Tag tag) throws TskCoreException {
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, true);
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
synchronized public Set<Long> deleteTag(long objID, Long artifactID, long tagID, boolean tagged) throws TskCoreException {
Set<Long> updatedEventIDs = eventManager.setEventsTagged(objID, artifactID, tagged);
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
synchronized public Set<Long> setHashHit(Collection<BlackboardArtifact> artifacts, boolean hasHashHit) throws TskCoreException {
Set<Long> updatedEventIDs = new HashSet<>();
for (BlackboardArtifact artifact : artifacts) {
updatedEventIDs.addAll(eventManager.setEventsHashed(artifact.getObjectID(), hasHashHit));
}
if (isNotEmpty(updatedEventIDs)) {
invalidateCaches(updatedEventIDs);
}
return updatedEventIDs;
}
/**
* Invalidate the timeline caches for the given event IDs. Also forces the
filter values to be updated with any new values from the case data (data
sources, tags, etc.).
*
* @param updatedEventIDs A collection of the event IDs whose cached event
* objects should be invalidated. Can be null or an
empty set to invalidate the general caches, such
* as min/max time, or the counts per event type.
*
* @throws TskCoreException
*/
public synchronized void invalidateCaches(Collection<Long> updatedEventIDs) throws TskCoreException {
minCache.invalidateAll();
maxCache.invalidateAll();
idToEventCache.invalidateAll(emptyIfNull(updatedEventIDs));
eventCountsCache.invalidateAll();
populateFilterData();
eventbus.post(new CacheInvalidatedEvent());
}
/**
* Event fired when a cache has been invalidated. The UI should make it
* clear that the view is potentially out of date and present an action to
* refresh the view.
*/
public static class CacheInvalidatedEvent {
private CacheInvalidatedEvent() {
}
}
}
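
The class comment above describes the intended usage: views observe the read-only properties and register with the internal event bus. Below is a minimal, hypothetical sketch of such a consumer, not part of this commit; the class name EventCountSketch is illustrative only, but every call it makes (registerForEvents, zoomStateProperty, getZoomState, getEventCounts, and the @Subscribe handler for TagsAddedEvent) is defined in FilteredEventsModel above.

package org.sleuthkit.autopsy.timeline;

import com.google.common.eventbus.Subscribe;
import java.util.Map;
import java.util.logging.Level;
import org.joda.time.Interval;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.events.TagsAddedEvent;
import org.sleuthkit.autopsy.timeline.zooming.ZoomState;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Hypothetical consumer of FilteredEventsModel, for illustration only; it is
 * not part of this commit.
 */
class EventCountSketch {

    private static final Logger logger = Logger.getLogger(EventCountSketch.class.getName());
    private final FilteredEventsModel eventsModel;

    EventCountSketch(FilteredEventsModel eventsModel) {
        this.eventsModel = eventsModel;
        // Receive TagsAddedEvent, TagsDeletedEvent, CacheInvalidatedEvent, etc.
        eventsModel.registerForEvents(this);
        // Recompute whenever the zoom state (time range, filters, type level) changes.
        eventsModel.zoomStateProperty().addListener(observable -> logCounts());
    }

    @Subscribe
    public void handleTagsAdded(TagsAddedEvent event) {
        logCounts();
    }

    private void logCounts() {
        ZoomState zoomState = eventsModel.getZoomState();
        if (zoomState == null) {
            return;
        }
        try {
            Interval timeRange = zoomState.getTimeRange();
            Map<TimelineEventType, Long> counts = eventsModel.getEventCounts(timeRange);
            logger.log(Level.INFO, "{0} event types in the current view", counts.size());
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Failed to get event counts.", ex);
        }
    }
}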

View File

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,15 +19,19 @@
package org.sleuthkit.autopsy.timeline;
/**
*
* The org.openide.modules.OnStart annotation tells NetBeans to invoke this
* class's run method.
*/
public class TimeLineException extends Exception {
@org.openide.modules.OnStart
public class OnStart implements Runnable {
public TimeLineException(String string, Exception e) {
super(string, e);
}
public TimeLineException(String string) {
super(string);
/**
* This method is invoked by virtue of the OnStart annotation on this
class.
*/
@Override
public void run() {
TimeLineModule.onStart();
}
}

View File

@ -19,7 +19,6 @@
package org.sleuthkit.autopsy.timeline;
import java.awt.Component;
import java.io.IOException;
import java.util.logging.Level;
import javafx.application.Platform;
import javax.swing.ImageIcon;
@ -47,10 +46,13 @@ import org.sleuthkit.datamodel.TskCoreException;
* An Action that opens the Timeline window. Has methods to open the window in
* various specific states (e.g., showing a specific artifact in the List View)
*/
@ActionID(category = "Tools", id = "org.sleuthkit.autopsy.timeline.Timeline")
@ActionRegistration(displayName = "#CTL_MakeTimeline", lazy = false)
@ActionReferences(value = {
@ActionReference(path = "Menu/Tools", position = 102),
@ActionReference(path = "Menu/Tools", position = 102)
,
@ActionReference(path = "Toolbars/Case", position = 102)})
public final class OpenTimelineAction extends CallableSystemAction {
@ -58,19 +60,10 @@ public final class OpenTimelineAction extends CallableSystemAction {
private static final Logger logger = Logger.getLogger(OpenTimelineAction.class.getName());
private static final int FILE_LIMIT = 6_000_000;
private static TimeLineController timeLineController = null;
private final JMenuItem menuItem;
private final JButton toolbarButton = new JButton(getName(),
new ImageIcon(getClass().getResource("images/btn_icon_timeline_colorized_26.png"))); //NON-NLS
/**
* Invalidate the reference to the controller so that a new one will be
* instantiated the next time this action is invoked
*/
synchronized static void invalidateController() {
timeLineController = null;
}
public OpenTimelineAction() {
toolbarButton.addActionListener(actionEvent -> performAction());
@ -93,24 +86,24 @@ public final class OpenTimelineAction extends CallableSystemAction {
public void performAction() {
if (tooManyFiles()) {
Platform.runLater(PromptDialogManager::showTooManyFiles);
synchronized (OpenTimelineAction.this) {
if (timeLineController != null) {
timeLineController.shutDownTimeLine();
}
}
setEnabled(false);
}else if("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
} else if ("false".equals(ModuleSettings.getConfigSetting("timeline", "enable_timeline"))) {
Platform.runLater(PromptDialogManager::showTimeLineDisabledMessage);
setEnabled(false);
}else {
showTimeline();
} else {
try {
showTimeline();
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
logger.log(Level.SEVERE, "Error showingtimeline.", ex);
}
}
}
@NbBundle.Messages({
"OpenTimelineAction.settingsErrorMessage=Failed to initialize timeline settings.",
"OpenTimeLineAction.msgdlg.text=Could not create timeline, there are no data sources."})
synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) {
synchronized private void showTimeline(AbstractFile file, BlackboardArtifact artifact) throws TskCoreException {
try {
Case currentCase = Case.getCurrentCaseThrows();
if (currentCase.hasData() == false) {
@ -118,20 +111,8 @@ public final class OpenTimelineAction extends CallableSystemAction {
logger.log(Level.INFO, "Could not create timeline, there are no data sources.");// NON-NLS
return;
}
try {
if (timeLineController == null) {
timeLineController = new TimeLineController(currentCase);
} else if (timeLineController.getAutopsyCase() != currentCase) {
timeLineController.shutDownTimeLine();
timeLineController = new TimeLineController(currentCase);
}
timeLineController.showTimeLine(file, artifact);
} catch (IOException iOException) {
MessageNotifyUtil.Message.error(Bundle.OpenTimelineAction_settingsErrorMessage());
logger.log(Level.SEVERE, "Failed to initialize per case timeline settings.", iOException);
}
TimeLineController controller = TimeLineModule.getController();
controller.showTimeLine(file, artifact);
} catch (NoCurrentCaseException e) {
//there is no case... Do nothing.
}
@ -141,7 +122,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
* Open the Timeline window with the default initial view.
*/
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
public void showTimeline() {
public void showTimeline() throws TskCoreException {
showTimeline(null, null);
}
@ -153,7 +134,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
* @param file The AbstractFile to show in the Timeline.
*/
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
public void showFileInTimeline(AbstractFile file) {
public void showFileInTimeline(AbstractFile file) throws TskCoreException {
showTimeline(file, null);
}
@ -164,7 +145,7 @@ public final class OpenTimelineAction extends CallableSystemAction {
* @param artifact The BlackboardArtifact to show in the Timeline.
*/
@ThreadConfined(type = ThreadConfined.ThreadType.AWT)
public void showArtifactInTimeline(BlackboardArtifact artifact) {
public void showArtifactInTimeline(BlackboardArtifact artifact) throws TskCoreException {
showTimeline(null, artifact);
}

View File

@ -1,175 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2016-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.Properties;
import org.apache.commons.lang3.StringUtils;
import org.sleuthkit.autopsy.casemodule.Case;
/**
* Provides access to per-case timeline properties (key-value store).
*/
class PerCaseTimelineProperties {
private static final String STALE_KEY = "stale"; //NON-NLS
private static final String WAS_INGEST_RUNNING_KEY = "was_ingest_running"; // NON-NLS
private final Path propertiesPath;
PerCaseTimelineProperties(Case autopsyCase) {
Objects.requireNonNull(autopsyCase, "Case must not be null");
propertiesPath = Paths.get(autopsyCase.getModuleDirectory(), "Timeline", "timeline.properties"); //NON-NLS
}
/**
* Is the DB stale, i.e. does it need to be updated because new datasources
(for example) have been added to the case.
*
* @return true if the db is stale
*
* @throws IOException if there is a problem reading the state from disk
*/
public synchronized boolean isDBStale() throws IOException {
String stale = getProperty(STALE_KEY);
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
}
/**
* record the state of the events db as stale(true) or not stale(false).
*
* @param stale the new state of the event db. true for stale, false for not
* stale.
*
* @throws IOException if there was a problem writing the state to disk.
*/
public synchronized void setDbStale(Boolean stale) throws IOException {
setProperty(STALE_KEY, stale.toString());
}
/**
* Was ingest running the last time the database was updated?
*
* @return true if ingest was running the last time the db was updated
*
* @throws IOException if there was a problem reading from disk
*/
public synchronized boolean wasIngestRunning() throws IOException {
String stale = getProperty(WAS_INGEST_RUNNING_KEY);
return StringUtils.isBlank(stale) ? true : Boolean.valueOf(stale);
}
/**
* record whether ingest was running during the last time the database was
* updated
*
* @param ingestRunning true if ingest was running
*
* @throws IOException if there was a problem writing to disk
*/
public synchronized void setIngestRunning(Boolean ingestRunning) throws IOException {
setProperty(WAS_INGEST_RUNNING_KEY, ingestRunning.toString());
}
/**
* Get a {@link Path} to the properties file. If the file does not exist, it
* will be created.
*
* @return the Path to the properties file.
*
* @throws IOException if there was a problem creating the properties file
*/
private synchronized Path getPropertiesPath() throws IOException {
if (!Files.exists(propertiesPath)) {
Path parent = propertiesPath.getParent();
Files.createDirectories(parent);
Files.createFile(propertiesPath);
}
return propertiesPath;
}
/**
* Returns the property with the given key.
*
* @param propertyKey - The property key to get the value for.
*
* @return - the value associated with the property.
*
* @throws IOException if there was a problem reading the property from disk
*/
private synchronized String getProperty(String propertyKey) throws IOException {
return getProperties().getProperty(propertyKey);
}
/**
* Sets the given property to the given value.
*
* @param propertyKey - The key of the property to be modified.
* @param propertyValue - the value to set the property to.
*
* @throws IOException if there was a problem writing the property to disk
*/
private synchronized void setProperty(String propertyKey, String propertyValue) throws IOException {
Path propertiesFile = getPropertiesPath();
Properties props = getProperties(propertiesFile);
props.setProperty(propertyKey, propertyValue);
try (OutputStream fos = Files.newOutputStream(propertiesFile)) {
props.store(fos, ""); //NON-NLS
}
}
/**
* Get a {@link Properties} object used to store the timeline properties.
*
* @return a properties object
*
* @throws IOException if there was a problem reading the .properties file
*/
private synchronized Properties getProperties() throws IOException {
return getProperties(getPropertiesPath());
}
/**
* Gets a {@link Properties} object populated from the given .properties
* file.
*
* @param propertiesFile a path to the .properties file to load
*
* @return a properties object
*
* @throws IOException if there was a problem reading the .properties file
*/
private synchronized Properties getProperties(final Path propertiesFile) throws IOException {
try (InputStream inputStream = Files.newInputStream(propertiesFile)) {
Properties props = new Properties();
props.load(inputStream);
return props;
}
}
}

View File

@ -152,7 +152,7 @@ public final class PromptDialogManager {
* @return True if they want to continue anyways.
*/
@NbBundle.Messages({
"PromptDialogManager.confirmDuringIngest.headerText=You are trying to update the Timeline DB before ingest has been completed. The Timeline DB may be incomplete.",
"PromptDialogManager.confirmDuringIngest.headerText=Ingest is still going, and the Timeline may be incomplete.",
"PromptDialogManager.confirmDuringIngest.contentText=Do you want to continue?"})
@ThreadConfined(type = ThreadConfined.ThreadType.JFX)
boolean confirmDuringIngest() {
@ -235,5 +235,4 @@ public final class PromptDialogManager {
dialog.setHeaderText(Bundle.PromptDialogManager_showTimeLineDisabledMessage_headerText());
dialog.showAndWait();
}
}

View File

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2011-2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -25,10 +25,12 @@ import java.time.Instant;
import java.time.temporal.ChronoField;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.stream.Collectors;
import javafx.beans.binding.Bindings;
import javafx.beans.property.SimpleObjectProperty;
import javafx.fxml.FXML;
@ -58,14 +60,15 @@ import org.controlsfx.validation.Validator;
import org.joda.time.Interval;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.timeline.datamodel.SingleEvent;
import org.sleuthkit.autopsy.timeline.datamodel.eventtype.EventType;
import org.sleuthkit.autopsy.timeline.events.ViewInTimelineRequestedEvent;
import org.sleuthkit.autopsy.timeline.ui.EventTypeUtils;
import org.sleuthkit.autopsy.timeline.utils.IntervalUtils;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType;
import org.sleuthkit.datamodel.TimelineEvent;
/**
* A Dialog that, given an AbstractFile or BlackBoardArtifact, allows the user
@ -93,13 +96,13 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
ChronoField.SECOND_OF_MINUTE);
@FXML
private TableView<SingleEvent> eventTable;
private TableView<TimelineEvent> eventTable;
@FXML
private TableColumn<SingleEvent, EventType> typeColumn;
private TableColumn<TimelineEvent, TimelineEventType> typeColumn;
@FXML
private TableColumn<SingleEvent, Long> dateTimeColumn;
private TableColumn<TimelineEvent, Long> dateTimeColumn;
@FXML
private Spinner<Integer> amountSpinner;
@ -112,8 +115,6 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
private final VBox contentRoot = new VBox();
private final TimeLineController controller;
private final ValidationSupport validationSupport = new ValidationSupport();
/**
@ -124,10 +125,8 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
* from.
*/
@NbBundle.Messages({
"ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."
})
private ShowInTimelineDialog(TimeLineController controller, List<Long> eventIDS) {
this.controller = controller;
"ShowInTimelineDialog.amountValidator.message=The entered amount must only contain digits."})
private ShowInTimelineDialog(TimeLineController controller, Collection<Long> eventIDS) throws TskCoreException {
//load dialog content fxml
final String name = "nbres:/" + StringUtils.replace(ShowInTimelineDialog.class.getPackage().getName(), ".", "/") + "/ShowInTimelineDialog.fxml"; // NON-NLS
@ -195,7 +194,16 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
dateTimeColumn.setCellFactory(param -> new DateTimeTableCell<>());
//add events to table
eventTable.getItems().setAll(eventIDS.stream().map(controller.getEventsModel()::getEventById).collect(Collectors.toSet()));
Set<TimelineEvent> events = new HashSet<>();
FilteredEventsModel eventsModel = controller.getEventsModel();
for (Long eventID : eventIDS) {
try {
events.add(eventsModel.getEventById(eventID));
} catch (TskCoreException ex) {
throw new TskCoreException("Error getting event by id.", ex);
}
}
eventTable.getItems().setAll(events);
eventTable.setPrefHeight(Math.min(200, 24 * eventTable.getItems().size() + 28));
}
@ -207,7 +215,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
* @param artifact The BlackboardArtifact to configure this dialog for.
*/
@NbBundle.Messages({"ShowInTimelineDialog.artifactTitle=View Result in Timeline."})
ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) {
ShowInTimelineDialog(TimeLineController controller, BlackboardArtifact artifact) throws TskCoreException {
//get events IDs from artifact
this(controller, controller.getEventsModel().getEventIDsForArtifact(artifact));
@ -237,7 +245,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
@NbBundle.Messages({"# {0} - file path",
"ShowInTimelineDialog.fileTitle=View {0} in timeline.",
"ShowInTimelineDialog.eventSelectionValidator.message=You must select an event."})
ShowInTimelineDialog(TimeLineController controller, AbstractFile file) {
ShowInTimelineDialog(TimeLineController controller, AbstractFile file) throws TskCoreException {
this(controller, controller.getEventsModel().getEventIDsForFile(file, false));
/*
@ -293,11 +301,11 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
/**
* Construct this Dialog's "result" from the given event.
*
* @param selectedEvent The SingleEvent to include in the EventInTimeRange
* @param selectedEvent The TimelineEvent to include in the EventInTimeRange
*
* @return The EventInTimeRange that is the "result" of this dialog.
*/
private ViewInTimelineRequestedEvent makeEventInTimeRange(SingleEvent selectedEvent) {
private ViewInTimelineRequestedEvent makeEventInTimeRange(TimelineEvent selectedEvent) {
Duration selectedDuration = unitComboBox.getSelectionModel().getSelectedItem().getBaseUnit().getDuration().multipliedBy(amountSpinner.getValue());
Interval range = IntervalUtils.getIntervalAround(Instant.ofEpochMilli(selectedEvent.getStartMillis()), selectedDuration);
return new ViewInTimelineRequestedEvent(Collections.singleton(selectedEvent.getEventID()), range);
@ -341,14 +349,14 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
}
/**
* TableCell that shows a EventType including the associated icon.
* TableCell that shows a TimelineEventType including the associated icon.
*
* @param <X> Anything
*/
static private class TypeTableCell<X> extends TableCell<X, EventType> {
static private class TypeTableCell<X> extends TableCell<X, TimelineEventType> {
@Override
protected void updateItem(EventType item, boolean empty) {
protected void updateItem(TimelineEventType item, boolean empty) {
super.updateItem(item, empty);
if (item == null || empty) {
@ -356,7 +364,7 @@ final class ShowInTimelineDialog extends Dialog<ViewInTimelineRequestedEvent> {
setGraphic(null);
} else {
setText(item.getDisplayName());
setGraphic(new ImageView(item.getFXImage()));
setGraphic(new ImageView(EventTypeUtils.getImagePath(item)));
}
}
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,131 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.logging.Level;
import javafx.application.Platform;
import javax.swing.SwingUtilities;
import org.sleuthkit.autopsy.casemodule.Case;
import static org.sleuthkit.autopsy.casemodule.Case.Events.CURRENT_CASE;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.TskCoreException;
/**
* Manages listeners and the controller.
*
*/
public class TimeLineModule {
private static final Logger logger = Logger.getLogger(TimeLineModule.class.getName());
private static final Object controllerLock = new Object();
private static TimeLineController controller;
/**
* Provides static utilities; cannot be instantiated.
*/
private TimeLineModule() {
}
/**
* Get instance of the controller for the current case
*
* @return the controller for the current case.
*
* @throws NoCurrentCaseException If there is no case open.
* @throws TskCoreException If there was a problem accessing the case
* database.
*
*/
public static TimeLineController getController() throws NoCurrentCaseException, TskCoreException {
synchronized (controllerLock) {
if (controller == null) {
controller = new TimeLineController(Case.getCurrentCaseThrows());
}
return controller;
}
}
/**
* This method is invoked by virtue of the OnStart annotation on the OnStart
class.
*/
static void onStart() {
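// Keep the JavaFX runtime alive after the last JavaFX window closes, so the
// timeline window can be reopened later in the same session.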
Platform.setImplicitExit(false);
logger.info("Setting up TimeLine listeners"); //NON-NLS
IngestManager.getInstance().addIngestModuleEventListener(new IngestModuleEventListener());
Case.addPropertyChangeListener(new CaseEventListener());
}
/**
* Listener for case events.
*/
static private class CaseEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
getController().handleCaseEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
if (Case.Events.valueOf(evt.getPropertyName()).equals(CURRENT_CASE)) {
// we care only about case closing here
if (evt.getNewValue() == null) {
synchronized (controllerLock) {
if (controller != null) {
SwingUtilities.invokeLater(controller::shutDownTimeLine);
}
controller = null;
}
}
}
}
}
/**
* Listener for IngestModuleEvents
*/
static private class IngestModuleEventListener implements PropertyChangeListener {
@Override
public void propertyChange(PropertyChangeEvent evt) {
try {
getController().handleIngestModuleEvent(evt);
} catch (NoCurrentCaseException ex) {
// ignore
return;
} catch (TskCoreException ex) {
MessageNotifyUtil.Message.error("Error creating timeline controller.");
logger.log(Level.SEVERE, "Error creating timeline controller", ex);
}
}
}
}
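
For reference, a minimal, hypothetical caller, not part of this commit, showing how the controller is obtained through TimeLineModule and how the checked exceptions declared by getController() are handled; the showTimeLine(file, artifact) call mirrors the OpenTimelineAction changes above, and the class name ShowTimelineSketch is illustrative only.

package org.sleuthkit.autopsy.timeline;

import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.TskCoreException;

/**
 * Hypothetical caller of TimeLineModule.getController(), for illustration only.
 */
class ShowTimelineSketch {

    private static final Logger logger = Logger.getLogger(ShowTimelineSketch.class.getName());

    void openDefaultTimelineView() {
        AbstractFile file = null;           // no specific file to show
        BlackboardArtifact artifact = null; // no specific artifact to show
        try {
            // Lazily creates the controller for the current case on first use.
            TimeLineController controller = TimeLineModule.getController();
            controller.showTimeLine(file, artifact);
        } catch (NoCurrentCaseException ex) {
            // There is no open case; nothing to show.
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Error getting the timeline controller.", ex);
        }
    }
}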

View File

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.timeline;
import com.google.common.collect.ImmutableList;
import java.awt.BorderLayout;
import java.awt.Component;
import java.awt.KeyboardFocusManager;
@ -58,6 +59,7 @@ import org.openide.windows.RetainLocation;
import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.actions.AddBookmarkTagAction;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
import org.sleuthkit.autopsy.corecomponents.DataContentPanel;
@ -78,6 +80,7 @@ import org.sleuthkit.autopsy.timeline.ui.detailview.tree.EventsTree;
import org.sleuthkit.autopsy.timeline.ui.filtering.FilterSetPanel;
import org.sleuthkit.autopsy.timeline.zooming.ZoomSettingsPane;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.VersionNumber;
/**
* TopComponent for the Timeline feature.
@ -165,7 +168,9 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
*/
@Override
public void invalidated(Observable observable) {
List<Long> selectedEventIDs = controller.getSelectedEventIDs();
// make a copy because this list gets updated as the user navigates around
// and causes concurrent access exceptions
List<Long> selectedEventIDs = ImmutableList.copyOf(controller.getSelectedEventIDs());
//depending on the active view mode, we either update the dataResultPanel, or update the contentViewerPanel directly.
switch (controller.getViewMode()) {
@ -196,9 +201,6 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
contentViewerPanel.setNode(null);
}
});
} catch (NoCurrentCaseException ex) {
//Since the case is closed, the user probably doesn't care about this, just log it as a precaution.
logger.log(Level.SEVERE, "There was no case open to lookup the Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Failed to lookup Sleuthkit object backing a SingleEvent.", ex); // NON-NLS
Platform.runLater(() -> {
@ -254,10 +256,11 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
}
/**
* Constructs a "shell" version of the top component for the Timeline
* feature which has only Swing components, no controller, and no listeners.
* This constructor conforms to the NetBeans window system requirement that
* Constructs a "shell" version of the top component for this Timeline feature
* which has only Swing components, no controller, and no listeners.
* This constructor conforms to the NetBeans window system requirements that
* all top components have a public, no argument constructor.
*
*/
public TimeLineTopComponent() {
initComponents();
@ -280,21 +283,21 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
dataResultPanel.open(); //get the explorermanager
contentViewerPanel.initialize();
}
/**
* Constructs a fully functional top component for the Timeline feature.
*
* @param controller The TimeLineController for this top component.
* Constructs a fully functional top component for the Timeline feature.
*
* @param controller The TimeLineController for this top component.
*/
public TimeLineTopComponent(TimeLineController controller) {
this();
this.controller = controller;
Platform.runLater(this::initFXComponents);
//set up listeners
TimeLineController.getTimeZone().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString()));
TimeLineController.timeZoneProperty().addListener(timeZone -> dataResultPanel.setPath(getResultViewerSummaryString()));
controller.getSelectedEventIDs().addListener(selectedEventsListener);
//Listen to ViewMode and adjust GUI components as needed.
@ -459,6 +462,9 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
private javax.swing.JSplitPane splitYPane;
// End of variables declaration//GEN-END:variables
@NbBundle.Messages ({
"Timeline.old.version= This Case was created with an older version of Autopsy.\nThe Timeline with not show events from data sources added with the older version of Autopsy"
})
@Override
public void componentOpened() {
super.componentOpened();
@ -467,6 +473,18 @@ public final class TimeLineTopComponent extends TopComponent implements Explorer
//add listener that maintains correct selection in the Global Actions Context
KeyboardFocusManager.getCurrentKeyboardFocusManager()
.addPropertyChangeListener("focusOwner", focusPropertyListener);
VersionNumber version = Case.getCurrentCase().getSleuthkitCase().getDBSchemaCreationVersion();
int major = version.getMajor();
int minor = version.getMinor();
if(major < 8 || (major == 8 && minor <= 2)) {
Platform.runLater(() -> {
Notifications.create()
.owner(jFXViewPanel.getScene().getWindow())
.text(Bundle.Timeline_old_version()).showInformation();
});
}
}
@Override

Core/src/org/sleuthkit/autopsy/timeline/ViewMode.java Normal file → Executable file
View File

View File

@ -1,9 +1,21 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline;
import javafx.scene.control.ListCell;

View File

@ -0,0 +1,319 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2019 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.actions;
import java.awt.Dialog;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import static java.util.Arrays.asList;
import java.util.List;
import java.util.Objects;
import java.util.logging.Level;
import javafx.application.Platform;
import javafx.embed.swing.JFXPanel;
import javafx.fxml.FXML;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.ButtonBase;
import javafx.scene.control.ButtonType;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.ComboBox;
import javafx.scene.control.DialogPane;
import javafx.scene.control.TextField;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.util.StringConverter;
import javax.swing.JDialog;
import javax.swing.SwingUtilities;
import jfxtras.scene.control.LocalDateTimeTextField;
import org.apache.commons.lang3.StringUtils;
import org.controlsfx.control.action.Action;
import org.controlsfx.control.textfield.TextFields;
import org.controlsfx.tools.ValueExtractor;
import org.controlsfx.validation.ValidationSupport;
import org.controlsfx.validation.Validator;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
import org.sleuthkit.autopsy.timeline.FXMLConstructor;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.datamodel.Blackboard;
import org.sleuthkit.datamodel.BlackboardArtifact;
import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
import org.sleuthkit.datamodel.BlackboardAttribute;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DESCRIPTION;
import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TimelineEventType;
/**
* Action that allows the user to manually create timeline events. It prompts
* the user for event data and then adds it to the case via an artifact.
*/
@NbBundle.Messages({
"AddManualEvent.text=Add Event",
"AddManualEvent.longText=Manually add an event to the timeline."})
public class AddManualEvent extends Action {
private final static Logger logger = Logger.getLogger(AddManualEvent.class.getName());
private static final String MANUAL_CREATION = "Manual Creation"; //NON-NLS
private static final Image ADD_EVENT_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/add.png", 16, 16, true, true, true); // NON-NLS
/**
* Initialize the custom value extractor used by the ValidationSupport for
* the LocalDateTimeTextField in the EventCreationDialogPane.
*/
static {
ValueExtractor.addObservableValueExtractor(LocalDateTimeTextField.class::isInstance,
control -> ((LocalDateTimeTextField) control).localDateTimeProperty());
}
/**
* Create an Action that allows the user to manually create timeline
* events. It prompts the user for event data with a dialog and then adds it
to the case via an artifact. The datetime in the dialog will be set to
* "now" when the action is invoked.
*
* @param controller The controller for this action to use.
*
*/
public AddManualEvent(TimeLineController controller) {
this(controller, null);
}
/**
* Create an Action that allows the user to manually create timeline
* events. It prompts the user for event data with a dialog and then adds it
* to the case via an artifact.
*
* @param controller The controller for this action to use.
* @param epochMillis The initial datetime to populate the dialog with. The
user can override this.
*/
public AddManualEvent(TimeLineController controller, Long epochMillis) {
super(Bundle.AddManualEvent_text());
setGraphic(new ImageView(ADD_EVENT_IMAGE));
setLongText(Bundle.AddManualEvent_longText());
setEventHandler(actionEvent -> SwingUtilities.invokeLater(() -> {
JEventCreationDialog dialog = new JEventCreationDialog(controller, epochMillis, SwingUtilities.windowForComponent(controller.getTopComponent()));
dialog.setVisible(true);
//actual event creation happens in the ok button listener.
}));
}
/**
* Use the supplied ManualEventInfo to make a TSK_TL_EVENT artifact which
* will trigger adding a TimelineEvent.
*
* @param eventInfo The ManualEventInfo with the info needed to create an
* event.
*
* @throws IllegalArgumentException
*/
@NbBundle.Messages({
"AddManualEvent.createArtifactFailed=Failed to create artifact for event.",
"AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard."})
private void addEvent(TimeLineController controller, ManualEventInfo eventInfo) throws IllegalArgumentException {
SleuthkitCase sleuthkitCase = controller.getEventsModel().getSleuthkitCase();
try {
//Use the current examiner's name plus a fixed string as the source / module name.
String source = MANUAL_CREATION + ": " + sleuthkitCase.getCurrentExaminer().getLoginName();
BlackboardArtifact artifact = sleuthkitCase.newBlackboardArtifact(TSK_TL_EVENT, eventInfo.datasource.getId());
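// These attributes supply the type, description, and timestamp for the
// timeline event that gets created for this TSK_TL_EVENT artifact.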
artifact.addAttributes(asList(
new BlackboardAttribute(
TSK_TL_EVENT_TYPE, source,
TimelineEventType.USER_CREATED.getTypeID()),
new BlackboardAttribute(
TSK_DESCRIPTION, source,
eventInfo.description),
new BlackboardAttribute(
TSK_DATETIME, source,
eventInfo.time)
));
try {
sleuthkitCase.getBlackboard().postArtifact(artifact, source);
} catch (Blackboard.BlackboardException ex) {
logger.log(Level.SEVERE, "Error posting artifact to the blackboard.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_postArtifactFailed(), ButtonType.OK).showAndWait();
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creatig new artifact.", ex); //NON-NLS
new Alert(Alert.AlertType.ERROR, Bundle.AddManualEvent_createArtifactFailed(), ButtonType.OK).showAndWait();
}
}
/**
* Subclass of JDialog used to display the JFXPanel with the event creation
* widgets.
*/
private final class JEventCreationDialog extends JDialog {
private final JFXPanel jfxPanel = new JFXPanel();
private JEventCreationDialog(TimeLineController controller, Long epochMillis, java.awt.Window owner) {
super(owner, Bundle.AddManualEvent_text(), Dialog.ModalityType.DOCUMENT_MODAL);
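//DOCUMENT_MODAL blocks input to the owning window hierarchy while this dialog is showing (explanatory note, not in the original source).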
setIconImages(owner.getIconImages());
setResizable(false);
add(jfxPanel);
// make and configure the JavaFX components.
Platform.runLater(() -> {
// Custom DialogPane defined below.
EventCreationDialogPane customPane = new EventCreationDialogPane(controller, epochMillis);
//cancel button just closes the dialog
((ButtonBase) customPane.lookupButton(ButtonType.CANCEL)).setOnAction(event -> dispose());
//configure ok button to pull ManualEventInfo object and add it to case.
((ButtonBase) customPane.lookupButton(ButtonType.OK)).setOnAction(event -> {
ManualEventInfo manualEventInfo = customPane.getManualEventInfo();
if (manualEventInfo != null) {
addEvent(controller, manualEventInfo);
}
dispose(); //close and dispose the dialog.
});
jfxPanel.setScene(new Scene(customPane));
customPane.installValidation();
SwingUtilities.invokeLater(() -> {
//size and position dialog on EDT
pack();
setLocationRelativeTo(owner);
});
});
}
/**
* The DialogPane that hosts the controls/widgets that allow the user to
* enter the event information.
*/
private class EventCreationDialogPane extends DialogPane {
@FXML
private ChoiceBox<DataSource> dataSourceChooser;
@FXML
private TextField descriptionTextField;
@FXML
private ComboBox<String> timeZoneChooser;
@FXML
private LocalDateTimeTextField timePicker;
private final List<String> timeZoneList = TimeZoneUtils.createTimeZoneList();
private final ValidationSupport validationSupport = new ValidationSupport();
private final TimeLineController controller;
private EventCreationDialogPane(TimeLineController controller, Long epochMillis) {
this.controller = controller;
FXMLConstructor.construct(this, "EventCreationDialog.fxml"); //NON-NLS
if (epochMillis == null) {
timePicker.setLocalDateTime(LocalDateTime.now());
} else {
timePicker.setLocalDateTime(LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), TimeLineController.getTimeZoneID()));
}
}
@FXML
@NbBundle.Messages({"# {0} - datasource name", "# {1} - datasource id",
"AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})",
"AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case."})
private void initialize() {
assert descriptionTextField != null : "fx:id=\"descriptionTextField\" was not injected: check your FXML file 'EventCreationDialog.fxml'.";//NON-NLS
timeZoneChooser.getItems().setAll(timeZoneList);
timeZoneChooser.getSelectionModel().select(TimeZoneUtils.createTimeZoneString(TimeLineController.getTimeZone()));
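//Offer auto-completion of time zone names as the user types in the combo box editor (explanatory note, not in the original source).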
TextFields.bindAutoCompletion(timeZoneChooser.getEditor(), timeZoneList);
dataSourceChooser.setConverter(new StringConverter<DataSource>() {
@Override
public String toString(DataSource dataSource) {
return Bundle.AddManualEvent_EventCreationDialogPane_dataSourceStringConverter_template(dataSource.getName(), dataSource.getId());
}
@Override
public DataSource fromString(String string) {
throw new UnsupportedOperationException(); // This method should never get called.
}
});
try {
dataSourceChooser.getItems().setAll(controller.getAutopsyCase().getSleuthkitCase().getDataSources());
dataSourceChooser.getSelectionModel().select(0);
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error getting datasources in case.", ex);//NON-NLS
SwingUtilities.invokeLater(() -> MessageNotifyUtil.Message.error(Bundle.AddManualEvent_EventCreationDialogPane_initialize_dataSourcesError()));
}
}
/**
* Install/Configure the ValidationSupport.
*/
@NbBundle.Messages({
"AddManualEvent.validation.description=Description is required.",
"AddManualEvent.validation.datetime=Invalid datetime",
"AddManualEvent.validation.timezone=Invalid time zone",})
private void installValidation() {
validationSupport.registerValidator(descriptionTextField, false,
Validator.createEmptyValidator(Bundle.AddManualEvent_validation_description()));
validationSupport.registerValidator(timePicker, false,
Validator.createPredicateValidator(Objects::nonNull, Bundle.AddManualEvent_validation_datetime()));
validationSupport.registerValidator(timeZoneChooser, false,
Validator.createPredicateValidator((String zone) -> timeZoneList.contains(zone.trim()), Bundle.AddManualEvent_validation_timezone()));
validationSupport.initInitialDecoration();
//The ok button is only enabled if all fields are validated.
lookupButton(ButtonType.OK).disableProperty().bind(validationSupport.invalidProperty());
}
/**
* Combine the user-entered data into a ManualEventInfo object.
*
* @return The ManualEventInfo containing the user entered event
* info.
*/
private ManualEventInfo getManualEventInfo() {
//Trim off the offset part of the string from the chooser, to get something that ZoneId can parse.
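//For example, a chooser value formatted like "(GMT-5:00) America/New_York" (format assumed from TimeZoneUtils) would be reduced to "America/New_York".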
String zone = StringUtils.substringAfter(timeZoneChooser.getValue(), ")").trim(); //NON-NLS
long toEpochSecond = timePicker.getLocalDateTime().atZone(ZoneId.of(zone)).toEpochSecond();
return new ManualEventInfo(dataSourceChooser.getValue(), descriptionTextField.getText(), toEpochSecond);
}
}
}
/**
* Info required from user to manually create a timeline event.
*/
private static class ManualEventInfo {
private final DataSource datasource;
private final String description;
private final long time;
private ManualEventInfo(DataSource datasource, String description, long time) {
this.datasource = datasource;
this.description = description;
this.time = time;
}
}
}
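A minimal usage sketch (an illustration under assumptions, not part of this change): the Action can be turned into a toolbar button with ControlsFX, given an existing TimeLineController named controller and a JavaFX ToolBar named timelineToolBar (both names hypothetical).

// Hypothetical wiring of AddManualEvent into a JavaFX toolbar via ControlsFX.
import javafx.scene.control.Button;
import javafx.scene.control.ToolBar;
import org.controlsfx.control.action.ActionUtils;

Button addEventButton = ActionUtils.createButton(new AddManualEvent(controller));
timelineToolBar.getItems().add(addEventButton);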

View File

@ -32,7 +32,7 @@ import org.sleuthkit.autopsy.timeline.TimeLineController;
//TODO: This and the corresponding imageanalyzer action are identical except for the type of the controller... abstract something! -jm
public class Back extends Action {
private static final Image BACK_IMAGE = new Image("/org/sleuthkit/autopsy/images/resultset_previous.png", 16, 16, true, true, true); // NON-NLS
private static final Image BACK_IMAGE = new Image("/org/sleuthkit/autopsy/timeline/images/arrow-180.png", 16, 16, true, true, true); // NON-NLS
private final TimeLineController controller;

View File

@ -1,3 +1,14 @@
AddManualEvent.createArtifactFailed=Failed to create artifact for event.
# {0} - datasource name
# {1} - datasource id
AddManualEvent.EventCreationDialogPane.dataSourceStringConverter.template={0} (ID: {1})
AddManualEvent.EventCreationDialogPane.initialize.dataSourcesError=Error getting datasources in case.
AddManualEvent.longText=Manually add an event to the timeline.
AddManualEvent.postArtifactFailed=Failed to post artifact to blackboard.
AddManualEvent.text=Add Event
AddManualEvent.validation.datetime=Invalid datetime
AddManualEvent.validation.description=Description is required.
AddManualEvent.validation.timezone=Invalid time zone
# {0} - action accelerator keys
Back.longText=Back: {0}\nGo back to the last view settings.
Back.text=Back
@ -10,8 +21,6 @@ OpenReportAction.MissingReportFileMessage=The report file no longer exists.
OpenReportAction.NoAssociatedEditorMessage=There is no associated editor for reports of this type or the associated application failed to launch.
OpenReportAction.NoOpenInEditorSupportMessage=This platform (operating system) does not support opening a file in an editor this way.
OpenReportAction.ReportFileOpenPermissionDeniedMessage=Permission to open the report file was denied.
RebuildDataBase.longText=Update the DB to include new events.
RebuildDataBase.text=Update DB
ResetFilters.text=Reset all filters
RestFilters.longText=Reset all filters to their default state.
SaveSnapShotAsReport.action.dialogs.title=Timeline
@ -32,8 +41,12 @@ ViewArtifactInTimelineAction.displayName=View Result in Timeline...
ViewFileInTimelineAction.viewFile.displayName=View File in Timeline...
ViewFileInTimelineAction.viewSourceFile.displayName=View Source File in Timeline...
ZoomIn.action.text=Zoom in
ZoomIn.errorMessage=Error zooming in.
ZoomIn.longText=Zoom in to view about half as much time.
ZoomOut.action.text=Zoom out
ZoomOut.disabledProperty.errorMessage=Error getting spanning interval.
ZoomOut.errorMessage=Error zooming out.
ZoomOut.longText=Zoom out to view about 50% more time.
ZoomToEvents.action.text=Zoom to events
ZoomToEvents.disabledProperty.errorMessage=Error getting spanning interval.
ZoomToEvents.longText=Zoom out to show the nearest events.

View File

Some files were not shown because too many files have changed in this diff.