Merge branch 'develop' of https://github.com/sleuthkit/autopsy into 3562-ZipPasswordUse

William Schaefer 2018-03-16 13:53:43 -04:00
commit 9c93b6bb5a
29 changed files with 347 additions and 201 deletions

View File

@@ -111,7 +111,7 @@ public class CorrelationAttributeInstance implements Serializable {
     public String toString() {
         return this.getID()
                 + this.getCorrelationCase().getCaseUUID()
-                + this.getCorrelationDataSource().getName()
+                + this.getCorrelationDataSource().getDeviceID()
                 + this.getFilePath()
                 + this.getKnownStatus()
                 + this.getComment();

View File

@@ -17,6 +17,9 @@
     <Layout class="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout"/>
     <SubComponents>
       <Container class="javax.swing.JSplitPane" name="jSplitPane1">
+        <Properties>
+          <Property name="dividerLocation" type="int" value="500"/>
+        </Properties>
         <Constraints>
           <Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout" value="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout$BorderConstraintsDescription">
             <BorderConstraints direction="Center"/>
@@ -31,6 +34,8 @@
             <JSplitPaneConstraints position="left"/>
           </Constraint>
         </Constraints>
+        <Layout class="org.netbeans.modules.form.compat2.layouts.support.JScrollPaneSupportLayout"/>
       </Container>
     </SubComponents>
   </Container>

View File

@@ -34,6 +34,7 @@ import org.openide.nodes.Children;
 import org.openide.util.Lookup;
 import org.openide.util.lookup.ProxyLookup;
 import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.datamodel.CommunicationsManager;
 import org.sleuthkit.datamodel.TskCoreException;
@@ -120,10 +121,12 @@ public final class AccountsBrowser extends JPanel implements ExplorerManager.Pro
     @Subscribe
     public void handleFilterEvent(CVTEvents.FilterChangeEvent filterChangeEvent) {
         try {
-            final CommunicationsManager commsManager = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager();
+            final CommunicationsManager commsManager = Case.getOpenCase().getSleuthkitCase().getCommunicationsManager();
             accountsTableEM.setRootContext(new AbstractNode(Children.create(new AccountDeviceInstanceNodeFactory(commsManager, filterChangeEvent.getNewFilter()), true)));
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "There was an error getting the CommunicationsManager for the current case.", ex);
+        } catch (NoCurrentCaseException ex) { //NOPMD empty catch clause
+            //Case is closed, do nothig.
         }
     }
@@ -141,6 +144,7 @@
         setLayout(new java.awt.BorderLayout());
+        jSplitPane1.setDividerLocation(500);
         jSplitPane1.setLeftComponent(outlineView);
         add(jSplitPane1, java.awt.BorderLayout.CENTER);
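
The change above is part of a pattern repeated throughout this commit: calls to Case.getCurrentCase() are replaced with Case.getOpenCase(), and a NoCurrentCaseException catch is added for the closed-case path. A minimal sketch of that pattern follows, using only the classes imported in the hunk above; the wrapper class and method names are assumptions for illustration and are not part of the commit:

    package org.sleuthkit.autopsy.examples; // hypothetical package, illustration only

    import java.util.logging.Level;
    import org.sleuthkit.autopsy.casemodule.Case;
    import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
    import org.sleuthkit.autopsy.coreutils.Logger;
    import org.sleuthkit.datamodel.CommunicationsManager;
    import org.sleuthkit.datamodel.TskCoreException;

    class OpenCaseClientSketch {

        private static final Logger logger = Logger.getLogger(OpenCaseClientSketch.class.getName());

        void refreshFromOpenCase() {
            try {
                // getOpenCase() reports a missing case with the checked NoCurrentCaseException,
                // which the caller handles explicitly instead of letting the event handler fail.
                CommunicationsManager commsManager = Case.getOpenCase().getSleuthkitCase().getCommunicationsManager();
                // ... use commsManager to rebuild the view ...
            } catch (TskCoreException ex) {
                logger.log(Level.SEVERE, "Error getting the CommunicationsManager for the current case.", ex);
            } catch (NoCurrentCaseException ex) { //NOPMD empty catch clause
                // Case is closed; nothing to refresh.
            }
        }
    }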

View File

@@ -129,7 +129,7 @@ final public class VisualizationPanel extends JPanel implements Lookup.Provider
     private final mxGraphComponent graphComponent;
     private final CommunicationsGraph graph;
-    private mxUndoManager undoManager = new mxUndoManager();
+    private final mxUndoManager undoManager = new mxUndoManager();
     private final mxRubberband rubberband;
     private final mxFastOrganicLayout fastOrganicLayout;
     private final mxCircleLayout circleLayout;

View File

@@ -1,3 +1 @@
 OpenIDE-Module-Name=CoreComponentInterfaces
-CoreComponentControl.CTL_DirectoryTreeTopComponent=Directory Tree
-CoreComponentControl.CTL_FavoritesTopComponent=Favorites

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-18 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,31 +19,30 @@
 package org.sleuthkit.autopsy.corecomponentinterfaces;

 import java.util.Collection;
-import java.util.Iterator;
-import java.util.Set;
 import java.util.logging.Level;
-import org.openide.util.NbBundle;
-import org.sleuthkit.autopsy.coreutils.Logger;
 import org.openide.util.Lookup;
+import org.openide.util.NbBundle;
 import org.openide.windows.Mode;
 import org.openide.windows.TopComponent;
 import org.openide.windows.WindowManager;
 import org.sleuthkit.autopsy.corecomponents.DataContentTopComponent;
+import org.sleuthkit.autopsy.coreutils.Logger;

 /**
  * Responsible for opening and closing the core windows when a case is opened
  * and closed.
  *
- * @author jantonius
  */
-public class CoreComponentControl {
+final public class CoreComponentControl {

     private static final Logger logger = Logger.getLogger(CoreComponentControl.class.getName());

-    private static final String DIRECTORY_TREE = NbBundle.getMessage(CoreComponentControl.class,
-            "CoreComponentControl.CTL_DirectoryTreeTopComponent");
-    private static final String FAVORITES = NbBundle.getMessage(CoreComponentControl.class,
-            "CoreComponentControl.CTL_FavoritesTopComponent");
+    @NbBundle.Messages("CoreComponentControl.CTL_DirectoryTreeTopComponent=Directory Tree")
+    private static final String DIRECTORY_TREE = Bundle.CoreComponentControl_CTL_DirectoryTreeTopComponent();
+    @NbBundle.Messages("CoreComponentControl.CTL_FavoritesTopComponent=Favorites")
+    private static final String FAVORITES = Bundle.CoreComponentControl_CTL_FavoritesTopComponent();
+
+    private CoreComponentControl() {
+    }

     /**
      * Opens all TopComponent windows that are needed
@@ -56,22 +55,22 @@ public class CoreComponentControl {
         Collection<? extends DataExplorer> dataExplorers = Lookup.getDefault().lookupAll(DataExplorer.class);
         for (DataExplorer de : dataExplorers) {
             TopComponent explorerWin = de.getTopComponent();
-            Mode m = WindowManager.getDefault().findMode("explorer"); //NON-NLS
-            if (m != null) {
-                m.dockInto(explorerWin); // redock into the explorer mode
-            } else {
+            Mode explorerMode = WindowManager.getDefault().findMode("explorer"); //NON-NLS
+            if (explorerMode == null) {
                 logger.log(Level.WARNING, "Could not find explorer mode and dock explorer window"); //NON-NLS
+            } else {
+                explorerMode.dockInto(explorerWin); // redock into the explorer mode
             }
             explorerWin.open(); // open that top component
         }

         // find the data content top component
         TopComponent contentWin = DataContentTopComponent.findInstance();
-        Mode m = WindowManager.getDefault().findMode("output"); //NON-NLS
-        if (m != null) {
-            m.dockInto(contentWin); // redock into the output mode
-        } else {
+        Mode outputMode = WindowManager.getDefault().findMode("output"); //NON-NLS
+        if (outputMode == null) {
             logger.log(Level.WARNING, "Could not find output mode and dock content window"); //NON-NLS
+        } else {
+            outputMode.dockInto(contentWin); // redock into the output mode
         }
         contentWin.open(); // open that top component
@@ -86,20 +85,15 @@
      * be thrown from JFXPanel.
      */
     public static void closeCoreWindows() {
-        WindowManager wm = WindowManager.getDefault();
-        Set<? extends Mode> modes = wm.getModes();
-        Iterator<? extends Mode> iter = wm.getModes().iterator();
         TopComponent directoryTree = null;
         TopComponent favorites = null;
-        String tcName = "";
-        while (iter.hasNext()) {
-            Mode mode = iter.next();
-            for (TopComponent tc : mode.getTopComponents()) {
-                tcName = tc.getName();
+        final WindowManager windowManager = WindowManager.getDefault();
+        for (Mode mode : windowManager.getModes()) {
+            for (TopComponent tc : windowManager.getOpenedTopComponents(mode)) {
+                String tcName = tc.getName();
                 if (tcName == null) {
                     logger.log(Level.INFO, "tcName was null"); //NON-NLS
-                    tcName = "";
                 }
                 // switch requires constant strings, so converted to if/else.
                 if (DIRECTORY_TREE.equals(tcName)) {

View File

@@ -185,7 +185,8 @@ public final class DataContentTopComponent extends TopComponent implements DataC
         } catch (NoCurrentCaseException ex) {
             return true;
         }
-        return (!this.isDefault) || openCase.hasData() == false;
+        return (this.isDefault == false) || (openCase.hasData() == false);
     }

     @Override

View File

@@ -407,19 +407,19 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
             ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileModifiedTime.name"),
                     NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileModifiedTime.displayName"),
                     "",
-                    file != null ? ContentUtils.getStringTime(file.getMtime(), file) : ""));
+                    file == null ? "" : ContentUtils.getStringTime(file.getMtime(), file)));
             ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileChangedTime.name"),
                     NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileChangedTime.displayName"),
                     "",
-                    file != null ? ContentUtils.getStringTime(file.getCtime(), file) : ""));
+                    file == null ? "" : ContentUtils.getStringTime(file.getCtime(), file)));
             ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileAccessedTime.name"),
                     NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileAccessedTime.displayName"),
                     "",
-                    file != null ? ContentUtils.getStringTime(file.getAtime(), file) : ""));
+                    file == null ? "" : ContentUtils.getStringTime(file.getAtime(), file)));
             ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileCreatedTime.name"),
                     NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileCreatedTime.displayName"),
                     "",
-                    file != null ? ContentUtils.getStringTime(file.getCrtime(), file) : ""));
+                    file == null ? "" : ContentUtils.getStringTime(file.getCrtime(), file)));
             ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileSize.name"),
                     NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileSize.displayName"),
                     "",
@@ -427,7 +427,7 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
             ss.put(new NodeProperty<>(Bundle.BlackboardArtifactNode_createSheet_artifactMD5_name(),
                     Bundle.BlackboardArtifactNode_createSheet_artifactMD5_displayName(),
                     "",
-                    file != null ? StringUtils.defaultString(file.getMd5Hash()) : ""));
+                    file == null ? "" : StringUtils.defaultString(file.getMd5Hash())));
             }
         } else {
             String dataSourceStr = "";
@@ -456,7 +456,6 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
         return s;
     }
-
     /**
      * Used by (subclasses of) BlackboardArtifactNode to add the tags property
      * to their sheets.

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2017 Basis Technology Corp.
+ * Copyright 2012-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.datamodel;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import javax.swing.Action;
@@ -29,14 +28,12 @@ import org.openide.util.Utilities;
 import org.sleuthkit.autopsy.actions.AddContentTagAction;
 import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
 import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
-import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.directorytree.ExtractAction;
 import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
 import org.sleuthkit.autopsy.directorytree.ViewContextAction;
 import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction;
 import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
 import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.Directory;
 import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
@@ -46,8 +43,6 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
  */
 public class DirectoryNode extends AbstractFsContentNode<AbstractFile> {

-    private static final Logger LOGGER = Logger.getLogger(DirectoryNode.class.getName());
-
     public static final String DOTDOTDIR = NbBundle.getMessage(DirectoryNode.class, "DirectoryNode.parFolder.text");
     public static final String DOTDIR = NbBundle.getMessage(DirectoryNode.class, "DirectoryNode.curFolder.text");
@@ -95,7 +90,7 @@ public class DirectoryNode extends AbstractFsContentNode<AbstractFile> {
         actionsList.add(null); // creates a menu separator
         actionsList.add(ExtractAction.getInstance());
         actionsList.add(null); // creates a menu separator
-        actionsList.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
+        actionsList.add(new RunIngestModulesAction(content));
         actionsList.add(null); // creates a menu separator
         actionsList.add(AddContentTagAction.getInstance());

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2017 Basis Technology Corp.
+ * Copyright 2017-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,7 +19,6 @@
 package org.sleuthkit.autopsy.datamodel;

 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import javax.swing.Action;
 import org.openide.util.NbBundle;
@@ -29,7 +28,6 @@ import org.sleuthkit.autopsy.directorytree.FileSearchAction;
 import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
 import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction;
 import org.sleuthkit.datamodel.SpecialDirectory;
-import org.sleuthkit.datamodel.Content;

 /**
  * Parent class for special directory types (Local and Virtual)
@@ -61,9 +59,8 @@ public abstract class SpecialDirectoryNode extends AbstractAbstractFileNode<Spec
         actions.add(null); // creates a menu separator
         actions.add(ExtractAction.getInstance());
         actions.add(null); // creates a menu separator
-        actions.add(new FileSearchAction(
-                Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
-        actions.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
+        actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
+        actions.add(new RunIngestModulesAction(content));
         actions.addAll(ContextMenuExtensionPoint.getActions());
         return actions.toArray(new Action[0]);
     }

View File

@@ -88,7 +88,7 @@ import org.sleuthkit.datamodel.TskData.DbType;
 final public class Accounts implements AutopsyVisitableItem {

     private static final Logger LOGGER = Logger.getLogger(Accounts.class.getName());
-    private static final String iconBasePath = "/org/sleuthkit/autopsy/images/"; //NON-NLS
+    private static final String ICON_BASE_PATH = "/org/sleuthkit/autopsy/images/"; //NON-NLS

     @NbBundle.Messages("AccountsRootNode.name=Accounts")
     final public static String NAME = Bundle.AccountsRootNode_name();
@@ -96,10 +96,8 @@ final public class Accounts implements AutopsyVisitableItem {
     private SleuthkitCase skCase;
     private final EventBus reviewStatusBus = new EventBus("ReviewStatusBus");

-    /**
-     * Should rejected accounts be shown in the accounts section of the tree.
-     */
-    private boolean showRejected = false;
+    /* Should rejected accounts be shown in the accounts section of the tree. */
+    private boolean showRejected = false; //NOPMD redundant initializer

     private final RejectAccounts rejectActionInstance;
     private final ApproveAccounts approveActionInstance;
@@ -717,8 +715,8 @@
         @Override
         protected boolean createKeys(List<FileWithCCN> list) {
-            String query
-                    = "SELECT blackboard_artifacts.obj_id," //NON-NLS
+            String query =
+                    "SELECT blackboard_artifacts.obj_id," //NON-NLS
                     + " solr_attribute.value_text AS solr_document_id, "; //NON-NLS
             if (skCase.getDatabaseType().equals(DbType.POSTGRESQL)) {
                 query += " string_agg(blackboard_artifacts.artifact_id::character varying, ',') AS artifact_IDs, " //NON-NLS
@@ -739,14 +737,14 @@
                     + " GROUP BY blackboard_artifacts.obj_id, solr_document_id " //NON-NLS
                     + " ORDER BY hits DESC "; //NON-NLS
             try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
-                    ResultSet rs = results.getResultSet();) {
-                while (rs.next()) {
+                    ResultSet resultSet = results.getResultSet();) {
+                while (resultSet.next()) {
                     list.add(new FileWithCCN(
-                            rs.getLong("obj_id"), //NON-NLS
-                            rs.getString("solr_document_id"), //NON-NLS
-                            unGroupConcat(rs.getString("artifact_IDs"), Long::valueOf), //NON-NLS
-                            rs.getLong("hits"), //NON-NLS
-                            new HashSet<>(unGroupConcat(rs.getString("review_status_ids"), id -> BlackboardArtifact.ReviewStatus.withID(Integer.valueOf(id)))))); //NON-NLS
+                            resultSet.getLong("obj_id"), //NON-NLS
+                            resultSet.getString("solr_document_id"), //NON-NLS
+                            unGroupConcat(resultSet.getString("artifact_IDs"), Long::valueOf), //NON-NLS
+                            resultSet.getLong("hits"), //NON-NLS
+                            new HashSet<>(unGroupConcat(resultSet.getString("review_status_ids"), reviewStatusID -> BlackboardArtifact.ReviewStatus.withID(Integer.valueOf(reviewStatusID)))))); //NON-NLS
                 }
             } catch (TskCoreException | SQLException ex) {
                 LOGGER.log(Level.SEVERE, "Error querying for files with ccn hits.", ex); //NON-NLS
@@ -794,8 +792,8 @@
                 "# {0} - number of children",
                 "Accounts.ByFileNode.displayName=By File ({0})"})
         private void updateDisplayName() {
-            String query
-                    = "SELECT count(*) FROM ( SELECT count(*) AS documents "
+            String query =
+                    "SELECT count(*) FROM ( SELECT count(*) AS documents "
                     + " FROM blackboard_artifacts " //NON-NLS
                     + " LEFT JOIN blackboard_attributes as solr_attribute ON blackboard_artifacts.artifact_id = solr_attribute.artifact_id " //NON-NLS
                     + " AND solr_attribute.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_DOCUMENT_ID.getTypeID() //NON-NLS
@@ -806,12 +804,12 @@
                     + getRejectedArtifactFilterClause()
                     + " GROUP BY blackboard_artifacts.obj_id, solr_attribute.value_text ) AS foo";
             try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
-                    ResultSet rs = results.getResultSet();) {
-                while (rs.next()) {
+                    ResultSet resultSet = results.getResultSet();) {
+                while (resultSet.next()) {
                     if (skCase.getDatabaseType().equals(DbType.POSTGRESQL)) {
-                        setDisplayName(Bundle.Accounts_ByFileNode_displayName(rs.getLong("count")));
+                        setDisplayName(Bundle.Accounts_ByFileNode_displayName(resultSet.getLong("count")));
                     } else {
-                        setDisplayName(Bundle.Accounts_ByFileNode_displayName(rs.getLong("count(*)")));
+                        setDisplayName(Bundle.Accounts_ByFileNode_displayName(resultSet.getLong("count(*)")));
                     }
                 }
             } catch (TskCoreException | SQLException ex) {
@@ -872,7 +870,7 @@
                             && eventData.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()) {
                         reviewStatusBus.post(eventData);
                     }
-                } catch (NoCurrentCaseException notUsed) {
+                } catch (NoCurrentCaseException notUsed) { //NOPMD empy catch clause
                     // Case is closed, do nothing.
                 }
             } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
@@ -887,17 +885,16 @@
                     Case.getOpenCase();
                     refresh(true);
-                } catch (NoCurrentCaseException notUsed) {
+                } catch (NoCurrentCaseException notUsed) { //NOPMD empy catch clause
                     // Case is closed, do nothing.
                 }
-            } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
+            } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())
+                    && (evt.getNewValue() == null)) {
                 // case was closed. Remove listeners so that we don't get called with a stale case handle
-                if (evt.getNewValue() == null) {
                 removeNotify();
                 skCase = null;
             }
         }
-        }
     };
@@ -931,8 +928,8 @@
             RangeMap<Integer, BinResult> binRanges = TreeRangeMap.create();

-            String query
-                    = "SELECT SUBSTR(blackboard_attributes.value_text,1,8) AS BIN, " //NON-NLS
+            String query =
+                    "SELECT SUBSTR(blackboard_attributes.value_text,1,8) AS BIN, " //NON-NLS
                     + " COUNT(blackboard_artifacts.artifact_id) AS count " //NON-NLS
                     + " FROM blackboard_artifacts " //NON-NLS
                     + " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id" //NON-NLS
@@ -941,8 +938,8 @@
                     + getRejectedArtifactFilterClause()
                     + " GROUP BY BIN " //NON-NLS
                     + " ORDER BY BIN "; //NON-NLS
-            try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query)) {
-                ResultSet resultSet = results.getResultSet();
+            try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
+                    ResultSet resultSet = results.getResultSet();) {
                 //sort all te individual bins in to the ranges
                 while (resultSet.next()) {
                     final Integer bin = Integer.valueOf(resultSet.getString("BIN"));
@@ -956,16 +953,15 @@
                         count += previousResult.getCount();
                     }

-                    if (binRange != null) {
-                        binRanges.put(Range.closed(binRange.getBINstart(), binRange.getBINend()), new BinResult(count, binRange));
-                    } else {
+                    if (binRange == null) {
                         binRanges.put(Range.closed(bin, bin), new BinResult(count, bin, bin));
+                    } else {
+                        binRanges.put(Range.closed(binRange.getBINstart(), binRange.getBINend()), new BinResult(count, binRange));
                     }
                 }
                 binRanges.asMapOfRanges().values().forEach(list::add);
             } catch (TskCoreException | SQLException ex) {
                 LOGGER.log(Level.SEVERE, "Error querying for BINs.", ex); //NON-NLS
             }
             return true;
@@ -999,15 +995,15 @@
                 "# {0} - number of children",
                 "Accounts.ByBINNode.displayName=By BIN ({0})"})
         private void updateDisplayName() {
-            String query
-                    = "SELECT count(distinct SUBSTR(blackboard_attributes.value_text,1,8)) AS BINs " //NON-NLS
+            String query =
+                    "SELECT count(distinct SUBSTR(blackboard_attributes.value_text,1,8)) AS BINs " //NON-NLS
                     + " FROM blackboard_artifacts " //NON-NLS
                     + " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id" //NON-NLS
                     + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
                     + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER.getTypeID() //NON-NLS
                     + getRejectedArtifactFilterClause(); //NON-NLS
-            try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query)) {
-                ResultSet resultSet = results.getResultSet();
+            try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
+                    ResultSet resultSet = results.getResultSet();) {
                 while (resultSet.next()) {
                     setDisplayName(Bundle.Accounts_ByBINNode_displayName(resultSet.getLong("BINs")));
                 }
@@ -1188,7 +1184,8 @@
          * @param key            The FileWithCCN that backs this node.
          * @param content        The Content object the key represents.
          * @param lookupContents The contents of this Node's lookup. It should
-         *                       contain the content object and the account artifacts.
+         *                       contain the content object and the account
+         *                       artifacts.
          */
         @NbBundle.Messages({
             "# {0} - raw file name",
@@ -1226,29 +1223,29 @@
             "Accounts.FileWithCCNNode.statusProperty.displayName=Status",
             "Accounts.FileWithCCNNode.noDescription=no description"})
         protected Sheet createSheet() {
-            Sheet s = super.createSheet();
-            Sheet.Set ss = s.get(Sheet.PROPERTIES);
-            if (ss == null) {
-                ss = Sheet.createPropertiesSet();
-                s.put(ss);
+            Sheet sheet = super.createSheet();
+            Sheet.Set propSet = sheet.get(Sheet.PROPERTIES);
+            if (propSet == null) {
+                propSet = Sheet.createPropertiesSet();
+                sheet.put(propSet);
             }

-            ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
+            propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_noDescription(),
                     fileName));
-            ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
+            propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_noDescription(),
                     fileKey.getHits()));
-            ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
+            propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
                     Bundle.Accounts_FileWithCCNNode_noDescription(),
                     fileKey.getStatuses().stream()
                             .map(BlackboardArtifact.ReviewStatus::getDisplayName)
                             .collect(Collectors.joining(", ")))); //NON-NLS

-            return s;
+            return sheet;
         }

         @Override
@@ -1292,8 +1289,8 @@
         @Override
         protected boolean createKeys(List<Long> list) {
-            String query
-                    = "SELECT blackboard_artifacts.artifact_id " //NON-NLS
+            String query =
+                    "SELECT blackboard_artifacts.artifact_id " //NON-NLS
                     + " FROM blackboard_artifacts " //NON-NLS
                     + " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id " //NON-NLS
                     + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
@@ -1339,9 +1336,7 @@
     final public class BINNode extends DisplayableItemNode {

-        /**
-         * Creates the nodes for the credit card numbers
-         */
+        /** Creates the nodes for the credit card numbers */
         private final BinResult bin;

         private BINNode(BinResult bin) {
@@ -1365,8 +1360,8 @@
         }

         private void updateDisplayName() {
-            String query
-                    = "SELECT count(blackboard_artifacts.artifact_id ) AS count" //NON-NLS
+            String query =
+                    "SELECT count(blackboard_artifacts.artifact_id ) AS count" //NON-NLS
                     + " FROM blackboard_artifacts " //NON-NLS
                     + " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id " //NON-NLS
                     + " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
@@ -1374,9 +1369,9 @@
                     + " AND blackboard_attributes.value_text >= '" + bin.getBINStart() + "' AND blackboard_attributes.value_text < '" + (bin.getBINEnd() + 1) + "'" //NON-NLS
                     + getRejectedArtifactFilterClause();
             try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
-                    ResultSet rs = results.getResultSet();) {
-                while (rs.next()) {
-                    setDisplayName(getBinRangeString(bin) + " (" + rs.getLong("count") + ")"); //NON-NLS
+                    ResultSet resultSet = results.getResultSet();) {
+                while (resultSet.next()) {
+                    setDisplayName(getBinRangeString(bin) + " (" + resultSet.getLong("count") + ")"); //NON-NLS
                 }
             } catch (TskCoreException | SQLException ex) {
                 LOGGER.log(Level.SEVERE, "Error querying for account artifacts.", ex); //NON-NLS
@@ -1506,9 +1501,7 @@
             return true;
         }

-        /**
-         * The number of accounts with this BIN
-         */
+        /** The number of accounts with this BIN */
         private final long count;

         private final BINRange binRange;
@@ -1598,7 +1591,7 @@
         private AccountArtifactNode(BlackboardArtifact artifact) {
             super(artifact, "org/sleuthkit/autopsy/images/credit-card.png"); //NON-NLS
             this.artifact = artifact;
-            setName("" + this.artifact.getArtifactID());
+            setName(Long.toString(this.artifact.getArtifactID()));
             reviewStatusBus.register(this);
         }
@@ -1728,7 +1721,7 @@
             selectedPaths.forEach(path -> {
                 try {
                     toArray.add(NodeOp.findPath(rootNode, path));
-                } catch (NodeNotFoundException ex) {
+                } catch (NodeNotFoundException ex) { //NOPMD empty catch clause
                     //just ingnore paths taht don't exist. this is expected since we are rejecting
                 }
             });
@@ -1772,25 +1765,25 @@
     public static String getIconFilePath(Account.Type type) {
         if (type.equals(Account.Type.CREDIT_CARD)) {
-            return iconBasePath + "credit-card.png";
+            return ICON_BASE_PATH + "credit-card.png";
         } else if (type.equals(Account.Type.DEVICE)) {
-            return iconBasePath + "image.png";
+            return ICON_BASE_PATH + "image.png";
         } else if (type.equals(Account.Type.EMAIL)) {
-            return iconBasePath + "email.png";
+            return ICON_BASE_PATH + "email.png";
         } else if (type.equals(Account.Type.FACEBOOK)) {
-            return iconBasePath + "facebook.png";
+            return ICON_BASE_PATH + "facebook.png";
        } else if (type.equals(Account.Type.INSTAGRAM)) {
-            return iconBasePath + "instagram.png";
+            return ICON_BASE_PATH + "instagram.png";
         } else if (type.equals(Account.Type.MESSAGING_APP)) {
-            return iconBasePath + "messaging.png";
+            return ICON_BASE_PATH + "messaging.png";
         } else if (type.equals(Account.Type.PHONE)) {
-            return iconBasePath + "phone.png";
+            return ICON_BASE_PATH + "phone.png";
         } else if (type.equals(Account.Type.TWITTER)) {
-            return iconBasePath + "twitter.png";
+            return ICON_BASE_PATH + "twitter.png";
         } else if (type.equals(Account.Type.WEBSITE)) {
-            return iconBasePath + "web-file.png";
+            return ICON_BASE_PATH + "web-file.png";
         } else if (type.equals(Account.Type.WHATSAPP)) {
-            return iconBasePath + "WhatsApp.png";
+            return ICON_BASE_PATH + "WhatsApp.png";
         } else {
             //there could be a default icon instead...
             throw new IllegalArgumentException("Unknown Account.Type: " + type.getTypeName());

View File

@@ -57,13 +57,16 @@ final class DataSourceIngestJob {
     /**
      * These fields define a data source ingest job: the parent ingest job, an
-     * ID, the user's ingest job settings, and the data source to be processed.
+     * ID, the user's ingest job settings, and the data source to be analyzed.
+     * Optionally, there is a set of files to be analyzed instead of analyzing
+     * all of the files in the data source.
      */
     private final IngestJob parentJob;
     private static final AtomicLong nextJobId = new AtomicLong(0L);
     private final long id;
     private final IngestJobSettings settings;
     private final Content dataSource;
+    private final List<AbstractFile> files = new ArrayList<>();

     /**
      * A data source ingest job runs in stages.
@@ -171,7 +174,7 @@
     /**
      * Constructs an object that encapsulates a data source and the ingest
-     * module pipelines used to process it.
+     * module pipelines used to analyze it.
      *
      * @param parentJob        The ingest job of which this data source ingest
      *                         job is a part.
@@ -181,9 +184,27 @@
      *                         progress handles.
      */
     DataSourceIngestJob(IngestJob parentJob, Content dataSource, IngestJobSettings settings, boolean runInteractively) {
+        this(parentJob, dataSource, Collections.emptyList(), settings, runInteractively);
+    }
+
+    /**
+     * Constructs an object that encapsulates a data source and the ingest
+     * module pipelines used to analyze it. Either all of the files in the data
+     * source or a given subset of the files will be analyzed.
+     *
+     * @param parentJob        The ingest job of which this data source ingest
+     *                         job is a part.
+     * @param dataSource       The data source to be ingested.
+     * @param files            A subset of the files for the data source.
+     * @param settings         The settings for the ingest job.
+     * @param runInteractively Whether or not this job should use NetBeans
+     *                         progress handles.
+     */
+    DataSourceIngestJob(IngestJob parentJob, Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean runInteractively) {
         this.parentJob = parentJob;
         this.id = DataSourceIngestJob.nextJobId.getAndIncrement();
         this.dataSource = dataSource;
+        this.files.addAll(files);
         this.settings = settings;
         this.doUI = runInteractively;
         this.createTime = new Date().getTime();
@@ -497,13 +518,13 @@
         */
        if (this.hasFirstStageDataSourceIngestPipeline() && this.hasFileIngestPipeline()) {
            logger.log(Level.INFO, "Scheduling first stage data source and file level analysis tasks for {0} (jobId={1})", new Object[]{dataSource.getName(), this.id}); //NON-NLS
-           DataSourceIngestJob.taskScheduler.scheduleIngestTasks(this);
+           DataSourceIngestJob.taskScheduler.scheduleIngestTasks(this, this.files);
        } else if (this.hasFirstStageDataSourceIngestPipeline()) {
            logger.log(Level.INFO, "Scheduling first stage data source level analysis tasks for {0} (jobId={1}), no file level analysis configured", new Object[]{dataSource.getName(), this.id}); //NON-NLS
            DataSourceIngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
        } else {
            logger.log(Level.INFO, "Scheduling file level analysis tasks for {0} (jobId={1}), no first stage data source level analysis configured", new Object[]{dataSource.getName(), this.id}); //NON-NLS
-           DataSourceIngestJob.taskScheduler.scheduleFileIngestTasks(this);
+           DataSourceIngestJob.taskScheduler.scheduleFileIngestTasks(this, this.files);

            /**
             * No data source ingest task has been scheduled for this stage, and
@@ -815,7 +836,7 @@
     void addFiles(List<AbstractFile> files) {
         if (DataSourceIngestJob.Stages.FIRST == this.stage) {
             for (AbstractFile file : files) {
-                DataSourceIngestJob.taskScheduler.scheduleFileIngestTask(this, file);
+                DataSourceIngestJob.taskScheduler.scheduleFastTrackedFileIngestTask(this, file);
             }
         } else {
             DataSourceIngestJob.logger.log(Level.SEVERE, "Adding files during second stage not supported"); //NON-NLS

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2016 Basis Technology Corp.
+ * Copyright 2014-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -29,13 +29,12 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import org.openide.util.NbBundle;
+import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Content;

 /**
- * Runs a collection of data sources through a set of ingest modules specified
- * via ingest job settings.
- * <p>
- * This class is thread-safe.
+ * Analyzes one or more data sources using a set of ingest modules specified via
+ * ingest job settings.
  */
 public final class IngestJob {
@@ -69,7 +68,7 @@ public final class IngestJob {
     private volatile CancellationReason cancellationReason;

     /**
-     * Constructs an ingest job that runs a collection of data sources through a
+     * Constructs an ingest job that analyzes one or more data sources using a
      * set of ingest modules specified via ingest job settings.
      *
      * @param dataSources The data sources to be ingested.
@@ -88,6 +87,26 @@
         cancellationReason = CancellationReason.NOT_CANCELLED;
     }

+    /**
+     * Constructs an ingest job that analyzes one data source using a set of
+     * ingest modules specified via ingest job settings. Either all of the files
+     * in the data source or a given subset of the files will be analyzed.
+     *
+     * @param dataSource The data source to be analyzed
+     * @param files      A subset of the files for the data source.
+     * @param settings   The ingest job settings.
+     * @param doUI       Whether or not this job should use progress bars,
+     *                   message boxes for errors, etc.
+     */
+    IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean doUI) {
+        this.id = IngestJob.nextId.getAndIncrement();
+        this.dataSourceJobs = new ConcurrentHashMap<>();
+        DataSourceIngestJob dataSourceIngestJob = new DataSourceIngestJob(this, dataSource, files, settings, doUI);
+        this.dataSourceJobs.put(dataSourceIngestJob.getId(), dataSourceIngestJob);
+        incompleteJobsCount = new AtomicInteger(dataSourceJobs.size());
+        cancellationReason = CancellationReason.NOT_CANCELLED;
+    }
+
     /**
      * Gets the unique identifier assigned to this ingest job.
      *

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2018 Basis Technology Corp.
+ * Copyright 2012-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -298,7 +298,7 @@ public class IngestManager {
     /**
      * Queues an ingest job for for one or more data sources.
     *
-     * @param dataSources The data sources to process.
+     * @param dataSources The data sources to analyze.
      * @param settings    The settings for the ingest job.
      */
     public void queueIngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
@@ -312,6 +312,25 @@
         }
     }

+    /**
+     * Queues an ingest job for for a data source. Either all of the files in
+     * the data source or a given subset of the files will be analyzed.
+     *
+     * @param dataSource The data source to analyze.
+     * @param files      A subset of the files for the data source.
+     * @param settings   The settings for the ingest job.
+     */
+    public void queueIngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
+        if (caseIsOpen) {
+            IngestJob job = new IngestJob(dataSource, files, settings, RuntimeProperties.runningWithGUI());
+            if (job.hasIngestPipeline()) {
+                long taskId = nextIngestManagerTaskId.incrementAndGet();
+                Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
+                startIngestJobFutures.put(taskId, task);
+            }
+        }
+    }
+
     /**
      * Immdiately starts an ingest job for one or more data sources.
     *
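
The overload added above is the public entry point for analyzing only a subset of a data source's files rather than the whole data source. A hedged usage sketch follows; the wrapper class, the way the file list is obtained, and the use of the single-argument IngestJobSettings constructor with an execution-context string are assumptions for illustration, not part of this commit:

    package org.sleuthkit.autopsy.examples; // hypothetical package, illustration only

    import java.util.List;
    import org.sleuthkit.autopsy.ingest.IngestJobSettings;
    import org.sleuthkit.autopsy.ingest.IngestManager;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Content;

    class SubsetIngestSketch {

        /**
         * Queues ingest for just the given files of a data source, using the
         * queueIngestJob(Content, List<AbstractFile>, IngestJobSettings)
         * overload added in this commit.
         */
        void reIngestExtractedFiles(Content dataSource, List<AbstractFile> extractedFiles) {
            // The execution-context string is an assumption; any saved ingest settings context works here.
            IngestJobSettings settings = new IngestJobSettings(SubsetIngestSketch.class.getCanonicalName());
            IngestManager.getInstance().queueIngestJob(dataSource, extractedFiles, settings);
        }
    }

Passing an empty file list falls back to scheduling every file in the data source, as shown in the IngestTasksScheduler changes below.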

View File

@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012-2017 Basis Technology Corp.
+ * Copyright 2012-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -143,15 +143,14 @@ final class IngestTasksScheduler {
     }

     /**
-     * Schedules a data source ingest task and file ingest tasks for an ingest
-     * job.
+     * Schedules a data source level ingest task and file level ingest tasks for
+     * an ingest job. Either all of the files in the data source or a given
+     * subset of the files will be scheduled.
      *
-     * @param job The job for which the tasks are to be scheduled.
-     *
-     * @throws InterruptedException if the calling thread is blocked due to a
-     *                              full tasks queue and is interrupted.
+     * @param job   The data source ingest job.
+     * @param files A subset of the files for the data source.
      */
-    synchronized void scheduleIngestTasks(DataSourceIngestJob job) {
+    synchronized void scheduleIngestTasks(DataSourceIngestJob job, List<AbstractFile> files) {
         if (!job.isCancelled()) {
             // Scheduling of both a data source ingest task and file ingest tasks
             // for a job must be an atomic operation. Otherwise, the data source
@@ -159,14 +158,14 @@
             // resulting in a potential false positive when another thread checks
             // whether or not all the tasks for the job are completed.
             this.scheduleDataSourceIngestTask(job);
-            this.scheduleFileIngestTasks(job);
+            this.scheduleFileIngestTasks(job, files);
         }
     }

     /**
-     * Schedules a data source ingest task for an ingest job.
+     * Schedules a data source level ingest task for a data source ingest job.
      *
-     * @param job The job for which the tasks are to be scheduled.
+     * @param job The data source ingest job.
      */
     synchronized void scheduleDataSourceIngestTask(DataSourceIngestJob job) {
         if (!job.isCancelled()) {
@@ -186,16 +185,22 @@
     }

     /**
-     * Schedules file ingest tasks for an ingest job.
+     * Schedules file level ingest tasks for a data source ingest job. Either
+     * all of the files in the data source or a given subset of the files will
+     * be scheduled.
      *
-     * @param job The job for which the tasks are to be scheduled.
+     * @param job   The data source ingest job.
+     * @param files A subset of the files for the data source.
      */
-    synchronized void scheduleFileIngestTasks(DataSourceIngestJob job) {
+    synchronized void scheduleFileIngestTasks(DataSourceIngestJob job, List<AbstractFile> files) {
         if (!job.isCancelled()) {
-            // Get the top level files for the data source associated with this job
-            // and add them to the root directories priority queue.
-            List<AbstractFile> topLevelFiles = getTopLevelFiles(job.getDataSource());
-            for (AbstractFile firstLevelFile : topLevelFiles) {
+            List<AbstractFile> candidateFiles = new ArrayList<>();
+            if (files.isEmpty()) {
+                getTopLevelFiles(job.getDataSource(), candidateFiles);
+            } else {
+                candidateFiles.addAll(files);
+            }
+            for (AbstractFile firstLevelFile : candidateFiles) {
                 FileIngestTask task = new FileIngestTask(job, firstLevelFile);
                 if (IngestTasksScheduler.shouldEnqueueFileTask(task)) {
                     this.tasksInProgress.add(task);
@@ -207,12 +212,14 @@
     }

     /**
-     * Schedules a file ingest task for an ingest job.
+     * Schedules a file ingest task for a data source ingest job. The task that
+     * is created is added directly to the pending file tasks queues, i.e., it
+     * is "fast tracked."
      *
-     * @param job  The job for which the tasks are to be scheduled.
-     * @param file The file to be associated with the task.
+     * @param job  The data source ingest job.
+     * @param file A file.
      */
-    synchronized void scheduleFileIngestTask(DataSourceIngestJob job, AbstractFile file) {
+    synchronized void scheduleFastTrackedFileIngestTask(DataSourceIngestJob job, AbstractFile file) {
         if (!job.isCancelled()) {
             FileIngestTask task = new FileIngestTask(job, file);
             if (IngestTasksScheduler.shouldEnqueueFileTask(task)) {
@@ -280,11 +287,9 @@
      * tasks to put into the root directories queue.
      *
      * @param dataSource The data source.
-     *
-     * @return A list of top level files.
+     * @param topLevelFiles The top level files are added to this list.
      */
-    private static List<AbstractFile> getTopLevelFiles(Content dataSource) {
-        List<AbstractFile> topLevelFiles = new ArrayList<>();
+    private static void getTopLevelFiles(Content dataSource, List<AbstractFile> topLevelFiles) {
         Collection<AbstractFile> rootObjects = dataSource.accept(new GetRootDirectoryVisitor());
         if (rootObjects.isEmpty() && dataSource instanceof AbstractFile) {
             // The data source is itself a file to be processed.
@@ -312,7 +317,6 @@
                 }
             }
         }
-        return topLevelFiles;
     }

     /**
@@ -405,7 +409,7 @@
             return false;
         }
-        /**
+        /*
          * Check if the file is a member of the file ingest filter that is being
          * applied to the current run of ingest, checks if unallocated space
          * should be processed inside call to fileIsMemberOf
@@ -414,8 +418,8 @@
             return false;
         }

         // Skip the task if the file is one of a select group of special, large
         // NTFS or FAT file system files.
if (file instanceof org.sleuthkit.datamodel.File) { if (file instanceof org.sleuthkit.datamodel.File) {
final org.sleuthkit.datamodel.File f = (org.sleuthkit.datamodel.File) file; final org.sleuthkit.datamodel.File f = (org.sleuthkit.datamodel.File) file;

View File

@ -1,7 +1,7 @@
/* /*
* Autopsy Forensic Browser * Autopsy Forensic Browser
* *
* Copyright 2011-2018 Basis Technology Corp. * Copyright 2017-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org * Contact: carrier <at> sleuthkit <dot> org
* *
* Licensed under the Apache License, Version 2.0 (the "License"); * Licensed under the Apache License, Version 2.0 (the "License");
@ -23,6 +23,7 @@ import java.awt.event.ActionEvent;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.logging.Level;
import javax.swing.AbstractAction; import javax.swing.AbstractAction;
import javax.swing.Action; import javax.swing.Action;
import javax.swing.JOptionPane; import javax.swing.JOptionPane;
@ -31,26 +32,33 @@ import org.openide.DialogDisplayer;
import org.openide.WizardDescriptor; import org.openide.WizardDescriptor;
import org.openide.util.NbBundle.Messages; import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager; import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.SpecialDirectoryNode;
import org.sleuthkit.autopsy.ingest.IngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestManager; import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content; import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Directory; import org.sleuthkit.datamodel.TskCoreException;
/** /**
* This class is used to add the action to the run ingest modules menu item. * An action that invokes the Run Ingest Modules wizard for one or more data
* When the data source is pressed, it should open the wizard for ingest * sources or for the children of a file.
* modules.
*/ */
public final class RunIngestModulesAction extends AbstractAction { public final class RunIngestModulesAction extends AbstractAction {
@Messages("RunIngestModulesAction.name=Run Ingest Modules") @Messages("RunIngestModulesAction.name=Run Ingest Modules")
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(SpecialDirectoryNode.class.getName());
/* /*
* Note that the execution context is the name of the dialog that used to be * Note that the execution context is the name of the dialog that used to be
* used instead of this wizard and is retained for backwards compatibility. * used instead of this wizard and is retained for backwards compatibility.
*/ */
private static final String EXECUTION_CONTEXT = "org.sleuthkit.autopsy.ingest.RunIngestModulesDialog"; private static final String EXECUTION_CONTEXT = "org.sleuthkit.autopsy.ingest.RunIngestModulesDialog";
private final List<Content> dataSources = new ArrayList<>();
private final IngestJobSettings.IngestType ingestType;
private final AbstractFile parentFile;
/** /**
* Display any warnings that the ingestJobSettings have. * Display any warnings that the ingestJobSettings have.
@ -67,12 +75,10 @@ public final class RunIngestModulesAction extends AbstractAction {
JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), warningMessage.toString()); JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), warningMessage.toString());
} }
} }
private final List<Content> dataSources = new ArrayList<>();
private final IngestJobSettings.IngestType ingestType;
/** /**
* Creates an action which will make a run ingest modules wizard when it is * Constructs an action that invokes the Run Ingest Modules wizard for one
* performed. * or more data sources.
* *
* @param dataSources - the data sources you want to run ingest on * @param dataSources - the data sources you want to run ingest on
*/ */
@ -91,17 +97,25 @@ public final class RunIngestModulesAction extends AbstractAction {
this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name()); this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name());
this.dataSources.addAll(dataSources); this.dataSources.addAll(dataSources);
this.ingestType = IngestJobSettings.IngestType.ALL_MODULES; this.ingestType = IngestJobSettings.IngestType.ALL_MODULES;
this.parentFile = null;
} }
/** /**
* Creates an action which will make a run ingest modules wizard when it is * Constructs an action that invokes the Run Ingest Modules wizard for the
* performed. * children of a file.
* *
* @param dir - the directory you want to run ingest on * @param file The file.
*/ */
public RunIngestModulesAction(Directory dir) { public RunIngestModulesAction(AbstractFile parentFile) {
this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name()); this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name());
this.dataSources.add(dir); this.parentFile = parentFile;
this.ingestType = IngestJobSettings.IngestType.FILES_ONLY; this.ingestType = IngestJobSettings.IngestType.FILES_ONLY;
try {
this.setEnabled(parentFile.hasChildren());
} catch (TskCoreException ex) {
this.setEnabled(false);
logger.log(Level.SEVERE, String.format("Failed to get children count for parent file %s (objId=%d), RunIngestModulesAction disabled", parentFile.getName(), parentFile.getId()), ex);
MessageNotifyUtil.Message.error(Bundle.RunIngestModulesAction_actionPerformed_errorMessage());
}
} }
@ -110,6 +124,9 @@ public final class RunIngestModulesAction extends AbstractAction {
* *
* @param e the action event * @param e the action event
*/ */
@Messages({
"RunIngestModulesAction.actionPerformed.errorMessage=Error querying the case database for the selected item."
})
@Override @Override
public void actionPerformed(ActionEvent e) { public void actionPerformed(ActionEvent e) {
/** /**
@ -129,7 +146,26 @@ public final class RunIngestModulesAction extends AbstractAction {
if (DialogDisplayer.getDefault().notify(wiz) == WizardDescriptor.FINISH_OPTION) { if (DialogDisplayer.getDefault().notify(wiz) == WizardDescriptor.FINISH_OPTION) {
IngestJobSettings ingestJobSettings = wizard.getIngestJobSettings(); IngestJobSettings ingestJobSettings = wizard.getIngestJobSettings();
showWarnings(ingestJobSettings); showWarnings(ingestJobSettings);
if (this.parentFile == null) {
IngestManager.getInstance().queueIngestJob(this.dataSources, ingestJobSettings); IngestManager.getInstance().queueIngestJob(this.dataSources, ingestJobSettings);
} else {
try {
Content dataSource = parentFile.getDataSource();
List<Content> children = parentFile.getChildren();
List<AbstractFile> files = new ArrayList<>();
for (Content child : children) {
if (child instanceof AbstractFile) {
files.add((AbstractFile) child);
}
}
if (!files.isEmpty()) {
IngestManager.getInstance().queueIngestJob(dataSource, files, ingestJobSettings);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source or children for parent file %s (objId=%d), action failed", parentFile.getName(), parentFile.getId()), ex);
MessageNotifyUtil.Message.error(Bundle.RunIngestModulesAction_actionPerformed_errorMessage());
}
}
} }
} }

View File

@ -26,6 +26,8 @@ import org.sleuthkit.datamodel.TskCoreException;
/** /**
* An interface for implementations of a keyword search service. * An interface for implementations of a keyword search service.
* You can find the implementations by using Lookup, such as:
* Lookup.getDefault().lookup(KeywordSearchService.class)
* *
* TODO (AUT-2158: This interface should not extend Closeable. * TODO (AUT-2158: This interface should not extend Closeable.
*/ */

View File

@ -25,6 +25,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter; import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings; import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel; import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.NoIngestModuleIngestJobSettings;
/** /**
* A factory for creating instances of file ingest modules that carve * A factory for creating instances of file ingest modules that carve
@ -81,7 +82,17 @@ public class PhotoRecCarverIngestModuleFactory extends IngestModuleFactoryAdapte
@Override @Override
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) { public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
if (settings instanceof PhotoRecCarverIngestJobSettings) {
return new PhotoRecCarverIngestJobSettingsPanel((PhotoRecCarverIngestJobSettings) settings); return new PhotoRecCarverIngestJobSettingsPanel((PhotoRecCarverIngestJobSettings) settings);
} }
/*
* Compatibility check for older versions.
*/
if (settings instanceof NoIngestModuleIngestJobSettings) {
return new PhotoRecCarverIngestJobSettingsPanel(new PhotoRecCarverIngestJobSettings());
}
throw new IllegalArgumentException("Expected settings argument to be an instance of PhotoRecCarverIngestJobSettings");
}
} }

View File

@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.Content;
/** /**
* A utility that runs an ingest job, blocking until the job is completed. * A utility that runs an ingest job, blocking until the job is completed.
*/ */
public final class IngestRunner { public final class IngestJobRunner {
/** /**
* Runs an ingest job, blocking until the job is completed. * Runs an ingest job, blocking until the job is completed.
@ -70,7 +70,7 @@ public final class IngestRunner {
/** /**
* IngestRunner instances cannot be instatiated. * IngestRunner instances cannot be instatiated.
*/ */
private IngestRunner() { private IngestJobRunner() {
} }
/** /**

Binary image files changed (contents not shown): one image modified (28 KiB), four images added (26 KiB, 17 KiB, 39 KiB, and 12 KiB), and one image replaced (43 KiB → 16 KiB).

View File

@ -0,0 +1,33 @@
/*! \page live_triage_page Live Triage
\section live_triage_overview Overview
The Live Triage feature allows you to load Autopsy onto a removable drive and run it on target systems while making minimal changes to those systems. It currently works only on Windows.
\section live_triage_create_drive Creating a live triage drive
To create a live triage drive, go to Tools->Make Live Triage Drive to bring up the main dialog.
\image html live_triage_dialog.png
Select the drive you want to use - any type of USB storage device will work, but for best results use the fastest drive available. Once the process is complete, the root folder of the drive will contain an Autopsy folder and a RunFromUSB.bat file.
\section live_triage_usage Running Autopsy from the live triage drive
Insert the drive into the target machine and browse to it in Windows Explorer. Right click on RunFromUSB.bat and select "Run as administrator". This is necessary to analyze the local drives.
\image html live_triage_script.png
Running the script will generate a few more directories on the USB drive. The configData directory stores all the data used by Autopsy - primarily configuration files and temporary files. You can make changes to the Autopsy settings and they will persist between runs. The cases directory is created as a recommended place to save your case data. You will need to browse to it when creating a case in Autopsy.
Once Autopsy is running, proceed to create a case as normal, making sure to save it on the USB drive.
\image html live_triage_case.png
Then choose the Local Disk data source and select the desired drive.
\image html live_triage_ds.png
See the \ref ds_local page for more information on local disk data sources.
*/

View File

@ -60,6 +60,7 @@ The following topics are available here:
- \subpage windows_authentication - \subpage windows_authentication
- \subpage multiuser_sec_page - \subpage multiuser_sec_page
- \subpage multiuser_page - \subpage multiuser_page
- \subpage live_triage_page
- \subpage advanced_page - \subpage advanced_page
If the topic you need is not listed, refer to the <a href="http://wiki.sleuthkit.org/index.php?title=Autopsy_User%27s_Guide">Autopsy Wiki</a> or join the <a href="https://lists.sourceforge.net/lists/listinfo/sleuthkit-users">SleuthKit User List</a> at SourceForge. If the topic you need is not listed, refer to the <a href="http://wiki.sleuthkit.org/index.php?title=Autopsy_User%27s_Guide">Autopsy Wiki</a> or join the <a href="https://lists.sourceforge.net/lists/listinfo/sleuthkit-users">SleuthKit User List</a> at SourceForge.

View File

@ -75,6 +75,19 @@ Typically a general report module should interact with both the Blackboard API i
You should call org.sleuthkit.autopsy.casemodule.Case.addReport() with the path to your report so that it is shown in the Autopsy tree. You can specify a specific file or folder and the user can then view it later. You should call org.sleuthkit.autopsy.casemodule.Case.addReport() with the path to your report so that it is shown in the Autopsy tree. You can specify a specific file or folder and the user can then view it later.
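For example, a report module might register its generated file with the case like this (a minimal sketch, not taken from Autopsy itself; it assumes baseReportDir is the report output folder passed to your module, and the module name, report name, and file name are placeholders):

\code{.java}
String reportPath = baseReportDir + File.separator + "my_report.html";
try {
    Case.getCurrentCase().addReport(reportPath, "My Report Module", "My Report");
} catch (TskCoreException ex) {
    logger.log(Level.SEVERE, "Failed to add report to the case", ex);
}
\endcode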
\subsection report_create_module_indexing Indexing Reports
After you have called org.sleuthkit.autopsy.casemodule.Case.addReport() and created a report, you can pass it to org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService.index() so that it is indexed and can then be found by a user. This is most commonly used when an Ingest Module runs a 3rd party tool and the output of that tool is added back into Autopsy as a report. Here is some example code:
\code{.java}
KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
if (null == searchService) {
logger.log(Level.WARNING, "Keyword search service not found. Report will not be indexed");
} else {
searchService.index(report);
}
\endcode
\subsection report_create_module_layer Installing your Report Module \subsection report_create_module_layer Installing your Report Module
Report modules developed using Java must be registered in a layer.xml file. This file allows Autopsy to find the report module. Report modules developed using Java must be registered in a layer.xml file. This file allows Autopsy to find the report module.

View File

@ -56,9 +56,10 @@
<rule ref="rulesets/java/comments.xml/CommentRequired"> <rule ref="rulesets/java/comments.xml/CommentRequired">
<properties> <properties>
<!-- Disabled because we have lots of undocumented fields --> <!-- Disabled because we have lots of undocumented fields -->
<property name="fieldCommentRequirement" value="Unwanted"/> <property name="fieldCommentRequirement" value="Ignored"/>
<!-- Disabled because we don't require comments on overrides of public fields --> <!-- Disabled because we don't require comments on overrides of public fields -->
<property name="publicMethodCommentRequirement" value="Unwanted"/> <property name="publicMethodCommentRequirement" value="Required"/>
<!--<property name="methodWithOverrideCommentRequirement" value="Unwanted"/>-->
</properties> </properties>
</rule> </rule>
<!-- Commented out because it was flagged some of our header / copyright comments <!-- Commented out because it was flagged some of our header / copyright comments