Merge remote-tracking branch 'upstream/develop' into 3595_DeclutterLocalDiskPanel

This commit is contained in:
U-BASIS\dgrove 2018-03-19 10:07:30 -04:00
commit 55cd716b02
42 changed files with 862 additions and 296 deletions

View File

@ -111,7 +111,7 @@ public class CorrelationAttributeInstance implements Serializable {
public String toString() {
return this.getID()
+ this.getCorrelationCase().getCaseUUID()
+ this.getCorrelationDataSource().getName()
+ this.getCorrelationDataSource().getDeviceID()
+ this.getFilePath()
+ this.getKnownStatus()
+ this.getComment();

View File

@ -57,7 +57,8 @@ public class IngestEventsListener {
private static final Logger LOGGER = Logger.getLogger(CorrelationAttribute.class.getName());
final Collection<String> recentlyAddedCeArtifacts = new LinkedHashSet<>();
private static int ceModuleInstanceCount = 0;
private static int correlationModuleInstanceCount;
private static boolean flagNotableItems;
private final ExecutorService jobProcessingExecutor;
private static final String INGEST_EVENT_THREAD_NAME = "Ingest-Event-Listener-%d";
private final PropertyChangeListener pcl1 = new IngestModuleEventListener();
@ -88,21 +89,20 @@ public class IngestEventsListener {
}
/**
* Enable this IngestEventsListener to add contents to the Correlation
* Engine.
*
* Increase the number of IngestEventsListeners adding contents to the
* Correlation Engine.
*/
public synchronized static void incrementCorrelationEngineModuleCount() {
ceModuleInstanceCount++; //Should be called once in the Correlation Engine module's startup method.
correlationModuleInstanceCount++; //Should be called once in the Correlation Engine module's startup method.
}
/**
* Disable this IngestEventsListener from adding contents to the Correlation
* Engine.
* Decrease the number of IngestEventsListeners adding contents to the
* Correlation Engine.
*/
public synchronized static void decrementCorrelationEngineModuleCount() {
if (getCeModuleInstanceCount() > 0) { //prevent the counter from going negative
ceModuleInstanceCount--; //Should be called once in the Correlation Engine module's shutdown method.
correlationModuleInstanceCount--; //Should be called once in the Correlation Engine module's shutdown method.
}
}
@ -111,17 +111,35 @@ public class IngestEventsListener {
* is being run during ingest to 0.
*/
synchronized static void resetCeModuleInstanceCount() {
ceModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset
correlationModuleInstanceCount = 0; //called when a case is opened in case for some reason counter was not reset
}
/**
* Wether or not the Correlation Engine Module is enabled for any of the
* Whether or not the Correlation Engine Module is enabled for any of the
* currently running ingest jobs.
*
* @return boolean True for Correlation Engine enabled, False for disabled
*/
private synchronized static int getCeModuleInstanceCount() {
return ceModuleInstanceCount;
public synchronized static int getCeModuleInstanceCount() {
return correlationModuleInstanceCount;
}
/**
* Are notable items being flagged?
*
* @return True if flagging notable items; otherwise false.
*/
public synchronized static boolean isFlagNotableItems() {
return flagNotableItems;
}
/**
* Configure the listener to flag notable items or not.
*
* @param value True to flag notable items; otherwise false.
*/
public synchronized static void setFlagNotableItems(boolean value) {
flagNotableItems = value;
}
@NbBundle.Messages({"IngestEventsListener.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
@ -174,7 +192,7 @@ public class IngestEventsListener {
}
switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
case DATA_ADDED: {
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt));
jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, isFlagNotableItems()));
break;
}
}
@ -212,10 +230,12 @@ public class IngestEventsListener {
private final EamDb dbManager;
private final PropertyChangeEvent event;
private final boolean flagNotableItemsEnabled;
private DataAddedTask(EamDb db, PropertyChangeEvent evt) {
private DataAddedTask(EamDb db, PropertyChangeEvent evt, boolean flagNotableItemsEnabled) {
dbManager = db;
event = evt;
this.flagNotableItemsEnabled = flagNotableItemsEnabled;
}
@Override
@ -241,11 +261,13 @@ public class IngestEventsListener {
// query db for artifact instances having this TYPE/VALUE and knownStatus = "Bad".
// if getKnownStatus() is "Unknown" and this artifact instance was marked bad in a previous case,
// create TSK_INTERESTING_ARTIFACT_HIT artifact on BB.
if (flagNotableItemsEnabled) {
List<String> caseDisplayNames = dbManager.getListCasesHavingArtifactInstancesKnownBad(eamArtifact.getCorrelationType(), eamArtifact.getCorrelationValue());
if (!caseDisplayNames.isEmpty()) {
postCorrelatedBadArtifactToBlackboard(bbArtifact,
caseDisplayNames);
}
}
eamArtifacts.add(eamArtifact);
}
} catch (EamDbException ex) {

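The counter and flag above are static and guarded by synchronized static accessors, so every Correlation Engine module instance across concurrent ingest jobs reads and writes the same values. A minimal sketch of the intended lifecycle, assuming a caller shaped like the Correlation Engine ingest module (the class name here is hypothetical; the real caller is IngestModule, shown later in this diff):

    import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListener;

    // Hypothetical module illustrating how the shared instance counter and
    // notable-item flag are meant to be driven over a module's lifecycle.
    final class ExampleCorrelationModule {

        private final boolean flagTaggedNotableItems;

        ExampleCorrelationModule(boolean flagTaggedNotableItems) {
            this.flagTaggedNotableItems = flagTaggedNotableItems;
        }

        void startUp() {
            IngestEventsListener.incrementCorrelationEngineModuleCount();
            // Only the first instance, or an instance that turns flagging on,
            // may change the shared flag; a later instance cannot turn it off.
            if (IngestEventsListener.getCeModuleInstanceCount() == 1
                    || !IngestEventsListener.isFlagNotableItems()) {
                IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems);
            }
        }

        void shutDown() {
            // Balanced with the increment in startUp(); the guard in
            // decrementCorrelationEngineModuleCount() keeps the count non-negative.
            IngestEventsListener.decrementCorrelationEngineModuleCount();
        }
    }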
View File

@ -0,0 +1,2 @@
IngestSettingsPanel.ingestSettingsLabel.text=Ingest Settings
IngestSettingsPanel.flagTaggedNotableItemsCheckbox.text=Flag items previously tagged as notable

View File

@ -56,9 +56,11 @@ import org.sleuthkit.autopsy.centralrepository.eventlisteners.IngestEventsListen
*/
@Messages({"IngestModule.prevTaggedSet.text=Previously Tagged As Notable (Central Repository)",
"IngestModule.prevCaseComment.text=Previous Case: "})
class IngestModule implements FileIngestModule {
final class IngestModule implements FileIngestModule {
private final static Logger LOGGER = Logger.getLogger(IngestModule.class.getName());
static final boolean DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS = true;
private final static Logger logger = Logger.getLogger(IngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private static final IngestModuleReferenceCounter warningMsgRefCounter = new IngestModuleReferenceCounter();
@ -68,8 +70,19 @@ class IngestModule implements FileIngestModule {
private Blackboard blackboard;
private CorrelationAttribute.Type filesType;
private final boolean flagTaggedNotableItems;
/**
* Instantiate the Correlation Engine ingest module.
*
* @param settings The ingest settings for the module instance.
*/
IngestModule(IngestSettings settings) {
flagTaggedNotableItems = settings.isFlagTaggedNotableItems();
}
@Override
public ProcessResult process(AbstractFile af) {
public ProcessResult process(AbstractFile abstractFile) {
if (EamDb.isEnabled() == false) {
/*
* Not signaling an error for now. This is a workaround for the way
@ -83,11 +96,11 @@ class IngestModule implements FileIngestModule {
try {
blackboard = Case.getOpenCase().getServices().getBlackboard();
} catch (NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex);
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
return ProcessResult.ERROR;
}
if (!EamArtifactUtil.isValidCentralRepoFile(af)) {
if (!EamArtifactUtil.isValidCentralRepoFile(abstractFile)) {
return ProcessResult.OK;
}
@ -95,7 +108,7 @@ class IngestModule implements FileIngestModule {
try {
dbManager = EamDb.getInstance();
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
return ProcessResult.ERROR;
}
@ -105,21 +118,23 @@ class IngestModule implements FileIngestModule {
}
// get the hash because we're going to correlate it
String md5 = af.getMd5Hash();
String md5 = abstractFile.getMd5Hash();
if ((md5 == null) || (HashUtility.isNoDataMd5(md5))) {
return ProcessResult.OK;
}
/* Search the central repo to see if this file was previously
* marked as being bad. Create artifact if it was. */
if (af.getKnown() != TskData.FileKnown.KNOWN) {
/*
* Search the central repo to see if this file was previously marked as
* being bad. Create artifact if it was.
*/
if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) {
try {
List<String> caseDisplayNames = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
if (!caseDisplayNames.isEmpty()) {
postCorrelatedBadFileToBlackboard(af, caseDisplayNames);
List<String> caseDisplayNamesList = dbManager.getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
if (!caseDisplayNamesList.isEmpty()) {
postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNamesList);
}
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error searching database for artifact.", ex); // NON-NLS
return ProcessResult.ERROR;
}
}
@ -130,14 +145,14 @@ class IngestModule implements FileIngestModule {
CorrelationAttributeInstance cefi = new CorrelationAttributeInstance(
eamCase,
eamDataSource,
af.getParentPath() + af.getName(),
abstractFile.getParentPath() + abstractFile.getName(),
null,
TskData.FileKnown.UNKNOWN // NOTE: Known status in the CR is based on tagging, not hashes like the Case Database.
);
eamArtifact.addInstance(cefi);
dbManager.prepareBulkArtifact(eamArtifact);
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error adding artifact to bulk artifacts.", ex); // NON-NLS
return ProcessResult.ERROR;
}
@ -147,6 +162,7 @@ class IngestModule implements FileIngestModule {
@Override
public void shutDown() {
IngestEventsListener.decrementCorrelationEngineModuleCount();
if ((EamDb.isEnabled() == false) || (eamCase == null) || (eamDataSource == null)) {
return;
}
@ -154,19 +170,19 @@ class IngestModule implements FileIngestModule {
try {
dbManager = EamDb.getInstance();
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
logger.log(Level.SEVERE, "Error connecting to Central Repository database.", ex);
return;
}
try {
dbManager.bulkInsertArtifacts();
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error doing bulk insert of artifacts.", ex); // NON-NLS
}
try {
Long count = dbManager.getCountArtifactInstancesByCaseDataSource(eamCase.getCaseUUID(), eamDataSource.getDeviceID());
LOGGER.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS
logger.log(Level.INFO, "{0} artifacts in db for case: {1} ds:{2}", new Object[]{count, eamCase.getDisplayName(), eamDataSource.getName()}); // NON-NLS
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error counting artifacts.", ex); // NON-NLS
}
// TODO: once we implement shared cache, if refCounter is 1, then submit data in bulk.
@ -181,6 +197,25 @@ class IngestModule implements FileIngestModule {
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
IngestEventsListener.incrementCorrelationEngineModuleCount();
/*
* Tell the IngestEventsListener to flag notable items based on the
* current module's configuration. This is a workaround for the lack of
* an artifacts pipeline. Note that the value can be changed by another
* module instance, and all module instances are affected by it. While
* not ideal, this will be good enough until a better solution can be
* devised.
*
* Note: Flagging cannot be disabled if any other instances of the
* Correlation Engine module are running. This restriction prevents
* missing results in the case where the first module instance is
* flagging notable items and a subsequent instance (with flagging
* disabled) would otherwise cause the first to stop flagging.
*/
if (IngestEventsListener.getCeModuleInstanceCount() == 1 || !IngestEventsListener.isFlagNotableItems()) {
IngestEventsListener.setFlagNotableItems(flagTaggedNotableItems);
}
if (EamDb.isEnabled() == false) {
/*
* Not throwing the customary exception for now. This is a
@ -200,14 +235,14 @@ class IngestModule implements FileIngestModule {
try {
autopsyCase = Case.getOpenCase();
} catch (NoCurrentCaseException ex) {
LOGGER.log(Level.SEVERE, "Exception while getting open case.", ex);
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
throw new IngestModuleException("Exception while getting open case.", ex);
}
// Don't allow sqlite central repo databases to be used for multi user cases
if ((autopsyCase.getCaseType() == Case.CaseType.MULTI_USER_CASE)
&& (EamDbPlatformEnum.getSelectedPlatform() == EamDbPlatformEnum.SQLITE)) {
LOGGER.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
logger.log(Level.SEVERE, "Cannot run correlation engine on a multi-user case with a SQLite central repository.");
throw new IngestModuleException("Cannot run on a multi-user case with a SQLite central repository."); // NON-NLS
}
jobId = context.getJobId();
@ -216,14 +251,14 @@ class IngestModule implements FileIngestModule {
try {
centralRepoDb = EamDb.getInstance();
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error connecting to central repository database.", ex); // NON-NLS
throw new IngestModuleException("Error connecting to central repository database.", ex); // NON-NLS
}
try {
filesType = centralRepoDb.getCorrelationTypeById(CorrelationAttribute.FILES_TYPE_ID);
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
throw new IngestModuleException("Error getting correlation type FILES in ingest module start up.", ex); // NON-NLS
}
@ -237,7 +272,7 @@ class IngestModule implements FileIngestModule {
try {
eamCase = centralRepoDb.newCase(autopsyCase);
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error creating new case in ingest module start up.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error creating new case in ingest module start up.", ex); // NON-NLS
throw new IngestModuleException("Error creating new case in ingest module start up.", ex); // NON-NLS
}
}
@ -245,7 +280,7 @@ class IngestModule implements FileIngestModule {
try {
eamDataSource = CorrelationDataSource.fromTSKDataSource(eamCase, context.getDataSource());
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error getting data source info.", ex); // NON-NLS
throw new IngestModuleException("Error getting data source info.", ex); // NON-NLS
}
// TODO: once we implement a shared cache, load/init it here w/ synchronized and define reference counter
@ -259,7 +294,7 @@ class IngestModule implements FileIngestModule {
centralRepoDb.newDataSource(eamDataSource);
}
} catch (EamDbException ex) {
LOGGER.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS
logger.log(Level.SEVERE, "Error adding data source to Central Repository.", ex); // NON-NLS
throw new IngestModuleException("Error adding data source to Central Repository.", ex); // NON-NLS
}
@ -282,7 +317,7 @@ class IngestModule implements FileIngestModule {
// index the artifact for keyword search
blackboard.indexArtifact(tifArtifact);
} catch (Blackboard.BlackboardException ex) {
LOGGER.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
logger.log(Level.SEVERE, "Unable to index blackboard artifact " + tifArtifact.getArtifactID(), ex); //NON-NLS
}
// send inbox message
@ -291,9 +326,9 @@ class IngestModule implements FileIngestModule {
// fire event to notify UI of this new artifact
services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT));
} catch (TskCoreException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
logger.log(Level.SEVERE, "Failed to create BlackboardArtifact.", ex); // NON-NLS
} catch (IllegalStateException ex) {
LOGGER.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
logger.log(Level.SEVERE, "Failed to create BlackboardAttribute.", ex); // NON-NLS
}
}
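
Stripped of error handling and the bulk-insert bookkeeping, the per-file decision that process() makes above reduces to the following sketch (field and method names are those appearing in this diff; this is an illustration, not the full implementation):

    // Simplified view of the flagging decision in IngestModule.process().
    ProcessResult correlate(AbstractFile abstractFile) throws EamDbException {
        String md5 = abstractFile.getMd5Hash();
        if (md5 == null || HashUtility.isNoDataMd5(md5)) {
            return ProcessResult.OK; // nothing usable to correlate
        }
        // Flag only files that are not already KNOWN, and only when the
        // "flag previously tagged notable items" setting is enabled.
        if (abstractFile.getKnown() != TskData.FileKnown.KNOWN && flagTaggedNotableItems) {
            List<String> caseDisplayNames = EamDb.getInstance()
                    .getListCasesHavingArtifactInstancesKnownBad(filesType, md5);
            if (!caseDisplayNames.isEmpty()) {
                postCorrelatedBadFileToBlackboard(abstractFile, caseDisplayNames);
            }
        }
        // The file's correlation attribute is still queued for bulk insert,
        // whether or not it was flagged (see the rest of process() above).
        return ProcessResult.OK;
    }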

View File

@ -1,7 +1,7 @@
/*
* Central Repository
*
* Copyright 2015-2017 Basis Technology Corp.
* Copyright 2015-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -25,6 +25,9 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.centralrepository.optionspanel.GlobalSettingsPanel;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.NoIngestModuleIngestJobSettings;
/**
* Factory for Central Repository ingest modules
@ -34,8 +37,11 @@ import org.sleuthkit.autopsy.centralrepository.optionspanel.GlobalSettingsPanel;
"IngestModuleFactory.ingestmodule.desc=Saves properties to the central repository for later correlation"})
public class IngestModuleFactory extends IngestModuleFactoryAdapter {
private static final String VERSION_NUMBER = "0.8.0";
/**
* Get the name of the module.
*
* @return The module name.
*/
static String getModuleName() {
return Bundle.IngestModuleFactory_ingestmodule_name();
}
@ -52,7 +58,7 @@ public class IngestModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleVersionNumber() {
return VERSION_NUMBER;
return Version.getVersion();
}
@Override
@ -61,8 +67,8 @@ public class IngestModuleFactory extends IngestModuleFactoryAdapter {
}
@Override
public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings ingestOptions) {
return new IngestModule();
public FileIngestModule createFileIngestModule(IngestModuleIngestJobSettings settings) {
return new IngestModule((IngestSettings) settings);
}
@Override
@ -77,4 +83,29 @@ public class IngestModuleFactory extends IngestModuleFactoryAdapter {
return globalOptionsPanel;
}
@Override
public IngestModuleIngestJobSettings getDefaultIngestJobSettings() {
return new IngestSettings();
}
@Override
public boolean hasIngestJobSettingsPanel() {
return true;
}
@Override
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
if (settings instanceof IngestSettings) {
return new IngestSettingsPanel((IngestSettings) settings);
}
/*
* Compatibility check for older versions.
*/
if (settings instanceof NoIngestModuleIngestJobSettings) {
return new IngestSettingsPanel(new IngestSettings());
}
throw new IllegalArgumentException("Expected settings argument to be an instance of IngestSettings");
}
}
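
For orientation, the settings round trip the factory now supports looks roughly like this; in Autopsy the ingest framework, not application code, drives these calls, so the snippet is purely illustrative:

    IngestModuleFactory factory = new IngestModuleFactory();

    // Defaults come from IngestSettings, which enables notable-item flagging.
    IngestModuleIngestJobSettings defaults = factory.getDefaultIngestJobSettings();

    // The panel edits the settings and hands back a fresh IngestSettings object.
    // getIngestJobSettingsPanel() rejects anything other than IngestSettings or
    // NoIngestModuleIngestJobSettings.
    IngestModuleIngestJobSettingsPanel panel = factory.getIngestJobSettingsPanel(defaults);
    IngestModuleIngestJobSettings edited = panel.getSettings();

    // Each file ingest module instance is constructed with the settings,
    // which createFileIngestModule() simply casts to IngestSettings.
    FileIngestModule module = factory.createFileIngestModule(edited);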

View File

@ -0,0 +1,71 @@
/*
* Central Repository
*
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
/**
* Ingest job settings for the Correlation Engine module.
*/
final class IngestSettings implements IngestModuleIngestJobSettings {
private static final long serialVersionUID = 1L;
private boolean flagTaggedNotableItems;
/**
* Instantiate the ingest job settings with default values.
*/
IngestSettings() {
this.flagTaggedNotableItems = IngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS;
}
/**
* Instantiate the ingest job settings.
*
* @param flagTaggedNotableItems Flag previously tagged notable items.
*/
IngestSettings(boolean flagTaggedNotableItems) {
this.flagTaggedNotableItems = flagTaggedNotableItems;
}
@Override
public long getVersionNumber() {
return serialVersionUID;
}
/**
* Are previously tagged notable items to be flagged?
*
* @return True if flagging; otherwise false.
*/
boolean isFlagTaggedNotableItems() {
return flagTaggedNotableItems;
}
/**
* Flag or ignore previously identified notable items.
*
* @param flagTaggedNotableItems True to flag previously tagged notable
* items; otherwise false.
*/
void setFlagTaggedNotableItems(boolean flagTaggedNotableItems) {
this.flagTaggedNotableItems = flagTaggedNotableItems;
}
}
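
A brief usage note on the new settings class (illustrative only):

    // Defaults flag previously tagged notable items
    // (IngestModule.DEFAULT_FLAG_TAGGED_NOTABLE_ITEMS is true in this diff).
    IngestSettings defaults = new IngestSettings();

    // Disable flagging for a particular ingest job.
    IngestSettings quiet = new IngestSettings(false);
    assert !quiet.isFlagTaggedNotableItems();

Since getVersionNumber() returns serialVersionUID, any change to the serialized form of the settings implies bumping that single constant.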

View File

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8" ?>
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
<AuxValues>
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="true"/>
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
</AuxValues>
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" attributes="0">
<EmptySpace min="10" pref="10" max="-2" attributes="0"/>
<Component id="flagTaggedNotableItemsCheckbox" min="-2" max="-2" attributes="0"/>
</Group>
<Component id="ingestSettingsLabel" min="-2" max="-2" attributes="0"/>
</Group>
<EmptySpace pref="65" max="32767" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="ingestSettingsLabel" min="-2" max="-2" attributes="0"/>
<EmptySpace type="unrelated" max="-2" attributes="0"/>
<Component id="flagTaggedNotableItemsCheckbox" min="-2" max="-2" attributes="0"/>
<EmptySpace pref="245" max="32767" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
</Layout>
<SubComponents>
<Component class="javax.swing.JLabel" name="ingestSettingsLabel">
<Properties>
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
<Font name="Tahoma" size="11" style="1"/>
</Property>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties" key="IngestSettingsPanel.ingestSettingsLabel.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
</Component>
<Component class="javax.swing.JCheckBox" name="flagTaggedNotableItemsCheckbox">
<Properties>
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/centralrepository/ingestmodule/Bundle.properties" key="IngestSettingsPanel.flagTaggedNotableItemsCheckbox.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
</Properties>
</Component>
</SubComponents>
</Form>

View File

@ -0,0 +1,97 @@
/*
* Central Repository
*
* Copyright 2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.centralrepository.ingestmodule;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
/**
* Ingest job settings panel for the Correlation Engine module.
*/
final class IngestSettingsPanel extends IngestModuleIngestJobSettingsPanel {
/**
* Creates new form IngestSettingsPanel
*/
public IngestSettingsPanel(IngestSettings settings) {
initComponents();
customizeComponents(settings);
}
/**
* Update components with values from the ingest job settings.
*
* @param settings The ingest job settings.
*/
private void customizeComponents(IngestSettings settings) {
flagTaggedNotableItemsCheckbox.setSelected(settings.isFlagTaggedNotableItems());
}
@Override
public IngestModuleIngestJobSettings getSettings() {
return new IngestSettings(flagTaggedNotableItemsCheckbox.isSelected());
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
ingestSettingsLabel = new javax.swing.JLabel();
flagTaggedNotableItemsCheckbox = new javax.swing.JCheckBox();
ingestSettingsLabel.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(ingestSettingsLabel, org.openide.util.NbBundle.getMessage(IngestSettingsPanel.class, "IngestSettingsPanel.ingestSettingsLabel.text")); // NOI18N
org.openide.awt.Mnemonics.setLocalizedText(flagTaggedNotableItemsCheckbox, org.openide.util.NbBundle.getMessage(IngestSettingsPanel.class, "IngestSettingsPanel.flagTaggedNotableItemsCheckbox.text")); // NOI18N
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(10, 10, 10)
.addComponent(flagTaggedNotableItemsCheckbox))
.addComponent(ingestSettingsLabel))
.addContainerGap(65, Short.MAX_VALUE))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(ingestSettingsLabel)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(flagTaggedNotableItemsCheckbox)
.addContainerGap(245, Short.MAX_VALUE))
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JCheckBox flagTaggedNotableItemsCheckbox;
private javax.swing.JLabel ingestSettingsLabel;
// End of variables declaration//GEN-END:variables
}

View File

@ -17,6 +17,9 @@
<Layout class="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout"/>
<SubComponents>
<Container class="javax.swing.JSplitPane" name="jSplitPane1">
<Properties>
<Property name="dividerLocation" type="int" value="500"/>
</Properties>
<Constraints>
<Constraint layoutClass="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout" value="org.netbeans.modules.form.compat2.layouts.DesignBorderLayout$BorderConstraintsDescription">
<BorderConstraints direction="Center"/>
@ -31,6 +34,8 @@
<JSplitPaneConstraints position="left"/>
</Constraint>
</Constraints>
<Layout class="org.netbeans.modules.form.compat2.layouts.support.JScrollPaneSupportLayout"/>
</Container>
</SubComponents>
</Container>

View File

@ -34,6 +34,7 @@ import org.openide.nodes.Children;
import org.openide.util.Lookup;
import org.openide.util.lookup.ProxyLookup;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.CommunicationsManager;
import org.sleuthkit.datamodel.TskCoreException;
@ -120,10 +121,12 @@ public final class AccountsBrowser extends JPanel implements ExplorerManager.Pro
@Subscribe
public void handleFilterEvent(CVTEvents.FilterChangeEvent filterChangeEvent) {
try {
final CommunicationsManager commsManager = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager();
final CommunicationsManager commsManager = Case.getOpenCase().getSleuthkitCase().getCommunicationsManager();
accountsTableEM.setRootContext(new AbstractNode(Children.create(new AccountDeviceInstanceNodeFactory(commsManager, filterChangeEvent.getNewFilter()), true)));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "There was an error getting the CommunicationsManager for the current case.", ex);
} catch (NoCurrentCaseException ex) { //NOPMD empty catch clause
//Case is closed, do nothing.
}
}
@ -141,6 +144,7 @@ public final class AccountsBrowser extends JPanel implements ExplorerManager.Pro
setLayout(new java.awt.BorderLayout());
jSplitPane1.setDividerLocation(500);
jSplitPane1.setLeftComponent(outlineView);
add(jSplitPane1, java.awt.BorderLayout.CENTER);

View File

@ -129,7 +129,7 @@ final public class VisualizationPanel extends JPanel implements Lookup.Provider
private final mxGraphComponent graphComponent;
private final CommunicationsGraph graph;
private mxUndoManager undoManager = new mxUndoManager();
private final mxUndoManager undoManager = new mxUndoManager();
private final mxRubberband rubberband;
private final mxFastOrganicLayout fastOrganicLayout;
private final mxCircleLayout circleLayout;

View File

@ -1,3 +1 @@
OpenIDE-Module-Name=CoreComponentInterfaces
CoreComponentControl.CTL_DirectoryTreeTopComponent=Directory Tree
CoreComponentControl.CTL_FavoritesTopComponent=Favorites

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Copyright 2011-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,31 +19,30 @@
package org.sleuthkit.autopsy.corecomponentinterfaces;
import java.util.Collection;
import java.util.Iterator;
import java.util.Set;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.util.Lookup;
import org.openide.util.NbBundle;
import org.openide.windows.Mode;
import org.openide.windows.TopComponent;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.corecomponents.DataContentTopComponent;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Responsible for opening and closing the core windows when a case is opened
* and closed.
*
* @author jantonius
*/
public class CoreComponentControl {
final public class CoreComponentControl {
private static final Logger logger = Logger.getLogger(CoreComponentControl.class.getName());
private static final String DIRECTORY_TREE = NbBundle.getMessage(CoreComponentControl.class,
"CoreComponentControl.CTL_DirectoryTreeTopComponent");
private static final String FAVORITES = NbBundle.getMessage(CoreComponentControl.class,
"CoreComponentControl.CTL_FavoritesTopComponent");
@NbBundle.Messages("CoreComponentControl.CTL_DirectoryTreeTopComponent=Directory Tree")
private static final String DIRECTORY_TREE = Bundle.CoreComponentControl_CTL_DirectoryTreeTopComponent();
@NbBundle.Messages("CoreComponentControl.CTL_FavoritesTopComponent=Favorites")
private static final String FAVORITES = Bundle.CoreComponentControl_CTL_FavoritesTopComponent();
private CoreComponentControl() {
}
/**
* Opens all TopComponent windows that are needed
@ -56,22 +55,22 @@ public class CoreComponentControl {
Collection<? extends DataExplorer> dataExplorers = Lookup.getDefault().lookupAll(DataExplorer.class);
for (DataExplorer de : dataExplorers) {
TopComponent explorerWin = de.getTopComponent();
Mode m = WindowManager.getDefault().findMode("explorer"); //NON-NLS
if (m != null) {
m.dockInto(explorerWin); // redock into the explorer mode
} else {
Mode explorerMode = WindowManager.getDefault().findMode("explorer"); //NON-NLS
if (explorerMode == null) {
logger.log(Level.WARNING, "Could not find explorer mode and dock explorer window"); //NON-NLS
} else {
explorerMode.dockInto(explorerWin); // redock into the explorer mode
}
explorerWin.open(); // open that top component
}
// find the data content top component
TopComponent contentWin = DataContentTopComponent.findInstance();
Mode m = WindowManager.getDefault().findMode("output"); //NON-NLS
if (m != null) {
m.dockInto(contentWin); // redock into the output mode
} else {
Mode outputMode = WindowManager.getDefault().findMode("output"); //NON-NLS
if (outputMode == null) {
logger.log(Level.WARNING, "Could not find output mode and dock content window"); //NON-NLS
} else {
outputMode.dockInto(contentWin); // redock into the output mode
}
contentWin.open(); // open that top component
@ -86,20 +85,15 @@ public class CoreComponentControl {
* be thrown from JFXPanel.
*/
public static void closeCoreWindows() {
WindowManager wm = WindowManager.getDefault();
Set<? extends Mode> modes = wm.getModes();
Iterator<? extends Mode> iter = wm.getModes().iterator();
TopComponent directoryTree = null;
TopComponent favorites = null;
String tcName = "";
while (iter.hasNext()) {
Mode mode = iter.next();
for (TopComponent tc : mode.getTopComponents()) {
tcName = tc.getName();
final WindowManager windowManager = WindowManager.getDefault();
for (Mode mode : windowManager.getModes()) {
for (TopComponent tc : windowManager.getOpenedTopComponents(mode)) {
String tcName = tc.getName();
if (tcName == null) {
logger.log(Level.INFO, "tcName was null"); //NON-NLS
tcName = "";
}
// switch requires constant strings, so converted to if/else.
if (DIRECTORY_TREE.equals(tcName)) {

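The rewritten openCoreWindows() above applies the same dock-or-warn pattern twice; as a standalone sketch (the helper name is hypothetical, while the window-system calls are the NetBeans APIs used in this diff):

    import java.util.logging.Level;
    import org.openide.windows.Mode;
    import org.openide.windows.TopComponent;
    import org.openide.windows.WindowManager;
    import org.sleuthkit.autopsy.coreutils.Logger;

    // Dock a TopComponent into a named mode when that mode exists, then open it.
    static void dockAndOpen(String modeName, TopComponent window) {
        Mode mode = WindowManager.getDefault().findMode(modeName); //NON-NLS
        if (mode == null) {
            Logger.getLogger(CoreComponentControl.class.getName())
                    .log(Level.WARNING, "Could not find mode {0}; opening window undocked", modeName); //NON-NLS
        } else {
            mode.dockInto(window); // redock into the named mode
        }
        window.open();
    }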
View File

@ -185,7 +185,8 @@ public final class DataContentTopComponent extends TopComponent implements DataC
} catch (NoCurrentCaseException ex) {
return true;
}
return (!this.isDefault) || openCase.hasData() == false;
return (this.isDefault == false) || (openCase.hasData() == false);
}
@Override

View File

@ -407,19 +407,19 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileModifiedTime.name"),
NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileModifiedTime.displayName"),
"",
file != null ? ContentUtils.getStringTime(file.getMtime(), file) : ""));
file == null ? "" : ContentUtils.getStringTime(file.getMtime(), file)));
ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileChangedTime.name"),
NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileChangedTime.displayName"),
"",
file != null ? ContentUtils.getStringTime(file.getCtime(), file) : ""));
file == null ? "" : ContentUtils.getStringTime(file.getCtime(), file)));
ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileAccessedTime.name"),
NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileAccessedTime.displayName"),
"",
file != null ? ContentUtils.getStringTime(file.getAtime(), file) : ""));
file == null ? "" : ContentUtils.getStringTime(file.getAtime(), file)));
ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileCreatedTime.name"),
NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileCreatedTime.displayName"),
"",
file != null ? ContentUtils.getStringTime(file.getCrtime(), file) : ""));
file == null ? "" : ContentUtils.getStringTime(file.getCrtime(), file)));
ss.put(new NodeProperty<>(NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileSize.name"),
NbBundle.getMessage(BlackboardArtifactNode.class, "ContentTagNode.createSheet.fileSize.displayName"),
"",
@ -427,7 +427,7 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
ss.put(new NodeProperty<>(Bundle.BlackboardArtifactNode_createSheet_artifactMD5_name(),
Bundle.BlackboardArtifactNode_createSheet_artifactMD5_displayName(),
"",
file != null ? StringUtils.defaultString(file.getMd5Hash()) : ""));
file == null ? "" : StringUtils.defaultString(file.getMd5Hash())));
}
} else {
String dataSourceStr = "";
@ -456,7 +456,6 @@ public class BlackboardArtifactNode extends AbstractContentNode<BlackboardArtifa
return s;
}
/**
* Used by (subclasses of) BlackboardArtifactNode to add the tags property
* to their sheets.

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Copyright 2012-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -20,7 +20,6 @@ package org.sleuthkit.autopsy.datamodel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import javax.swing.Action;
@ -29,14 +28,12 @@ import org.openide.util.Utilities;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.directorytree.ExtractAction;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.directorytree.ViewContextAction;
import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction;
import org.sleuthkit.autopsy.timeline.actions.ViewFileInTimelineAction;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
@ -46,8 +43,6 @@ import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
*/
public class DirectoryNode extends AbstractFsContentNode<AbstractFile> {
private static final Logger LOGGER = Logger.getLogger(DirectoryNode.class.getName());
public static final String DOTDOTDIR = NbBundle.getMessage(DirectoryNode.class, "DirectoryNode.parFolder.text");
public static final String DOTDIR = NbBundle.getMessage(DirectoryNode.class, "DirectoryNode.curFolder.text");
@ -95,7 +90,7 @@ public class DirectoryNode extends AbstractFsContentNode<AbstractFile> {
actionsList.add(null); // creates a menu separator
actionsList.add(ExtractAction.getInstance());
actionsList.add(null); // creates a menu separator
actionsList.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
actionsList.add(new RunIngestModulesAction(content));
actionsList.add(null); // creates a menu separator
actionsList.add(AddContentTagAction.getInstance());

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2017 Basis Technology Corp.
* Copyright 2017-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -19,7 +19,6 @@
package org.sleuthkit.autopsy.datamodel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.swing.Action;
import org.openide.util.NbBundle;
@ -29,7 +28,6 @@ import org.sleuthkit.autopsy.directorytree.FileSearchAction;
import org.sleuthkit.autopsy.directorytree.NewWindowViewAction;
import org.sleuthkit.autopsy.ingest.runIngestModuleWizard.RunIngestModulesAction;
import org.sleuthkit.datamodel.SpecialDirectory;
import org.sleuthkit.datamodel.Content;
/**
* Parent class for special directory types (Local and Virtual)
@ -61,9 +59,8 @@ public abstract class SpecialDirectoryNode extends AbstractAbstractFileNode<Spec
actions.add(null); // creates a menu separator
actions.add(ExtractAction.getInstance());
actions.add(null); // creates a menu separator
actions.add(new FileSearchAction(
Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
actions.add(new RunIngestModulesAction(Collections.<Content>singletonList(content)));
actions.add(new FileSearchAction(Bundle.ImageNode_getActions_openFileSearchByAttr_text()));
actions.add(new RunIngestModulesAction(content));
actions.addAll(ContextMenuExtensionPoint.getActions());
return actions.toArray(new Action[0]);
}

View File

@ -88,7 +88,7 @@ import org.sleuthkit.datamodel.TskData.DbType;
final public class Accounts implements AutopsyVisitableItem {
private static final Logger LOGGER = Logger.getLogger(Accounts.class.getName());
private static final String iconBasePath = "/org/sleuthkit/autopsy/images/"; //NON-NLS
private static final String ICON_BASE_PATH = "/org/sleuthkit/autopsy/images/"; //NON-NLS
@NbBundle.Messages("AccountsRootNode.name=Accounts")
final public static String NAME = Bundle.AccountsRootNode_name();
@ -96,10 +96,8 @@ final public class Accounts implements AutopsyVisitableItem {
private SleuthkitCase skCase;
private final EventBus reviewStatusBus = new EventBus("ReviewStatusBus");
/**
* Should rejected accounts be shown in the accounts section of the tree.
*/
private boolean showRejected = false;
/* Should rejected accounts be shown in the accounts section of the tree. */
private boolean showRejected = false; //NOPMD redundant initializer
private final RejectAccounts rejectActionInstance;
private final ApproveAccounts approveActionInstance;
@ -717,8 +715,8 @@ final public class Accounts implements AutopsyVisitableItem {
@Override
protected boolean createKeys(List<FileWithCCN> list) {
String query
= "SELECT blackboard_artifacts.obj_id," //NON-NLS
String query =
"SELECT blackboard_artifacts.obj_id," //NON-NLS
+ " solr_attribute.value_text AS solr_document_id, "; //NON-NLS
if (skCase.getDatabaseType().equals(DbType.POSTGRESQL)) {
query += " string_agg(blackboard_artifacts.artifact_id::character varying, ',') AS artifact_IDs, " //NON-NLS
@ -739,14 +737,14 @@ final public class Accounts implements AutopsyVisitableItem {
+ " GROUP BY blackboard_artifacts.obj_id, solr_document_id " //NON-NLS
+ " ORDER BY hits DESC "; //NON-NLS
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
ResultSet rs = results.getResultSet();) {
while (rs.next()) {
ResultSet resultSet = results.getResultSet();) {
while (resultSet.next()) {
list.add(new FileWithCCN(
rs.getLong("obj_id"), //NON-NLS
rs.getString("solr_document_id"), //NON-NLS
unGroupConcat(rs.getString("artifact_IDs"), Long::valueOf), //NON-NLS
rs.getLong("hits"), //NON-NLS
new HashSet<>(unGroupConcat(rs.getString("review_status_ids"), id -> BlackboardArtifact.ReviewStatus.withID(Integer.valueOf(id)))))); //NON-NLS
resultSet.getLong("obj_id"), //NON-NLS
resultSet.getString("solr_document_id"), //NON-NLS
unGroupConcat(resultSet.getString("artifact_IDs"), Long::valueOf), //NON-NLS
resultSet.getLong("hits"), //NON-NLS
new HashSet<>(unGroupConcat(resultSet.getString("review_status_ids"), reviewStatusID -> BlackboardArtifact.ReviewStatus.withID(Integer.valueOf(reviewStatusID)))))); //NON-NLS
}
} catch (TskCoreException | SQLException ex) {
LOGGER.log(Level.SEVERE, "Error querying for files with ccn hits.", ex); //NON-NLS
@ -794,8 +792,8 @@ final public class Accounts implements AutopsyVisitableItem {
"# {0} - number of children",
"Accounts.ByFileNode.displayName=By File ({0})"})
private void updateDisplayName() {
String query
= "SELECT count(*) FROM ( SELECT count(*) AS documents "
String query =
"SELECT count(*) FROM ( SELECT count(*) AS documents "
+ " FROM blackboard_artifacts " //NON-NLS
+ " LEFT JOIN blackboard_attributes as solr_attribute ON blackboard_artifacts.artifact_id = solr_attribute.artifact_id " //NON-NLS
+ " AND solr_attribute.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_SEARCH_DOCUMENT_ID.getTypeID() //NON-NLS
@ -806,12 +804,12 @@ final public class Accounts implements AutopsyVisitableItem {
+ getRejectedArtifactFilterClause()
+ " GROUP BY blackboard_artifacts.obj_id, solr_attribute.value_text ) AS foo";
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
ResultSet rs = results.getResultSet();) {
while (rs.next()) {
ResultSet resultSet = results.getResultSet();) {
while (resultSet.next()) {
if (skCase.getDatabaseType().equals(DbType.POSTGRESQL)) {
setDisplayName(Bundle.Accounts_ByFileNode_displayName(rs.getLong("count")));
setDisplayName(Bundle.Accounts_ByFileNode_displayName(resultSet.getLong("count")));
} else {
setDisplayName(Bundle.Accounts_ByFileNode_displayName(rs.getLong("count(*)")));
setDisplayName(Bundle.Accounts_ByFileNode_displayName(resultSet.getLong("count(*)")));
}
}
} catch (TskCoreException | SQLException ex) {
@ -872,7 +870,7 @@ final public class Accounts implements AutopsyVisitableItem {
&& eventData.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()) {
reviewStatusBus.post(eventData);
}
} catch (NoCurrentCaseException notUsed) {
} catch (NoCurrentCaseException notUsed) { //NOPMD empty catch clause
// Case is closed, do nothing.
}
} else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
@ -887,17 +885,16 @@ final public class Accounts implements AutopsyVisitableItem {
Case.getOpenCase();
refresh(true);
} catch (NoCurrentCaseException notUsed) {
} catch (NoCurrentCaseException notUsed) { //NOPMD empty catch clause
// Case is closed, do nothing.
}
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
} else if (eventType.equals(Case.Events.CURRENT_CASE.toString())
&& (evt.getNewValue() == null)) {
// case was closed. Remove listeners so that we don't get called with a stale case handle
if (evt.getNewValue() == null) {
removeNotify();
skCase = null;
}
}
}
};
@Override
@ -931,8 +928,8 @@ final public class Accounts implements AutopsyVisitableItem {
RangeMap<Integer, BinResult> binRanges = TreeRangeMap.create();
String query
= "SELECT SUBSTR(blackboard_attributes.value_text,1,8) AS BIN, " //NON-NLS
String query =
"SELECT SUBSTR(blackboard_attributes.value_text,1,8) AS BIN, " //NON-NLS
+ " COUNT(blackboard_artifacts.artifact_id) AS count " //NON-NLS
+ " FROM blackboard_artifacts " //NON-NLS
+ " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id" //NON-NLS
@ -941,8 +938,8 @@ final public class Accounts implements AutopsyVisitableItem {
+ getRejectedArtifactFilterClause()
+ " GROUP BY BIN " //NON-NLS
+ " ORDER BY BIN "; //NON-NLS
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query)) {
ResultSet resultSet = results.getResultSet();
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
ResultSet resultSet = results.getResultSet();) {
//sort all the individual bins into the ranges
while (resultSet.next()) {
final Integer bin = Integer.valueOf(resultSet.getString("BIN"));
@ -956,16 +953,15 @@ final public class Accounts implements AutopsyVisitableItem {
count += previousResult.getCount();
}
if (binRange != null) {
binRanges.put(Range.closed(binRange.getBINstart(), binRange.getBINend()), new BinResult(count, binRange));
} else {
if (binRange == null) {
binRanges.put(Range.closed(bin, bin), new BinResult(count, bin, bin));
} else {
binRanges.put(Range.closed(binRange.getBINstart(), binRange.getBINend()), new BinResult(count, binRange));
}
}
binRanges.asMapOfRanges().values().forEach(list::add);
} catch (TskCoreException | SQLException ex) {
LOGGER.log(Level.SEVERE, "Error querying for BINs.", ex); //NON-NLS
}
return true;
@ -999,15 +995,15 @@ final public class Accounts implements AutopsyVisitableItem {
"# {0} - number of children",
"Accounts.ByBINNode.displayName=By BIN ({0})"})
private void updateDisplayName() {
String query
= "SELECT count(distinct SUBSTR(blackboard_attributes.value_text,1,8)) AS BINs " //NON-NLS
String query =
"SELECT count(distinct SUBSTR(blackboard_attributes.value_text,1,8)) AS BINs " //NON-NLS
+ " FROM blackboard_artifacts " //NON-NLS
+ " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id" //NON-NLS
+ " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_CARD_NUMBER.getTypeID() //NON-NLS
+ getRejectedArtifactFilterClause(); //NON-NLS
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query)) {
ResultSet resultSet = results.getResultSet();
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
ResultSet resultSet = results.getResultSet();) {
while (resultSet.next()) {
setDisplayName(Bundle.Accounts_ByBINNode_displayName(resultSet.getLong("BINs")));
}
@ -1188,7 +1184,8 @@ final public class Accounts implements AutopsyVisitableItem {
* @param key The FileWithCCN that backs this node.
* @param content The Content object the key represents.
* @param lookupContents The contents of this Node's lookup. It should
* contain the content object and the account artifacts.
* contain the content object and the account
* artifacts.
*/
@NbBundle.Messages({
"# {0} - raw file name",
@ -1226,29 +1223,29 @@ final public class Accounts implements AutopsyVisitableItem {
"Accounts.FileWithCCNNode.statusProperty.displayName=Status",
"Accounts.FileWithCCNNode.noDescription=no description"})
protected Sheet createSheet() {
Sheet s = super.createSheet();
Sheet.Set ss = s.get(Sheet.PROPERTIES);
if (ss == null) {
ss = Sheet.createPropertiesSet();
s.put(ss);
Sheet sheet = super.createSheet();
Sheet.Set propSet = sheet.get(Sheet.PROPERTIES);
if (propSet == null) {
propSet = Sheet.createPropertiesSet();
sheet.put(propSet);
}
ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_nameProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_noDescription(),
fileName));
ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_accountsProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_noDescription(),
fileKey.getHits()));
ss.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
propSet.put(new NodeProperty<>(Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_statusProperty_displayName(),
Bundle.Accounts_FileWithCCNNode_noDescription(),
fileKey.getStatuses().stream()
.map(BlackboardArtifact.ReviewStatus::getDisplayName)
.collect(Collectors.joining(", ")))); //NON-NLS
return s;
return sheet;
}
@Override
@ -1292,8 +1289,8 @@ final public class Accounts implements AutopsyVisitableItem {
@Override
protected boolean createKeys(List<Long> list) {
String query
= "SELECT blackboard_artifacts.artifact_id " //NON-NLS
String query =
"SELECT blackboard_artifacts.artifact_id " //NON-NLS
+ " FROM blackboard_artifacts " //NON-NLS
+ " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id " //NON-NLS
+ " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
@ -1339,9 +1336,7 @@ final public class Accounts implements AutopsyVisitableItem {
final public class BINNode extends DisplayableItemNode {
/**
* Creates the nodes for the credit card numbers
*/
/** Creates the nodes for the credit card numbers */
private final BinResult bin;
private BINNode(BinResult bin) {
@ -1365,8 +1360,8 @@ final public class Accounts implements AutopsyVisitableItem {
}
private void updateDisplayName() {
String query
= "SELECT count(blackboard_artifacts.artifact_id ) AS count" //NON-NLS
String query =
"SELECT count(blackboard_artifacts.artifact_id ) AS count" //NON-NLS
+ " FROM blackboard_artifacts " //NON-NLS
+ " JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id " //NON-NLS
+ " WHERE blackboard_artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID() //NON-NLS
@ -1374,9 +1369,9 @@ final public class Accounts implements AutopsyVisitableItem {
+ " AND blackboard_attributes.value_text >= '" + bin.getBINStart() + "' AND blackboard_attributes.value_text < '" + (bin.getBINEnd() + 1) + "'" //NON-NLS
+ getRejectedArtifactFilterClause();
try (SleuthkitCase.CaseDbQuery results = skCase.executeQuery(query);
ResultSet rs = results.getResultSet();) {
while (rs.next()) {
setDisplayName(getBinRangeString(bin) + " (" + rs.getLong("count") + ")"); //NON-NLS
ResultSet resultSet = results.getResultSet();) {
while (resultSet.next()) {
setDisplayName(getBinRangeString(bin) + " (" + resultSet.getLong("count") + ")"); //NON-NLS
}
} catch (TskCoreException | SQLException ex) {
LOGGER.log(Level.SEVERE, "Error querying for account artifacts.", ex); //NON-NLS
@ -1506,9 +1501,7 @@ final public class Accounts implements AutopsyVisitableItem {
return true;
}
/**
* The number of accounts with this BIN
*/
/** The number of accounts with this BIN */
private final long count;
private final BINRange binRange;
@ -1598,7 +1591,7 @@ final public class Accounts implements AutopsyVisitableItem {
private AccountArtifactNode(BlackboardArtifact artifact) {
super(artifact, "org/sleuthkit/autopsy/images/credit-card.png"); //NON-NLS
this.artifact = artifact;
setName("" + this.artifact.getArtifactID());
setName(Long.toString(this.artifact.getArtifactID()));
reviewStatusBus.register(this);
}
@ -1728,7 +1721,7 @@ final public class Accounts implements AutopsyVisitableItem {
selectedPaths.forEach(path -> {
try {
toArray.add(NodeOp.findPath(rootNode, path));
} catch (NodeNotFoundException ex) {
} catch (NodeNotFoundException ex) { //NOPMD empty catch clause
//just ignore paths that don't exist. This is expected since we are rejecting
}
});
@ -1772,25 +1765,25 @@ final public class Accounts implements AutopsyVisitableItem {
public static String getIconFilePath(Account.Type type) {
if (type.equals(Account.Type.CREDIT_CARD)) {
return iconBasePath + "credit-card.png";
return ICON_BASE_PATH + "credit-card.png";
} else if (type.equals(Account.Type.DEVICE)) {
return iconBasePath + "image.png";
return ICON_BASE_PATH + "image.png";
} else if (type.equals(Account.Type.EMAIL)) {
return iconBasePath + "email.png";
return ICON_BASE_PATH + "email.png";
} else if (type.equals(Account.Type.FACEBOOK)) {
return iconBasePath + "facebook.png";
return ICON_BASE_PATH + "facebook.png";
} else if (type.equals(Account.Type.INSTAGRAM)) {
return iconBasePath + "instagram.png";
return ICON_BASE_PATH + "instagram.png";
} else if (type.equals(Account.Type.MESSAGING_APP)) {
return iconBasePath + "messaging.png";
return ICON_BASE_PATH + "messaging.png";
} else if (type.equals(Account.Type.PHONE)) {
return iconBasePath + "phone.png";
return ICON_BASE_PATH + "phone.png";
} else if (type.equals(Account.Type.TWITTER)) {
return iconBasePath + "twitter.png";
return ICON_BASE_PATH + "twitter.png";
} else if (type.equals(Account.Type.WEBSITE)) {
return iconBasePath + "web-file.png";
return ICON_BASE_PATH + "web-file.png";
} else if (type.equals(Account.Type.WHATSAPP)) {
return iconBasePath + "WhatsApp.png";
return ICON_BASE_PATH + "WhatsApp.png";
} else {
//there could be a default icon instead...
throw new IllegalArgumentException("Unknown Account.Type: " + type.getTypeName());

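The BIN grouping above leans on Guava's RangeMap; a stripped-down illustration of the accumulation step follows (the knownRange parameter stands in for the BIN-database lookup Autopsy performs, and the merge semantics are simplified relative to the real code):

    import java.util.Collection;
    import com.google.common.collect.Range;
    import com.google.common.collect.RangeMap;
    import com.google.common.collect.TreeRangeMap;

    // Fold (BIN, count) rows from the query into ranges of known BINs.
    final class BinAccumulator {

        private final RangeMap<Integer, Long> binRanges = TreeRangeMap.create();

        // If a range covering this BIN already holds a total, add to it;
        // BINs with no known range get a degenerate single-value range.
        void add(int bin, long count, Range<Integer> knownRange) {
            Long previous = binRanges.get(bin);
            long total = (previous == null) ? count : previous + count;
            binRanges.put(knownRange == null ? Range.closed(bin, bin) : knownRange, total);
        }

        // One aggregated total per distinct range, as used to build child nodes.
        Collection<Long> totals() {
            return binRanges.asMapOfRanges().values();
        }
    }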
View File

@ -23,12 +23,14 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.logging.Level;
import javax.swing.AbstractAction;
import javax.swing.Action;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
import org.sleuthkit.autopsy.actions.AddContentTagAction;
import org.sleuthkit.autopsy.actions.DeleteFileContentTagAction;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.coreutils.ContextMenuExtensionPoint;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
@ -41,6 +43,7 @@ import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.LocalDirectory;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.autopsy.coreutils.Logger;
public class ExplorerNodeActionVisitor extends ContentVisitor.Default<List<? extends Action>> {
@ -71,8 +74,12 @@ public class ExplorerNodeActionVisitor extends ContentVisitor.Default<List<? ext
public List<? extends Action> visit(final Image img) {
List<Action> lst = new ArrayList<>();
//TODO lst.add(new ExtractAction("Extract Image", img));
try {
lst.add(new ExtractUnallocAction(
NbBundle.getMessage(this.getClass(), "ExplorerNodeActionVisitor.action.extUnallocToSingleFiles"), img));
} catch (NoCurrentCaseException ex) {
Logger.getLogger(ExplorerNodeActionVisitor.class.getName()).log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
}
return lst;
}
@ -86,6 +93,7 @@ public class ExplorerNodeActionVisitor extends ContentVisitor.Default<List<? ext
List<AbstractAction> lst = new ArrayList<>();
lst.add(new ExtractUnallocAction(
NbBundle.getMessage(this.getClass(), "ExplorerNodeActionVisitor.action.extUnallocToSingleFile"), vol));
return lst;
}

View File

@ -69,14 +69,20 @@ final class ExtractUnallocAction extends AbstractAction {
private long currentImage = 0L;
private final boolean isImage;
public ExtractUnallocAction(String title, Volume volume) {
public ExtractUnallocAction(String title, Volume volume){
super(title);
isImage = false;
try {
OutputFileData outputFileData = new OutputFileData(volume);
filesToExtract.add(outputFileData);
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
setEnabled(false);
}
public ExtractUnallocAction(String title, Image image) {
}
public ExtractUnallocAction(String title, Image image) throws NoCurrentCaseException {
super(title);
isImage = true;
currentImage = image.getId();
@ -595,15 +601,17 @@ final class ExtractUnallocAction extends AbstractAction {
* Contingency constructor in the event no VolumeSystem exists on an Image.
*
* @param img Image file to be analyzed
*
* @throws NoCurrentCaseException if there is no open case.
*/
OutputFileData(Image img) {
OutputFileData(Image img) throws NoCurrentCaseException {
this.layoutFiles = getUnallocFiles(img);
Collections.sort(layoutFiles, new SortObjId());
this.volumeId = 0;
this.imageId = img.getId();
this.imageName = img.getName();
this.fileName = this.imageName + "-Unalloc-" + this.imageId + "-" + 0 + ".dat"; //NON-NLS
this.fileInstance = new File(Case.getCurrentCase().getExportDirectory() + File.separator + this.fileName);
this.fileInstance = new File(Case.getOpenCase().getExportDirectory() + File.separator + this.fileName);
this.sizeInBytes = calcSizeInBytes();
}
@ -611,8 +619,10 @@ final class ExtractUnallocAction extends AbstractAction {
* Default constructor for extracting info from Volumes.
*
* @param volume Volume file to be analyzed
*
* @throws NoCurrentCaseException if there is no open case.
*/
OutputFileData(Volume volume) {
OutputFileData(Volume volume) throws NoCurrentCaseException {
try {
this.imageName = volume.getDataSource().getName();
this.imageId = volume.getDataSource().getId();
@ -623,7 +633,7 @@ final class ExtractUnallocAction extends AbstractAction {
this.imageId = 0;
}
this.fileName = this.imageName + "-Unalloc-" + this.imageId + "-" + volumeId + ".dat"; //NON-NLS
this.fileInstance = new File(Case.getCurrentCase().getExportDirectory() + File.separator + this.fileName);
this.fileInstance = new File(Case.getOpenCase().getExportDirectory() + File.separator + this.fileName);
this.layoutFiles = getUnallocFiles(volume);
Collections.sort(layoutFiles, new SortObjId());
this.sizeInBytes = calcSizeInBytes();

View File

@ -57,13 +57,16 @@ final class DataSourceIngestJob {
/**
* These fields define a data source ingest job: the parent ingest job, an
* ID, the user's ingest job settings, and the data source to be processed.
* ID, the user's ingest job settings, and the data source to be analyzed.
* Optionally, there is a set of files to be analyzed instead of analyzing
* all of the files in the data source.
*/
private final IngestJob parentJob;
private static final AtomicLong nextJobId = new AtomicLong(0L);
private final long id;
private final IngestJobSettings settings;
private final Content dataSource;
private final List<AbstractFile> files = new ArrayList<>();
/**
* A data source ingest job runs in stages.
@ -171,7 +174,7 @@ final class DataSourceIngestJob {
/**
* Constructs an object that encapsulates a data source and the ingest
* module pipelines used to process it.
* module pipelines used to analyze it.
*
* @param parentJob The ingest job of which this data source ingest
* job is a part.
@ -181,9 +184,27 @@ final class DataSourceIngestJob {
* progress handles.
*/
DataSourceIngestJob(IngestJob parentJob, Content dataSource, IngestJobSettings settings, boolean runInteractively) {
this(parentJob, dataSource, Collections.emptyList(), settings, runInteractively);
}
/**
* Constructs an object that encapsulates a data source and the ingest
* module pipelines used to analyze it. Either all of the files in the data
* source or a given subset of the files will be analyzed.
*
* @param parentJob The ingest job of which this data source ingest
* job is a part.
* @param dataSource The data source to be ingested.
* @param files A subset of the files for the data source.
* @param settings The settings for the ingest job.
* @param runInteractively Whether or not this job should use NetBeans
* progress handles.
*/
DataSourceIngestJob(IngestJob parentJob, Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean runInteractively) {
this.parentJob = parentJob;
this.id = DataSourceIngestJob.nextJobId.getAndIncrement();
this.dataSource = dataSource;
this.files.addAll(files);
this.settings = settings;
this.doUI = runInteractively;
this.createTime = new Date().getTime();
@ -497,13 +518,13 @@ final class DataSourceIngestJob {
*/
if (this.hasFirstStageDataSourceIngestPipeline() && this.hasFileIngestPipeline()) {
logger.log(Level.INFO, "Scheduling first stage data source and file level analysis tasks for {0} (jobId={1})", new Object[]{dataSource.getName(), this.id}); //NON-NLS
DataSourceIngestJob.taskScheduler.scheduleIngestTasks(this);
DataSourceIngestJob.taskScheduler.scheduleIngestTasks(this, this.files);
} else if (this.hasFirstStageDataSourceIngestPipeline()) {
logger.log(Level.INFO, "Scheduling first stage data source level analysis tasks for {0} (jobId={1}), no file level analysis configured", new Object[]{dataSource.getName(), this.id}); //NON-NLS
DataSourceIngestJob.taskScheduler.scheduleDataSourceIngestTask(this);
} else {
logger.log(Level.INFO, "Scheduling file level analysis tasks for {0} (jobId={1}), no first stage data source level analysis configured", new Object[]{dataSource.getName(), this.id}); //NON-NLS
DataSourceIngestJob.taskScheduler.scheduleFileIngestTasks(this);
DataSourceIngestJob.taskScheduler.scheduleFileIngestTasks(this, this.files);
/**
* No data source ingest task has been scheduled for this stage, and
@ -815,7 +836,7 @@ final class DataSourceIngestJob {
void addFiles(List<AbstractFile> files) {
if (DataSourceIngestJob.Stages.FIRST == this.stage) {
for (AbstractFile file : files) {
DataSourceIngestJob.taskScheduler.scheduleFileIngestTask(this, file);
DataSourceIngestJob.taskScheduler.scheduleFastTrackedFileIngestTask(this, file);
}
} else {
DataSourceIngestJob.logger.log(Level.SEVERE, "Adding files during second stage not supported"); //NON-NLS

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2016 Basis Technology Corp.
* Copyright 2014-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -29,13 +29,12 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* Runs a collection of data sources through a set of ingest modules specified
* via ingest job settings.
* <p>
* This class is thread-safe.
* Analyzes one or more data sources using a set of ingest modules specified via
* ingest job settings.
*/
public final class IngestJob {
@ -69,7 +68,7 @@ public final class IngestJob {
private volatile CancellationReason cancellationReason;
/**
* Constructs an ingest job that runs a collection of data sources through a
* Constructs an ingest job that analyzes one or more data sources using a
* set of ingest modules specified via ingest job settings.
*
* @param dataSources The data sources to be ingested.
@ -88,6 +87,26 @@ public final class IngestJob {
cancellationReason = CancellationReason.NOT_CANCELLED;
}
/**
* Constructs an ingest job that analyzes one data source using a set of
* ingest modules specified via ingest job settings. Either all of the files
* in the data source or a given subset of the files will be analyzed.
*
* @param dataSource The data source to be analyzed
* @param files A subset of the files for the data source.
* @param settings The ingest job settings.
* @param doUI Whether or not this job should use progress bars,
* message boxes for errors, etc.
*/
IngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings, boolean doUI) {
this.id = IngestJob.nextId.getAndIncrement();
this.dataSourceJobs = new ConcurrentHashMap<>();
DataSourceIngestJob dataSourceIngestJob = new DataSourceIngestJob(this, dataSource, files, settings, doUI);
this.dataSourceJobs.put(dataSourceIngestJob.getId(), dataSourceIngestJob);
incompleteJobsCount = new AtomicInteger(dataSourceJobs.size());
cancellationReason = CancellationReason.NOT_CANCELLED;
}
/**
* Gets the unique identifier assigned to this ingest job.
*

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2012-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -298,7 +298,7 @@ public class IngestManager {
/**
* Queues an ingest job for one or more data sources.
*
* @param dataSources The data sources to process.
* @param dataSources The data sources to analyze.
* @param settings The settings for the ingest job.
*/
public void queueIngestJob(Collection<Content> dataSources, IngestJobSettings settings) {
@ -312,6 +312,25 @@ public class IngestManager {
}
}
/**
* Queues an ingest job for a data source. Either all of the files in
* the data source or a given subset of the files will be analyzed.
*
* @param dataSource The data source to analyze.
* @param files A subset of the files for the data source.
* @param settings The settings for the ingest job.
*/
public void queueIngestJob(Content dataSource, List<AbstractFile> files, IngestJobSettings settings) {
if (caseIsOpen) {
IngestJob job = new IngestJob(dataSource, files, settings, RuntimeProperties.runningWithGUI());
if (job.hasIngestPipeline()) {
long taskId = nextIngestManagerTaskId.incrementAndGet();
Future<Void> task = startIngestJobsExecutor.submit(new StartIngestJobTask(taskId, job));
startIngestJobFutures.put(taskId, task);
}
}
}
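// A minimal usage sketch for the method above (assumes an open case, a data
// source, a non-empty subset of its files, and configured ingest job settings;
// the variable names here are placeholders):
//
//   IngestManager.getInstance().queueIngestJob(dataSource, selectedFiles, ingestJobSettings);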
/**
* Immediately starts an ingest job for one or more data sources.
*

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012-2017 Basis Technology Corp.
* Copyright 2012-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -143,15 +143,14 @@ final class IngestTasksScheduler {
}
/**
* Schedules a data source ingest task and file ingest tasks for an ingest
* job.
* Schedules a data source level ingest task and file level ingest tasks for
* an ingest job. Either all of the files in the data source or a given
* subset of the files will be scheduled.
*
* @param job The job for which the tasks are to be scheduled.
*
* @throws InterruptedException if the calling thread is blocked due to a
* full tasks queue and is interrupted.
* @param job The data source ingest job.
* @param files A subset of the files for the data source.
*/
synchronized void scheduleIngestTasks(DataSourceIngestJob job) {
synchronized void scheduleIngestTasks(DataSourceIngestJob job, List<AbstractFile> files) {
if (!job.isCancelled()) {
// Scheduling of both a data source ingest task and file ingest tasks
// for a job must be an atomic operation. Otherwise, the data source
@ -159,14 +158,14 @@ final class IngestTasksScheduler {
// resulting in a potential false positive when another thread checks
// whether or not all the tasks for the job are completed.
this.scheduleDataSourceIngestTask(job);
this.scheduleFileIngestTasks(job);
this.scheduleFileIngestTasks(job, files);
}
}
/**
* Schedules a data source ingest task for an ingest job.
* Schedules a data source level ingest task for a data source ingest job.
*
* @param job The job for which the tasks are to be scheduled.
* @param job The data source ingest job.
*/
synchronized void scheduleDataSourceIngestTask(DataSourceIngestJob job) {
if (!job.isCancelled()) {
@ -186,16 +185,22 @@ final class IngestTasksScheduler {
}
/**
* Schedules file ingest tasks for an ingest job.
* Schedules file level ingest tasks for a data source ingest job. Either
* all of the files in the data source or a given subset of the files will
* be scheduled.
*
* @param job The job for which the tasks are to be scheduled.
* @param job The data source ingest job.
* @param files A subset of the files for the data source.
*/
synchronized void scheduleFileIngestTasks(DataSourceIngestJob job) {
synchronized void scheduleFileIngestTasks(DataSourceIngestJob job, List<AbstractFile> files) {
if (!job.isCancelled()) {
// Get the top level files for the data source associated with this job
// and add them to the root directories priority queue.
List<AbstractFile> topLevelFiles = getTopLevelFiles(job.getDataSource());
for (AbstractFile firstLevelFile : topLevelFiles) {
List<AbstractFile> candidateFiles = new ArrayList<>();
if (files.isEmpty()) {
getTopLevelFiles(job.getDataSource(), candidateFiles);
} else {
candidateFiles.addAll(files);
}
for (AbstractFile firstLevelFile : candidateFiles) {
FileIngestTask task = new FileIngestTask(job, firstLevelFile);
if (IngestTasksScheduler.shouldEnqueueFileTask(task)) {
this.tasksInProgress.add(task);
@ -207,12 +212,14 @@ final class IngestTasksScheduler {
}
/**
* Schedules a file ingest task for an ingest job.
* Schedules a file ingest task for a data source ingest job. The task that
* is created is added directly to the pending file tasks queues, i.e., it
* is "fast tracked."
*
* @param job The job for which the tasks are to be scheduled.
* @param file The file to be associated with the task.
* @param job The data source ingest job.
* @param file A file.
*/
synchronized void scheduleFileIngestTask(DataSourceIngestJob job, AbstractFile file) {
synchronized void scheduleFastTrackedFileIngestTask(DataSourceIngestJob job, AbstractFile file) {
if (!job.isCancelled()) {
FileIngestTask task = new FileIngestTask(job, file);
if (IngestTasksScheduler.shouldEnqueueFileTask(task)) {
@ -280,11 +287,9 @@ final class IngestTasksScheduler {
* tasks to put into the root directories queue.
*
* @param dataSource The data source.
*
* @return A list of top level files.
* @param topLevelFiles The top level files are added to this list.
*/
private static List<AbstractFile> getTopLevelFiles(Content dataSource) {
List<AbstractFile> topLevelFiles = new ArrayList<>();
private static void getTopLevelFiles(Content dataSource, List<AbstractFile> topLevelFiles) {
Collection<AbstractFile> rootObjects = dataSource.accept(new GetRootDirectoryVisitor());
if (rootObjects.isEmpty() && dataSource instanceof AbstractFile) {
// The data source is itself a file to be processed.
@ -312,7 +317,6 @@ final class IngestTasksScheduler {
}
}
}
return topLevelFiles;
}
/**
@ -405,7 +409,7 @@ final class IngestTasksScheduler {
return false;
}
/**
/*
* Check if the file is a member of the file ingest filter that is being
* applied to the current run of ingest, checks if unallocated space
* should be processed inside call to fileIsMemberOf
@ -414,8 +418,8 @@ final class IngestTasksScheduler {
return false;
}
// Skip the task if the file is one of a select group of special, large
// NTFS or FAT file system files.
// Skip the task if the file is one of a select group of special, large
// NTFS or FAT file system files.
if (file instanceof org.sleuthkit.datamodel.File) {
final org.sleuthkit.datamodel.File f = (org.sleuthkit.datamodel.File) file;

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2018 Basis Technology Corp.
* Copyright 2017-2018 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -23,6 +23,7 @@ import java.awt.event.ActionEvent;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.JOptionPane;
@ -31,26 +32,33 @@ import org.openide.DialogDisplayer;
import org.openide.WizardDescriptor;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.datamodel.SpecialDirectoryNode;
import org.sleuthkit.autopsy.ingest.IngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.TskCoreException;
/**
* This class is used to add the action to the run ingest modules menu item.
* When the data source is pressed, it should open the wizard for ingest
* modules.
* An action that invokes the Run Ingest Modules wizard for one or more data
* sources or for the children of a file.
*/
public final class RunIngestModulesAction extends AbstractAction {
@Messages("RunIngestModulesAction.name=Run Ingest Modules")
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(SpecialDirectoryNode.class.getName());
/*
* Note that the execution context is the name of the dialog that used to be
* used instead of this wizard and is retained for backwards compatibility.
*/
private static final String EXECUTION_CONTEXT = "org.sleuthkit.autopsy.ingest.RunIngestModulesDialog";
private final List<Content> dataSources = new ArrayList<>();
private final IngestJobSettings.IngestType ingestType;
private final AbstractFile parentFile;
/**
* Display any warnings that the ingestJobSettings have.
@ -67,12 +75,10 @@ public final class RunIngestModulesAction extends AbstractAction {
JOptionPane.showMessageDialog(WindowManager.getDefault().getMainWindow(), warningMessage.toString());
}
}
private final List<Content> dataSources = new ArrayList<>();
private final IngestJobSettings.IngestType ingestType;
/**
* Creates an action which will make a run ingest modules wizard when it is
* performed.
* Constructs an action that invokes the Run Ingest Modules wizard for one
* or more data sources.
*
* @param dataSources - the data sources you want to run ingest on
*/
@ -80,18 +86,26 @@ public final class RunIngestModulesAction extends AbstractAction {
this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name());
this.dataSources.addAll(dataSources);
this.ingestType = IngestJobSettings.IngestType.ALL_MODULES;
this.parentFile = null;
}
/**
* Creates an action which will make a run ingest modules wizard when it is
* performed.
* Constructs an action that invokes the Run Ingest Modules wizard for the
* children of a file.
*
* @param dir - the directory you want to run ingest on
* @param parentFile The file.
*/
public RunIngestModulesAction(Directory dir) {
public RunIngestModulesAction(AbstractFile parentFile) {
this.putValue(Action.NAME, Bundle.RunIngestModulesAction_name());
this.dataSources.add(dir);
this.parentFile = parentFile;
this.ingestType = IngestJobSettings.IngestType.FILES_ONLY;
try {
this.setEnabled(parentFile.hasChildren());
} catch (TskCoreException ex) {
this.setEnabled(false);
logger.log(Level.SEVERE, String.format("Failed to get children count for parent file %s (objId=%d), RunIngestModulesAction disabled", parentFile.getName(), parentFile.getId()), ex);
MessageNotifyUtil.Message.error(Bundle.RunIngestModulesAction_actionPerformed_errorMessage());
}
}
/**
@ -99,6 +113,9 @@ public final class RunIngestModulesAction extends AbstractAction {
*
* @param e the action event
*/
@Messages({
"RunIngestModulesAction.actionPerformed.errorMessage=Error querying the case database for the selected item."
})
@Override
public void actionPerformed(ActionEvent e) {
/**
@ -118,7 +135,26 @@ public final class RunIngestModulesAction extends AbstractAction {
if (DialogDisplayer.getDefault().notify(wiz) == WizardDescriptor.FINISH_OPTION) {
IngestJobSettings ingestJobSettings = wizard.getIngestJobSettings();
showWarnings(ingestJobSettings);
if (this.parentFile == null) {
IngestManager.getInstance().queueIngestJob(this.dataSources, ingestJobSettings);
} else {
try {
Content dataSource = parentFile.getDataSource();
List<Content> children = parentFile.getChildren();
List<AbstractFile> files = new ArrayList<>();
for (Content child : children) {
if (child instanceof AbstractFile) {
files.add((AbstractFile) child);
}
}
if (!files.isEmpty()) {
IngestManager.getInstance().queueIngestJob(dataSource, files, ingestJobSettings);
}
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, String.format("Failed to get data source or children for parent file %s (objId=%d), action failed", parentFile.getName(), parentFile.getId()), ex);
MessageNotifyUtil.Message.error(Bundle.RunIngestModulesAction_actionPerformed_errorMessage());
}
}
}
}

View File

@ -26,6 +26,8 @@ import org.sleuthkit.datamodel.TskCoreException;
/**
* An interface for implementations of a keyword search service.
* You can find the implementations by using Lookup, such as:
* Lookup.getDefault().lookup(KeywordSearchService.class)
*
* TODO (AUT-2158): This interface should not extend Closeable.
*/

View File

@ -25,6 +25,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleIngestJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.NoIngestModuleIngestJobSettings;
/**
* A factory for creating instances of file ingest modules that carve
@ -81,7 +82,17 @@ public class PhotoRecCarverIngestModuleFactory extends IngestModuleFactoryAdapte
@Override
public IngestModuleIngestJobSettingsPanel getIngestJobSettingsPanel(IngestModuleIngestJobSettings settings) {
if (settings instanceof PhotoRecCarverIngestJobSettings) {
return new PhotoRecCarverIngestJobSettingsPanel((PhotoRecCarverIngestJobSettings) settings);
}
/*
* Compatibility check for older versions.
*/
if (settings instanceof NoIngestModuleIngestJobSettings) {
return new PhotoRecCarverIngestJobSettingsPanel(new PhotoRecCarverIngestJobSettings());
}
throw new IllegalArgumentException("Expected settings argument to be an instance of PhotoRecCarverIngestJobSettings");
}
}

View File

@ -33,7 +33,7 @@ import org.sleuthkit.datamodel.Content;
/**
* A utility that runs an ingest job, blocking until the job is completed.
*/
public final class IngestRunner {
public final class IngestJobRunner {
/**
* Runs an ingest job, blocking until the job is completed.
@ -70,7 +70,7 @@ public final class IngestRunner {
/**
* IngestRunner instances cannot be instantiated.
*/
private IngestRunner() {
private IngestJobRunner() {
}
/**

View File

@ -41,8 +41,8 @@ import org.sleuthkit.datamodel.*;
abstract class Extract {
protected Case currentCase = Case.getCurrentCase();
protected SleuthkitCase tskCase = currentCase.getSleuthkitCase();
protected Case currentCase;
protected SleuthkitCase tskCase;
private final Logger logger = Logger.getLogger(this.getClass().getName());
private final ArrayList<String> errorMessages = new ArrayList<>();
String moduleName = "";
@ -52,6 +52,12 @@ abstract class Extract {
}
void init() throws IngestModuleException {
try {
currentCase = Case.getOpenCase();
tskCase = currentCase.getSleuthkitCase();
} catch (NoCurrentCaseException ex) {
throw new IngestModuleException(Bundle.Extract_indexError_message(), ex);
}
}
abstract void process(Content dataSource, IngestJobContext context);

Binary file not shown. (before: 28 KiB, after: 28 KiB)

Binary file not shown. (after: 26 KiB)

Binary file not shown. (after: 17 KiB)

Binary file not shown. (after: 39 KiB)

Binary file not shown. (after: 12 KiB)

Binary file not shown. (before: 43 KiB, after: 16 KiB)

View File

@ -0,0 +1,33 @@
/*! \page live_triage_page Live Triage
\section live_triage_overview Overview
The Live Triage feature allows you to load Autopsy onto a removable drive to run on target systems while making minimal changes to that target system. This will currently only work on Windows systems.
\section live_triage_create_drive Creating a live triage drive
To create a live triage drive, go to Tools->Make Live Triage Drive to bring up the main dialog.
\image html live_triage_dialog.png
Select the drive you want to use - any type of USB storage device will work. For best results, use the fastest drive available. Once the process is complete, the root folder will contain an Autopsy folder and a RunFromUSB.bat file.
\section live_triage_usage Running Autopsy from the live triage drive
Insert the drive into the target machine and browse to it in Windows Explorer. Right click on RunFromUSB.bat and select "Run as administrator". This is necessary to analyze the local drives.
\image html live_triage_script.png
Running the script will generate a few more directories on the USB drive. The configData directory stores all the data used by Autopsy - primarily configuration files and temporary files. You can make changes to the Autopsy settings and they will persist between runs. The cases directory is created as a recommended place to save your case data. You will need to browse to it when creating a case in Autopsy.
Once Autopsy is running, proceed to create a case as normal, making sure to save it on the USB drive.
\image html live_triage_case.png
Then choose the Local Disk data source and select the desired drive.
\image html live_triage_ds.png
See the \ref ds_local page for more information on local disk data sources.
*/

View File

@ -60,6 +60,7 @@ The following topics are available here:
- \subpage windows_authentication
- \subpage multiuser_sec_page
- \subpage multiuser_page
- \subpage live_triage_page
- \subpage advanced_page
If the topic you need is not listed, refer to the <a href="http://wiki.sleuthkit.org/index.php?title=Autopsy_User%27s_Guide">Autopsy Wiki</a> or join the <a href="https://lists.sourceforge.net/lists/listinfo/sleuthkit-users">SleuthKit User List</a> at SourceForge.

View File

@ -75,6 +75,19 @@ Typically a general report module should interact with both the Blackboard API i
You should call org.sleuthkit.autopsy.casemodule.Case.addReport() with the path to your report so that it is shown in the Autopsy tree. You can specify a specific file or folder and the user can then view it later.
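For example, here is a minimal sketch of adding a report to the case (the directory, file name, and module name are placeholders, and the call assumes there is an open case):
\code{.java}
String reportPath = baseReportDir + File.separator + "my_report.html"; // placeholder path
try {
    Case.getCurrentCase().addReport(reportPath, "My Report Module", "My Report");
} catch (TskCoreException ex) {
    logger.log(Level.SEVERE, "Failed to add report to the case", ex);
}
\endcode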
\subsection report_create_module_indexing Indexing Reports
After you have called org.sleuthkit.autopsy.casemodule.Case.addReport() and created a report, you can pass it to org.sleuthkit.autopsy.keywordsearchservice.KeywordSearchService.index() so that it is indexed and can then be found by a user. This is most commonly used when an Ingest Module runs a 3rd party tool and the output of that tool is added back into Autopsy as a report. Here is some example code:
\code{.java}
KeywordSearchService searchService = Lookup.getDefault().lookup(KeywordSearchService.class);
if (null == searchService) {
logger.log(Level.WARNING, "Keyword search service not found. Report will not be indexed");
} else {
searchService.index(report);
}
\endcode
\subsection report_create_module_layer Installing your Report Module
Report modules developed using Java must be registered in a layer.xml file. This file allows Autopsy to find the report module.

View File

@ -56,9 +56,10 @@
<rule ref="rulesets/java/comments.xml/CommentRequired">
<properties>
<!-- Disabled because we have lots of undocumented fields -->
<property name="fieldCommentRequirement" value="Unwanted"/>
<property name="fieldCommentRequirement" value="Ignored"/>
<!-- Disabled because we don't require comments on overrides of public fields -->
<property name="publicMethodCommentRequirement" value="Unwanted"/>
<property name="publicMethodCommentRequirement" value="Required"/>
<!--<property name="methodWithOverrideCommentRequirement" value="Unwanted"/>-->
</properties>
</rule>
<!-- Commented out because it was flagged some of our header / copyright comments

View File

@ -56,6 +56,7 @@ import org.apache.james.mime4j.stream.MimeConfig;
import org.apache.tika.parser.txt.CharsetDetector;
import org.apache.tika.parser.txt.CharsetMatch;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.EncodedFileOutputStream;
@ -267,8 +268,18 @@ class MboxParser {
* @param email
* @param e
*/
@NbBundle.Messages ({"MboxParser.handleAttch.noOpenCase.errMsg=Exception while getting open case."})
private void handleAttachment(EmailMessage email, Entity e, long fileID, int index) {
String outputDirPath = ThunderbirdMboxFileIngestModule.getModuleOutputPath() + File.separator;
String outputDirPath;
String relModuleOutputPath;
try {
outputDirPath = ThunderbirdMboxFileIngestModule.getModuleOutputPath() + File.separator;
relModuleOutputPath = ThunderbirdMboxFileIngestModule.getRelModuleOutputPath() + File.separator;
} catch (NoCurrentCaseException ex) {
addErrorMessage(Bundle.MboxParser_handleAttch_noOpenCase_errMsg());
logger.log(Level.SEVERE, Bundle.MboxParser_handleAttch_noOpenCase_errMsg(), ex); //NON-NLS
return;
}
String filename = e.getFilename();
// sanitize name. Had an attachment with a Japanese encoded path that
@ -325,8 +336,7 @@ class MboxParser {
EmailMessage.Attachment attach = new EmailMessage.Attachment();
attach.setName(filename);
attach.setLocalPath(ThunderbirdMboxFileIngestModule.getRelModuleOutputPath()
+ File.separator + uniqueFilename);
attach.setLocalPath(relModuleOutputPath + uniqueFilename);
attach.setSize(new File(outPath).length());
attach.setEncodingType(TskData.EncodingType.XOR1);
email.addAttachment(attach);

View File

@ -33,6 +33,8 @@ import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestMonitor;
import org.sleuthkit.autopsy.ingest.IngestServices;
import static org.sleuthkit.autopsy.thunderbirdparser.ThunderbirdMboxFileIngestModule.getRelModuleOutputPath;
@ -204,9 +206,16 @@ class PstParser {
* @param email
* @param msg
*/
@NbBundle.Messages({"PstParser.noOpenCase.errMsg=Exception while getting open case."})
private void extractAttachments(EmailMessage email, PSTMessage msg, long fileID) {
int numberOfAttachments = msg.getNumberOfAttachments();
String outputDirPath = ThunderbirdMboxFileIngestModule.getModuleOutputPath() + File.separator;
String outputDirPath;
try {
outputDirPath = ThunderbirdMboxFileIngestModule.getModuleOutputPath() + File.separator;
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return;
}
for (int x = 0; x < numberOfAttachments; x++) {
String filename = "";
try {
@ -246,6 +255,9 @@ class PstParser {
NbBundle.getMessage(this.getClass(), "PstParser.extractAttch.errMsg.failedToExtractToDisk",
filename));
logger.log(Level.WARNING, "Failed to extract attachment from pst file.", ex); //NON-NLS
} catch (NoCurrentCaseException ex) {
addErrorMessage(Bundle.PstParser_noOpenCase_errMsg());
logger.log(Level.SEVERE, Bundle.PstParser_noOpenCase_errMsg(), ex); //NON-NLS
}
}
}

View File

@ -31,6 +31,7 @@ import java.util.regex.Pattern;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
import org.sleuthkit.autopsy.casemodule.services.Blackboard;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -74,15 +75,26 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
}
@Override
@Messages ({"ThunderbirdMboxFileIngestModule.noOpenCase.errMsg=Exception while getting open case."})
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
fileManager = Case.getCurrentCase().getServices().getFileManager();
try {
fileManager = Case.getOpenCase().getServices().getFileManager();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
throw new IngestModuleException(Bundle.ThunderbirdMboxFileIngestModule_noOpenCase_errMsg(), ex);
}
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
blackboard = Case.getCurrentCase().getServices().getBlackboard();
try {
blackboard = Case.getOpenCase().getServices().getBlackboard();
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex);
return ProcessResult.ERROR;
}
// skip known
if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) {
@ -133,8 +145,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
*/
@Messages({"ThunderbirdMboxFileIngestModule.processPst.indexError.message=Failed to index encryption detected artifact for keyword search."})
private ProcessResult processPst(AbstractFile abstractFile) {
String fileName = getTempPath() + File.separator + abstractFile.getName()
String fileName;
try {
fileName = getTempPath() + File.separator + abstractFile.getName()
+ "-" + String.valueOf(abstractFile.getId());
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
File file = new File(fileName);
long freeSpace = services.getFreeDiskSpace();
@ -159,8 +177,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
PstParser.ParseResult result = parser.parse(file, abstractFile.getId());
if (result == PstParser.ParseResult.OK) {
try {
// parse success: Process email and add artifacts
processEmails(parser.getResults(), abstractFile);
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
} else if (result == PstParser.ParseResult.ENCRYPT) {
// encrypted pst: Add encrypted file artifact
try {
@ -225,8 +249,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
emailFolder = emailFolder + mboxFileName;
emailFolder = emailFolder.replaceAll(".sbd", ""); //NON-NLS
String fileName = getTempPath() + File.separator + abstractFile.getName()
String fileName;
try {
fileName = getTempPath() + File.separator + abstractFile.getName()
+ "-" + String.valueOf(abstractFile.getId());
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
File file = new File(fileName);
long freeSpace = services.getFreeDiskSpace();
@ -249,7 +279,12 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
MboxParser parser = new MboxParser(services, emailFolder);
List<EmailMessage> emails = parser.parse(file, abstractFile.getId());
try {
processEmails(emails, abstractFile);
} catch (NoCurrentCaseException ex) {
logger.log(Level.SEVERE, "Exception while getting open case.", ex); //NON-NLS
return ProcessResult.ERROR;
}
if (file.delete() == false) {
logger.log(Level.INFO, "Failed to delete temp file: {0}", file.getName()); //NON-NLS
@ -268,10 +303,11 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
/**
* Get a path to a temporary folder.
*
* @return
* @throws NoCurrentCaseException if there is no open case.
* @return the temporary folder
*/
public static String getTempPath() {
String tmpDir = Case.getCurrentCase().getTempDirectory() + File.separator
public static String getTempPath() throws NoCurrentCaseException {
String tmpDir = Case.getOpenCase().getTempDirectory() + File.separator
+ "EmailParser"; //NON-NLS
File dir = new File(tmpDir);
if (dir.exists() == false) {
@ -280,8 +316,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
return tmpDir;
}
public static String getModuleOutputPath() {
String outDir = Case.getCurrentCase().getModuleDirectory() + File.separator
/**
* Get a module output folder.
*
* @throws NoCurrentCaseException if there is no open case.
* @return the module output folder
*/
public static String getModuleOutputPath() throws NoCurrentCaseException {
String outDir = Case.getOpenCase().getModuleDirectory() + File.separator
+ EmailParserModuleFactory.getModuleName();
File dir = new File(outDir);
if (dir.exists() == false) {
@ -290,8 +332,14 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
return outDir;
}
public static String getRelModuleOutputPath() {
return Case.getCurrentCase().getModuleOutputDirectoryRelativePath() + File.separator
/**
* Get a relative path of a module output folder.
*
* @throws NoCurrentCaseException if there is no open case.
* @return the relative path of the module output folder
*/
public static String getRelModuleOutputPath() throws NoCurrentCaseException {
return Case.getOpenCase().getModuleOutputDirectoryRelativePath() + File.separator
+ EmailParserModuleFactory.getModuleName();
}
@ -301,8 +349,9 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
*
* @param emails
* @param abstractFile
* @throws NoCurrentCaseException if there is no open case.
*/
private void processEmails(List<EmailMessage> emails, AbstractFile abstractFile) {
private void processEmails(List<EmailMessage> emails, AbstractFile abstractFile) throws NoCurrentCaseException {
List<AbstractFile> derivedFiles = new ArrayList<>();
@ -386,9 +435,10 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
*
* @param email
* @param abstractFile
* @throws NoCurrentCaseException if there is no open case.
*/
@Messages({"ThunderbirdMboxFileIngestModule.addArtifact.indexError.message=Failed to index email message detected artifact for keyword search."})
private BlackboardArtifact addArtifact(EmailMessage email, AbstractFile abstractFile) {
private BlackboardArtifact addArtifact(EmailMessage email, AbstractFile abstractFile) throws NoCurrentCaseException {
BlackboardArtifact bbart = null;
List<BlackboardAttribute> bbattributes = new ArrayList<>();
String to = email.getRecipients();
@ -409,10 +459,13 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
senderAddressList.addAll(findEmailAddresess(from));
AccountFileInstance senderAccountInstance = null;
Case openCase = Case.getOpenCase();
if (senderAddressList.size() == 1) {
senderAddress = senderAddressList.get(0);
try {
senderAccountInstance = Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, senderAddress, EmailParserModuleFactory.getModuleName(), abstractFile);
senderAccountInstance = openCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, senderAddress, EmailParserModuleFactory.getModuleName(), abstractFile);
}
catch(TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create account for email address " + senderAddress, ex); //NON-NLS
@ -431,7 +484,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
recipientAddresses.forEach((addr) -> {
try {
AccountFileInstance recipientAccountInstance =
Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, addr,
openCase.getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.EMAIL, addr,
EmailParserModuleFactory.getModuleName(), abstractFile);
recipientAccountInstances.add(recipientAccountInstance);
}
@ -467,7 +520,7 @@ public final class ThunderbirdMboxFileIngestModule implements FileIngestModule {
bbart.addAttributes(bbattributes);
// Add account relationships
Case.getCurrentCase().getSleuthkitCase().getCommunicationsManager().addRelationships(senderAccountInstance, recipientAccountInstances, bbart,Relationship.Type.MESSAGE, dateL);
openCase.getSleuthkitCase().getCommunicationsManager().addRelationships(senderAccountInstance, recipientAccountInstances, bbart,Relationship.Type.MESSAGE, dateL);
try {
// index the artifact for keyword search