Merge branch 'develop' of https://github.com/sleuthkit/autopsy into keywordSearch

Nick Davis 2014-04-11 14:47:21 -04:00
commit 6088b62b75
33 changed files with 681 additions and 678 deletions

View File

@ -208,11 +208,11 @@ public class PlatformUtil {
* @throws IOException exception thrown if extracting the file fails for IO
* reasons
*/
public static <T> boolean extractResourceToUserConfigDir(final Class<T> resourceClass, final String resourceFile) throws IOException {
public static <T> boolean extractResourceToUserConfigDir(final Class<T> resourceClass, final String resourceFile, boolean overWrite) throws IOException {
final File userDir = new File(getUserConfigDirectory());
final File resourceFileF = new File(userDir + File.separator + resourceFile);
if (resourceFileF.exists()) {
if (resourceFileF.exists() && !overWrite) {
return false;
}
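The new overWrite flag controls whether an already-extracted copy in the user configuration directory is replaced. A minimal usage sketch, assuming only the signature shown above; the class and resource names are hypothetical:

    import java.io.IOException;
    import org.sleuthkit.autopsy.coreutils.PlatformUtil;

    class ConfigExtractionSketch {
        // Re-extract a bundled resource into the user config directory.
        // Passing false keeps the old behavior (an existing copy is left alone);
        // passing true replaces the copy even if it already exists.
        static boolean refreshBundledResource() {
            try {
                return PlatformUtil.extractResourceToUserConfigDir(
                        ConfigExtractionSketch.class, "example_config.xml", true);
            } catch (IOException ex) {
                return false;
            }
        }
    }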

View File

@ -70,7 +70,7 @@ public class XMLUtil {
*/
public static <T> boolean xmlIsValid(DOMSource xmlfile, Class<T> clazz, String schemaFile) {
try{
PlatformUtil.extractResourceToUserConfigDir(clazz, schemaFile);
PlatformUtil.extractResourceToUserConfigDir(clazz, schemaFile, false);
File schemaLoc = new File(PlatformUtil.getUserConfigDirectory() + File.separator + schemaFile);
SchemaFactory schm = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
try{

View File

@ -35,7 +35,7 @@ import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
@ -46,23 +46,17 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.TskData;
/**
* Sample data source ingest module that doesn't do much. Demonstrates per
* ingest job module settings, use of a subset of the available ingest services
* and thread-safe sharing of per ingest job resources.
* <p>
* IMPORTANT TIP: This sample data source ingest module directly implements
* DataSourceIngestModule, which extends IngestModule. A practical alternative,
* recommended if you do not need to provide implementations of all of the
* IngestModule methods, is to extend the abstract class IngestModuleAdapter to
* get default "do nothing" implementations of the IngestModule methods.
* and thread-safe sharing of per ingest job data.
*/
class SampleDataSourceIngestModule implements DataSourceIngestModule {
class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
private static final HashMap<Long, Integer> moduleReferenceCountsForIngestJobs = new HashMap<>();
private static final HashMap<Long, Long> fileCountsForIngestJobs = new HashMap<>();
private final boolean skipKnownFiles;
private IngestJobContext context = null;
@ -71,58 +65,20 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
this.skipKnownFiles = settings.skipKnownFiles();
}
/**
* Invoked by Autopsy to allow an ingest module instance to set up any
* internal data structures and acquire any private resources it will need
* during an ingest job.
* <p>
* Autopsy will generally use several instances of an ingest module for each
* ingest job it performs. Completing an ingest job entails processing a
* single data source (e.g., a disk image) and all of the files from the
* data source, including files extracted from archives and any unallocated
* space (made to look like a series of files). The data source is passed
* through one or more pipelines of data source ingest modules. The files
* are passed through one or more pipelines of file ingest modules.
* <p>
* Autopsy may use multiple threads to complete an ingest job, but it is
* guaranteed that there will be no more than one module instance per
* thread. However, if the module instances must share resources, the
* modules are responsible for synchronizing access to the shared resources
* and doing reference counting as required to release those resources
* correctly. Also, more than one ingest job may be in progress at any given
* time. This must also be taken into consideration when sharing resources
* between module instances.
* <p>
* An ingest module that does not require initialization may extend the
* abstract IngestModuleAdapter class to get a default "do nothing"
* implementation of this method.
*
* @param context Provides data and services specific to the ingest job and
* the ingest pipeline of which the module is a part.
* @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
*/
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
// This method is thread-safe with per ingest job reference counting.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
initFileCount(context.getJobId());
}
/**
* Processes a data source.
*
* @param dataSource The data source to process.
* @param statusHelper A status helper to be used to report progress and
* detect ingest job cancellation.
* @return A result code indicating success or failure of the processing.
*/
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
// There are two tasks to do. Use the status helper to set the the
// progress bar to determinate and to set the remaining number of work
// units to be completed.
statusHelper.switchToDeterminate(2);
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
// There are two tasks to do. Set the progress bar to determinate
// and set the remaining number of work units to be completed to two.
progressBar.switchToDeterminate(2);
Case autopsyCase = Case.getCurrentCase();
SleuthkitCase sleuthkitCase = autopsyCase.getSleuthkitCase();
@ -138,7 +94,7 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
}
}
statusHelper.progress(1);
progressBar.progress(1);
// Get files by creation time.
long currentTime = System.currentTimeMillis() / 1000;
@ -150,10 +106,12 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
}
}
// This method is thread-safe and keeps per ingest job counters.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
addToFileCount(context.getJobId(), fileCount);
statusHelper.progress(1);
progressBar.progress(1);
return IngestModule.ProcessResult.OK;
} catch (TskCoreException ex) {
IngestServices ingestServices = IngestServices.getInstance();
@ -161,53 +119,20 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
logger.log(Level.SEVERE, "File query failed", ex);
return IngestModule.ProcessResult.ERROR;
}
return IngestModule.ProcessResult.OK;
}
/**
* Invoked by Autopsy when an ingest job is completed, before the ingest
* module instance is discarded. The module should respond by doing things
* like releasing private resources, submitting final results, and posting a
* final ingest message.
* <p>
* Autopsy will generally use several instances of an ingest module for each
* ingest job it performs. Completing an ingest job entails processing a
* single data source (e.g., a disk image) and all of the files from the
* data source, including files extracted from archives and any unallocated
* space (made to look like a series of files). The data source is passed
* through one or more pipelines of data source ingest modules. The files
* are passed through one or more pipelines of file ingest modules.
* <p>
* Autopsy may use multiple threads to complete an ingest job, but it is
* guaranteed that there will be no more than one module instance per
* thread. However, if the module instances must share resources, the
* modules are responsible for synchronizing access to the shared resources
* and doing reference counting as required to release those resources
* correctly. Also, more than one ingest job may be in progress at any given
* time. This must also be taken into consideration when sharing resources
* between module instances.
* <p>
* An ingest module that does not require initialization may extend the
* abstract IngestModuleAdapter class to get a default "do nothing"
* implementation of this method.
*/
@Override
public void shutDown(boolean ingestJobCancelled) {
// This method is thread-safe with per ingest job reference counting.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
postFileCount(context.getJobId());
}
synchronized static void initFileCount(long ingestJobId) {
Integer moduleReferenceCount;
if (!moduleReferenceCountsForIngestJobs.containsKey(ingestJobId)) {
moduleReferenceCount = 1;
Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId);
if (refCount == 1) {
fileCountsForIngestJobs.put(ingestJobId, 0L);
} else {
moduleReferenceCount = moduleReferenceCountsForIngestJobs.get(ingestJobId);
++moduleReferenceCount;
}
moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount);
}
synchronized static void addToFileCount(long ingestJobId, long countToAdd) {
@ -217,9 +142,8 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
}
synchronized static void postFileCount(long ingestJobId) {
Integer moduleReferenceCount = moduleReferenceCountsForIngestJobs.remove(ingestJobId);
--moduleReferenceCount;
if (moduleReferenceCount == 0) {
Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId);
if (refCount == 0) {
Long filesCount = fileCountsForIngestJobs.remove(ingestJobId);
String msgText = String.format("Found %d files", filesCount);
IngestMessage message = IngestMessage.createMessage(
@ -227,8 +151,6 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule {
SampleIngestModuleFactory.getModuleName(),
msgText);
IngestServices.getInstance().postMessage(message);
} else {
moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount);
}
}
}
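Taken together, the hunks above drop the sample module's hand-rolled reference counting in favor of the shared helpers that IngestModuleAdapter gains later in this commit. A consolidated sketch of the resulting per-job lifecycle, not verbatim from the commit; the class name, counter map, and message text are illustrative, and the message call mirrors the sample module's own usage above:

    import java.util.HashMap;
    import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
    import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
    import org.sleuthkit.autopsy.ingest.IngestJobContext;
    import org.sleuthkit.autopsy.ingest.IngestMessage;
    import org.sleuthkit.autopsy.ingest.IngestModule;
    import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
    import org.sleuthkit.autopsy.ingest.IngestServices;
    import org.sleuthkit.datamodel.Content;

    class CountingModuleSketch extends IngestModuleAdapter implements DataSourceIngestModule {

        // Shared by the several instances Autopsy may create for one job; keyed by job id.
        private static final HashMap<Long, Long> countsForIngestJobs = new HashMap<>();
        private IngestJobContext context;

        @Override
        public void startUp(IngestJobContext context) throws IngestModuleException {
            this.context = context;
            // The first instance created for a job initializes the shared counter.
            if (IngestModuleAdapter.moduleRefCountIncrementAndGet(context.getJobId()) == 1) {
                synchronized (countsForIngestJobs) {
                    countsForIngestJobs.put(context.getJobId(), 0L);
                }
            }
        }

        @Override
        public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            synchronized (countsForIngestJobs) {
                countsForIngestJobs.put(context.getJobId(), countsForIngestJobs.get(context.getJobId()) + 1);
            }
            return IngestModule.ProcessResult.OK;
        }

        @Override
        public void shutDown(boolean ingestJobCancelled) {
            // The last instance to shut down removes the counter and posts the summary.
            if (IngestModuleAdapter.moduleRefCountDecrementAndGet(context.getJobId()) == 0) {
                Long count;
                synchronized (countsForIngestJobs) {
                    count = countsForIngestJobs.remove(context.getJobId());
                }
                IngestServices.getInstance().postMessage(IngestMessage.createMessage(
                        IngestMessage.MessageType.INFO,
                        "Counting Module Sketch",
                        String.format("Counted %d data sources", count)));
            }
        }
    }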

View File

@ -37,6 +37,7 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
@ -50,17 +51,10 @@ import org.sleuthkit.datamodel.TskData;
/**
* Sample file ingest module that doesn't do much. Demonstrates per ingest job
* module settings, use of a subset of the available ingest services and
* thread-safe sharing of per ingest job resources.
* <p>
* IMPORTANT TIP: This sample data source ingest module directly implements
* FileIngestModule, which extends IngestModule. A practical alternative,
* recommended if you do not need to provide implementations of all of the
* IngestModule methods, is to extend the abstract class IngestModuleAdapter to
* get default "do nothing" implementations of the IngestModule methods.
* thread-safe sharing of per ingest job data.
*/
class SampleFileIngestModule implements FileIngestModule {
class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final HashMap<Long, Integer> moduleReferenceCountsForIngestJobs = new HashMap<>();
private static final HashMap<Long, Long> artifactCountsForIngestJobs = new HashMap<>();
private static int attrId = -1;
private final boolean skipKnownFiles;
@ -70,36 +64,6 @@ class SampleFileIngestModule implements FileIngestModule {
this.skipKnownFiles = settings.skipKnownFiles();
}
/**
* Invoked by Autopsy to allow an ingest module instance to set up any
* internal data structures and acquire any private resources it will need
* during an ingest job.
* <p>
* Autopsy will generally use several instances of an ingest module for each
* ingest job it performs. Completing an ingest job entails processing a
* single data source (e.g., a disk image) and all of the files from the
* data source, including files extracted from archives and any unallocated
* space (made to look like a series of files). The data source is passed
* through one or more pipelines of data source ingest modules. The files
* are passed through one or more pipelines of file ingest modules.
* <p>
* Autopsy may use multiple threads to complete an ingest job, but it is
* guaranteed that there will be no more than one module instance per
* thread. However, if the module instances must share resources, the
* modules are responsible for synchronizing access to the shared resources
* and doing reference counting as required to release those resources
* correctly. Also, more than one ingest job may be in progress at any given
* time. This must also be taken into consideration when sharing resources
* between module instances.
* <p>
* An ingest module that does not require initialization may extend the
* abstract IngestModuleAdapter class to get a default "do nothing"
* implementation of this method.
*
* @param context Provides data and services specific to the ingest job and
* the ingest pipeline of which the module is a part.
* @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
*/
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
@ -132,16 +96,11 @@ class SampleFileIngestModule implements FileIngestModule {
}
}
// This method is thread-safe with per ingest job reference counting.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
initBlackboardPostCount(context.getJobId());
}
/**
* Processes a file.
*
* @param file The file.
* @return A result code indicating success or failure of the processing.
*/
@Override
public IngestModule.ProcessResult process(AbstractFile file) {
if (attrId != -1) {
@ -182,7 +141,8 @@ class SampleFileIngestModule implements FileIngestModule {
BlackboardArtifact art = file.getGenInfoArtifact();
art.addAttribute(attr);
// Thread-safe.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
addToBlackboardPostCount(context.getJobId(), 1L);
// Fire an event to notify any listeners for blackboard postings.
@ -199,49 +159,18 @@ class SampleFileIngestModule implements FileIngestModule {
}
}
/**
* Invoked by Autopsy when an ingest job is completed, before the ingest
* module instance is discarded. The module should respond by doing things
* like releasing private resources, submitting final results, and posting a
* final ingest message.
* <p>
* Autopsy will generally use several instances of an ingest module for each
* ingest job it performs. Completing an ingest job entails processing a
* single data source (e.g., a disk image) and all of the files from the
* data source, including files extracted from archives and any unallocated
* space (made to look like a series of files). The data source is passed
* through one or more pipelines of data source ingest modules. The files
* are passed through one or more pipelines of file ingest modules.
* <p>
* Autopsy may use multiple threads to complete an ingest job, but it is
* guaranteed that there will be no more than one module instance per
* thread. However, if the module instances must share resources, the
* modules are responsible for synchronizing access to the shared resources
* and doing reference counting as required to release those resources
* correctly. Also, more than one ingest job may be in progress at any given
* time. This must also be taken into consideration when sharing resources
* between module instances.
* <p>
* An ingest module that does not require initialization may extend the
* abstract IngestModuleAdapter class to get a default "do nothing"
* implementation of this method.
*/
@Override
public void shutDown(boolean ingestJobCancelled) {
// This method is thread-safe with per ingest job reference counting.
// This method is thread-safe with per ingest job reference counted
// management of shared data.
reportBlackboardPostCount(context.getJobId());
}
synchronized static void initBlackboardPostCount(long ingestJobId) {
Integer moduleReferenceCount;
if (!moduleReferenceCountsForIngestJobs.containsKey(ingestJobId)) {
moduleReferenceCount = 1;
Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId);
if (refCount == 1) {
artifactCountsForIngestJobs.put(ingestJobId, 0L);
} else {
moduleReferenceCount = moduleReferenceCountsForIngestJobs.get(ingestJobId);
++moduleReferenceCount;
}
moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount);
}
synchronized static void addToBlackboardPostCount(long ingestJobId, long countToAdd) {
@ -251,9 +180,8 @@ class SampleFileIngestModule implements FileIngestModule {
}
synchronized static void reportBlackboardPostCount(long ingestJobId) {
Integer moduleReferenceCount = moduleReferenceCountsForIngestJobs.remove(ingestJobId);
--moduleReferenceCount;
if (moduleReferenceCount == 0) {
Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId);
if (refCount == 0) {
Long filesCount = artifactCountsForIngestJobs.remove(ingestJobId);
String msgText = String.format("Posted %d times to the blackboard", filesCount);
IngestMessage message = IngestMessage.createMessage(
@ -261,8 +189,6 @@ class SampleFileIngestModule implements FileIngestModule {
SampleIngestModuleFactory.getModuleName(),
msgText);
IngestServices.getInstance().postMessage(message);
} else {
moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount);
}
}
}

View File

@ -21,7 +21,6 @@ IngestJob.progress.fileIngest.displayName=File Ingest of {0}
IngestJob.progress.cancelling={0} (Cancelling...)
IngestJobConfigurationPanel.processUnallocCheckbox.toolTipText=Processes unallocated space, such as deleted files. Produces more complete results, but it may take longer to process on large images.
IngestJobConfigurationPanel.processUnallocCheckbox.text=Process Unallocated Space
IngestJobConfigurationPanel.advancedButton.text=Advanced
IngestJob.toString.text=ScheduledTask'{'input\={0}, modules\={1}'}'
IngestJobLauncher.modName.tbirdParser.text=Thunderbird Parser
IngestJobLauncher.modName.mboxParser.text=MBox Parser
@ -98,3 +97,6 @@ IngestScheduler.remove.exception.notSupported.msg=Not supported.
IngestScheduler.DataSourceScheduler.exception.next.msg=There is no data source tasks in the queue, check hasNext()
IngestScheduler.DataSourceScheduler.exception.remove.msg=Removing of scheduled data source ingest tasks is not supported.
IngestScheduler.DataSourceScheduler.toString.size=DataSourceQueue, size\: {0}
Label1
IngestJobConfigurationPanel.advancedButton.text=Advanced
IngestJobConfigurationPanel.advancedButton.actionCommand=Advanced

View File

@ -35,5 +35,5 @@ public interface DataSourceIngestModule extends IngestModule {
* detect ingest job cancellation.
* @return A result code indicating success or failure of the processing.
*/
ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper);
ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper);
}
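With the rename, the second parameter is purely a progress reporter; the cancellation check the old status helper carried is removed in the next file. A minimal implementing sketch, assuming IngestModuleAdapter for the default startUp/shutDown; the two-task breakdown is illustrative and mirrors the sample module earlier in this commit:

    import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
    import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
    import org.sleuthkit.autopsy.ingest.IngestModule;
    import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
    import org.sleuthkit.datamodel.Content;

    class ProgressReportingSketch extends IngestModuleAdapter implements DataSourceIngestModule {

        @Override
        public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
            // Declare the total number of work units, then report each one as it completes.
            progressBar.switchToDeterminate(2);
            // ... first task ...
            progressBar.progress(1);
            // ... second task ...
            progressBar.progress(1);
            return IngestModule.ProcessResult.OK;
        }
    }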

View File

@ -18,33 +18,19 @@
*/
package org.sleuthkit.autopsy.ingest;
import org.netbeans.api.progress.ProgressHandle;
/**
* Used by data source ingest modules to report progress and detect data source
* ingest job cancellation.
* Used by data source ingest modules to report progress.
*/
public class DataSourceIngestModuleStatusHelper {
public class DataSourceIngestModuleProgress {
private final IngestJob ingestJob;
private final String moduleDisplayName;
DataSourceIngestModuleStatusHelper(IngestJob ingestJob, String moduleDisplayName) {
DataSourceIngestModuleProgress(IngestJob ingestJob, String moduleDisplayName) {
this.ingestJob = ingestJob;
this.moduleDisplayName = moduleDisplayName;
}
/**
* Checks for ingest job cancellation. This should be polled by the module
* in its process() method. If the ingest task is canceled, the module
* should return from its process() method as quickly as possible.
*
* @return True if the task has been canceled, false otherwise.
*/
public boolean isIngestJobCancelled() {
return (ingestJob.isCancelled());
}
/**
* Updates the progress bar and switches it to determinate mode. This should
* be called by the module as soon as the number of total work units

View File

@ -79,7 +79,7 @@ final class DataSourceIngestPipeline {
List<IngestModuleError> errors = new ArrayList<>();
for (DataSourceIngestModuleDecorator module : this.modules) {
try {
module.process(job.getDataSource(), new DataSourceIngestModuleStatusHelper(job, module.getDisplayName()));
module.process(job.getDataSource(), new DataSourceIngestModuleProgress(job, module.getDisplayName()));
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
@ -126,7 +126,7 @@ final class DataSourceIngestPipeline {
}
@Override
public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
return module.process(dataSource, statusHelper);
}

View File

@ -13,7 +13,7 @@
<Dimension value="[522, 257]"/>
</Property>
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
<Dimension value="[575, 300]"/>
<Dimension value="[575, 400]"/>
</Property>
</Properties>
<AuxValues>
@ -48,9 +48,9 @@
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="jPanel1" pref="278" max="32767" attributes="0"/>
<Component id="jPanel1" pref="342" max="32767" attributes="0"/>
<Group type="102" attributes="0">
<Component id="modulesScrollPane" pref="233" max="32767" attributes="0"/>
<Component id="modulesScrollPane" max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="processUnallocPanel" max="-2" attributes="0"/>
</Group>
@ -108,24 +108,40 @@
<Layout>
<DimensionLayout dim="0">
<Group type="103" groupAlignment="0" attributes="0">
<Component id="jScrollPane1" alignment="0" pref="326" max="32767" attributes="0"/>
<Component id="jSeparator2" alignment="1" max="32767" attributes="0"/>
<Group type="102" alignment="1" attributes="0">
<EmptySpace max="32767" attributes="0"/>
<Group type="102" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Component id="jScrollPane1" pref="316" max="32767" attributes="0"/>
<Group type="102" attributes="0">
<Component id="scrollpane" max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="advancedButton" min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" pref="14" max="-2" attributes="0"/>
</Group>
</Group>
</Group>
<Component id="jSeparator2" max="32767" attributes="0"/>
</Group>
</DimensionLayout>
<DimensionLayout dim="1">
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="1" attributes="0">
<Component id="jScrollPane1" max="32767" attributes="0"/>
<EmptySpace max="-2" attributes="0"/>
<Component id="jScrollPane1" pref="242" max="32767" attributes="0"/>
<Group type="103" groupAlignment="0" attributes="0">
<Group type="102" alignment="0" attributes="0">
<EmptySpace min="-2" pref="22" max="-2" attributes="0"/>
<Component id="scrollpane" min="-2" pref="65" max="-2" attributes="0"/>
</Group>
<Group type="102" alignment="0" attributes="0">
<EmptySpace type="unrelated" max="-2" attributes="0"/>
<Component id="jSeparator2" min="-2" pref="10" max="-2" attributes="0"/>
<EmptySpace min="-2" pref="0" max="-2" attributes="0"/>
</Group>
<Group type="102" alignment="0" attributes="0">
<EmptySpace type="separate" max="-2" attributes="0"/>
<Component id="advancedButton" min="-2" max="-2" attributes="0"/>
<EmptySpace min="-2" max="-2" attributes="0"/>
</Group>
</Group>
<EmptySpace max="-2" attributes="0"/>
</Group>
</Group>
</DimensionLayout>
@ -136,6 +152,9 @@
<Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/ingest/Bundle.properties" key="IngestJobConfigurationPanel.advancedButton.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="actionCommand" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
<ResourceString bundle="org/sleuthkit/autopsy/ingest/Bundle.properties" key="IngestJobConfigurationPanel.advancedButton.actionCommand" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
</Property>
<Property name="enabled" type="boolean" value="false"/>
</Properties>
<Events>
@ -164,6 +183,40 @@
</Container>
</SubComponents>
</Container>
<Container class="javax.swing.JScrollPane" name="scrollpane">
<Properties>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
<Property name="horizontalScrollBarPolicy" type="int" value="31"/>
<Property name="verticalScrollBarPolicy" type="int" value="21"/>
</Properties>
<AuxValues>
<AuxValue name="autoScrollPane" type="java.lang.Boolean" value="true"/>
</AuxValues>
<Layout class="org.netbeans.modules.form.compat2.layouts.support.JScrollPaneSupportLayout"/>
<SubComponents>
<Component class="javax.swing.JTextArea" name="descriptionLabel">
<Properties>
<Property name="editable" type="boolean" value="false"/>
<Property name="background" type="java.awt.Color" editor="org.netbeans.beaninfo.editors.ColorEditor">
<Color blue="f0" green="f0" red="f0" type="rgb"/>
</Property>
<Property name="columns" type="int" value="20"/>
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
<Font name="Tahoma" size="11" style="0"/>
</Property>
<Property name="lineWrap" type="boolean" value="true"/>
<Property name="rows" type="int" value="5"/>
<Property name="wrapStyleWord" type="boolean" value="true"/>
<Property name="border" type="javax.swing.border.Border" editor="org.netbeans.modules.form.editors2.BorderEditor">
<Border info="null"/>
</Property>
</Properties>
</Component>
</SubComponents>
</Container>
</SubComponents>
</Container>
<Container class="javax.swing.JPanel" name="processUnallocPanel">
@ -183,7 +236,7 @@
<Group type="102" alignment="0" attributes="0">
<EmptySpace max="-2" attributes="0"/>
<Component id="processUnallocCheckbox" min="-2" max="-2" attributes="0"/>
<EmptySpace pref="60" max="32767" attributes="0"/>
<EmptySpace pref="108" max="32767" attributes="0"/>
</Group>
</Group>
</DimensionLayout>

View File

@ -107,6 +107,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
simplePanel.revalidate();
simplePanel.repaint();
advancedButton.setEnabled(null != selectedModule.getGlobalSettingsPanel());
descriptionLabel.setText(selectedModule.getDescription());
}
}
});
@ -131,12 +132,14 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
jSeparator2 = new javax.swing.JSeparator();
jScrollPane1 = new javax.swing.JScrollPane();
simplePanel = new javax.swing.JPanel();
scrollpane = new javax.swing.JScrollPane();
descriptionLabel = new javax.swing.JTextArea();
processUnallocPanel = new javax.swing.JPanel();
processUnallocCheckbox = new javax.swing.JCheckBox();
setMaximumSize(new java.awt.Dimension(5750, 3000));
setMinimumSize(new java.awt.Dimension(522, 257));
setPreferredSize(new java.awt.Dimension(575, 300));
setPreferredSize(new java.awt.Dimension(575, 400));
modulesScrollPane.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(160, 160, 160)));
modulesScrollPane.setPreferredSize(new java.awt.Dimension(160, 160));
@ -158,6 +161,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
jPanel1.setPreferredSize(new java.awt.Dimension(338, 257));
advancedButton.setText(org.openide.util.NbBundle.getMessage(IngestJobConfigurationPanel.class, "IngestJobConfigurationPanel.advancedButton.text")); // NOI18N
advancedButton.setActionCommand(org.openide.util.NbBundle.getMessage(IngestJobConfigurationPanel.class, "IngestJobConfigurationPanel.advancedButton.actionCommand")); // NOI18N
advancedButton.setEnabled(false);
advancedButton.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
@ -171,25 +175,49 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
simplePanel.setLayout(new javax.swing.BoxLayout(simplePanel, javax.swing.BoxLayout.PAGE_AXIS));
jScrollPane1.setViewportView(simplePanel);
scrollpane.setBorder(null);
scrollpane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
scrollpane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER);
descriptionLabel.setEditable(false);
descriptionLabel.setBackground(new java.awt.Color(240, 240, 240));
descriptionLabel.setColumns(20);
descriptionLabel.setFont(new java.awt.Font("Tahoma", 0, 11)); // NOI18N
descriptionLabel.setLineWrap(true);
descriptionLabel.setRows(5);
descriptionLabel.setWrapStyleWord(true);
descriptionLabel.setBorder(null);
scrollpane.setViewportView(descriptionLabel);
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 326, Short.MAX_VALUE)
.addComponent(jSeparator2, javax.swing.GroupLayout.Alignment.TRAILING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 316, Short.MAX_VALUE)
.addGroup(jPanel1Layout.createSequentialGroup()
.addComponent(scrollpane)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(advancedButton)
.addContainerGap())
.addGap(14, 14, 14))))
.addComponent(jSeparator2)
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup()
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, 0)
.addComponent(advancedButton)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 242, Short.MAX_VALUE)
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(22, 22, 22)
.addComponent(scrollpane, javax.swing.GroupLayout.PREFERRED_SIZE, 65, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(jPanel1Layout.createSequentialGroup()
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(jPanel1Layout.createSequentialGroup()
.addGap(18, 18, 18)
.addComponent(advancedButton)))
.addContainerGap())
);
@ -210,7 +238,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
.addGroup(processUnallocPanelLayout.createSequentialGroup()
.addContainerGap()
.addComponent(processUnallocCheckbox)
.addContainerGap(60, Short.MAX_VALUE))
.addContainerGap(108, Short.MAX_VALUE))
);
processUnallocPanelLayout.setVerticalGroup(
processUnallocPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
@ -238,15 +266,19 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 278, Short.MAX_VALUE)
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 342, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addComponent(modulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 233, Short.MAX_VALUE)
.addComponent(modulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(processUnallocPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed
processUnallocatedSpace = processUnallocCheckbox.isSelected();
}//GEN-LAST:event_processUnallocCheckboxActionPerformed
private void advancedButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_advancedButtonActionPerformed
final AdvancedConfigurationDialog dialog = new AdvancedConfigurationDialog();
@ -270,11 +302,9 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
dialog.display(selectedModule.getGlobalSettingsPanel());
}//GEN-LAST:event_advancedButtonActionPerformed
private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed
processUnallocatedSpace = processUnallocCheckbox.isSelected();
}//GEN-LAST:event_processUnallocCheckboxActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton advancedButton;
private javax.swing.JTextArea descriptionLabel;
private javax.swing.JPanel jPanel1;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JSeparator jSeparator2;
@ -282,6 +312,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
private javax.swing.JTable modulesTable;
private javax.swing.JCheckBox processUnallocCheckbox;
private javax.swing.JPanel processUnallocPanel;
private javax.swing.JScrollPane scrollpane;
private javax.swing.JPanel simplePanel;
private javax.swing.ButtonGroup timeGroup;
// End of variables declaration//GEN-END:variables

View File

@ -395,18 +395,18 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
}
@Override
synchronized public int getRowCount() {
public synchronized int getRowCount() {
return getNumberGroups();
}
public void markAllSeen() {
public synchronized void markAllSeen() {
for (TableEntry entry : messageData) {
entry.hasBeenSeen(true);
}
fireTableChanged(new TableModelEvent(this));
}
public int getNumberNewMessages() {
public synchronized int getNumberNewMessages() {
int newMessages = 0;
for (TableEntry entry : messageData) {
if (!entry.hasBeenSeen()) {
@ -416,11 +416,11 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
return newMessages;
}
synchronized int getNumberGroups() {
public synchronized int getNumberGroups() {
return messageData.size();
}
synchronized int getNumberMessages() {
public synchronized int getNumberMessages() {
int total = 0;
for (TableEntry e : messageData) {
total += e.messageGroup.getCount();
@ -428,7 +428,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
return total;
}
synchronized int getNumberUnreadMessages() {
public synchronized int getNumberUnreadMessages() {
int total = 0;
for (TableEntry e : messageData) {
if (!e.hasBeenVisited) {
@ -438,7 +438,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
return total;
}
synchronized int getNumberUnreadGroups() {
public synchronized int getNumberUnreadGroups() {
int total = 0;
for (TableEntry e : messageData) {
if (!e.hasBeenVisited) {
@ -513,7 +513,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
return ret;
}
synchronized public void addMessage(IngestMessage m) {
public synchronized void addMessage(IngestMessage m) {
//check how many messages per module with the same uniqueness
//and add to existing group or create a new group
String moduleName = m.getSource();
@ -628,15 +628,27 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
}
public synchronized boolean isVisited(int rowNumber) {
if (rowNumber < messageData.size()) {
return messageData.get(rowNumber).hasBeenVisited();
} else {
return false;
}
}
public synchronized MessageType getMessageType(int rowNumber) {
if (rowNumber < messageData.size()) {
return messageData.get(rowNumber).messageGroup.getMessageType();
} else {
return null;
}
}
public synchronized IngestMessageGroup getMessageGroup(int rowNumber) {
if (rowNumber < messageData.size()) {
return messageData.get(rowNumber).messageGroup;
} else {
return null;
}
}
public synchronized void reSort(boolean chronoLogical) {
@ -701,26 +713,26 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
messages.add(message);
}
List<IngestMessage> getMessages() {
private List<IngestMessage> getMessages() {
return messages;
}
void add(IngestMessage message) {
synchronized void add(IngestMessage message) {
messages.add(message);
}
//add all messages from another group
void addAll(IngestMessageGroup group) {
synchronized void addAll(IngestMessageGroup group) {
for (IngestMessage m : group.getMessages()) {
messages.add(m);
}
}
int getCount() {
synchronized int getCount() {
return messages.size();
}
String getDetails() {
synchronized String getDetails() {
StringBuilder b = new StringBuilder("");
for (IngestMessage m : messages) {
String details = m.getDetails();
@ -739,7 +751,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
* return color corresp to priority
* @return
*/
Color getColor() {
synchronized Color getColor() {
int count = messages.size();
if (count == 1) {
return VERY_HIGH_PRI_COLOR;
@ -757,7 +769,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
* used for chrono sort
* @return
*/
Date getDatePosted() {
synchronized Date getDatePosted() {
return messages.get(messages.size() - 1).getDatePosted();
}
@ -765,35 +777,35 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
* get subject of the first message
* @return
*/
String getSubject() {
synchronized String getSubject() {
return messages.get(0).getSubject();
}
/*
* return unique key, should be the same for all msgs
*/
String getUniqueKey() {
synchronized String getUniqueKey() {
return messages.get(0).getUniqueKey();
}
/*
* return source module, should be the same for all msgs
*/
String getSource() {
synchronized String getSource() {
return messages.get(0).getSource();
}
/*
* return data of the first message
*/
BlackboardArtifact getData() {
synchronized BlackboardArtifact getData() {
return messages.get(0).getData();
}
/*
* return message type, should be the same for all msgs
*/
IngestMessage.MessageType getMessageType() {
synchronized IngestMessage.MessageType getMessageType() {
return messages.get(0).getMessageType();
}
}
@ -858,6 +870,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
cell.setFont(new Font("", Font.PLAIN, 16));
final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row);
if (messageGroup != null) {
MessageType mt = messageGroup.getMessageType();
if (mt == MessageType.ERROR) {
cell.setBackground(ERROR_COLOR);
@ -867,7 +880,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
//cell.setBackground(table.getBackground());
cell.setBackground(messageGroup.getColor());
}
}
return cell;
}
}
@ -898,6 +911,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
}
final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row);
if (messageGroup != null) {
MessageType mt = messageGroup.getMessageType();
if (mt == MessageType.ERROR) {
cell.setBackground(ERROR_COLOR);
@ -907,7 +921,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
//cell.setBackground(table.getBackground());
cell.setBackground(messageGroup.getColor());
}
}
return cell;
}
}
@ -933,6 +947,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
Component cell = super.getTableCellRendererComponent(table, aValue, isSelected, hasFocus, row, column);
final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row);
if (messageGroup != null) {
MessageType mt = messageGroup.getMessageType();
if (mt == MessageType.ERROR) {
cell.setBackground(ERROR_COLOR);
@ -942,6 +957,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
//cell.setBackground(table.getBackground());
cell.setBackground(messageGroup.getColor());
}
}
return cell;
}
@ -974,10 +990,12 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
messageTable.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
//check if has details
IngestMessageGroup m = getMessageGroup(selected);
if (m != null) {
String details = m.getDetails();
if (details != null && !details.equals("")) {
mainPanel.showDetails(selected);
}
}
messageTable.setCursor(null);
}
}

View File

@ -18,11 +18,32 @@
*/
package org.sleuthkit.autopsy.ingest;
import java.util.HashMap;
/**
* An adapter that provides a default implementation of the IngestModule
* interface.
*/
public abstract class IngestModuleAdapter implements IngestModule {
// Maps a JobId to the count of instances
static HashMap<Long, Long> moduleRefCount = new HashMap<>();
public static synchronized long moduleRefCountIncrementAndGet(long jobId) {
long count = moduleRefCount.containsKey(jobId) ? moduleRefCount.get(jobId) : 0;
long nextCount = count + 1;
moduleRefCount.put(jobId, nextCount);
return nextCount;
}
public static synchronized long moduleRefCountDecrementAndGet(long jobId) {
if (moduleRefCount.containsKey(jobId)) {
long count = moduleRefCount.get(jobId);
moduleRefCount.put(jobId, --count);
return count;
} else {
return 0;
}
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {

View File

@ -24,6 +24,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.Document;
@ -39,12 +40,15 @@ import org.w3c.dom.NodeList;
final class IngestPipelinesConfiguration {
private static final Logger logger = Logger.getLogger(IngestPipelinesConfiguration.class.getName());
private final static String PIPELINES_CONFIG_FILE = "pipeline_config.xml";
private final static String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd";
private static final String PIPELINE_CONFIG_FILE_VERSION_KEY = "PipelineConfigFileVersion";
private static final String PIPELINE_CONFIG_FILE_VERSION_NO_STRING = "1";
private static final int PIPELINE_CONFIG_FILE_VERSION_NO = 1;
private static final String PIPELINES_CONFIG_FILE = "pipeline_config.xml";
private static final String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd";
private static final String XML_PIPELINE_ELEM = "PIPELINE";
private static final String XML_PIPELINE_TYPE_ATTR = "type";
private final static String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis";
private final static String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis";
private static final String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis";
private static final String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis";
private static final String XML_MODULE_ELEM = "MODULE";
private static final String XML_MODULE_CLASS_NAME_ATTR = "location";
private static IngestPipelinesConfiguration instance;
@ -73,11 +77,16 @@ final class IngestPipelinesConfiguration {
private void readPipelinesConfigurationFile() {
try {
PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir", ex);
return;
boolean overWrite;
if (!ModuleSettings.settingExists(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY)) {
ModuleSettings.setConfigSetting(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY, PIPELINE_CONFIG_FILE_VERSION_NO_STRING);
overWrite = true;
} else {
int versionNumber = Integer.parseInt(ModuleSettings.getConfigSetting(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY));
overWrite = versionNumber < PIPELINE_CONFIG_FILE_VERSION_NO;
// TODO: Migrate user edits
}
PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE, overWrite);
String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE;
Document doc = XMLUtil.loadDoc(IngestPipelinesConfiguration.class, configFilePath, PIPELINES_CONFIG_FILE_XSD);
@ -133,5 +142,8 @@ final class IngestPipelinesConfiguration {
}
}
}
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir", ex);
}
}
}

View File

@ -32,11 +32,13 @@ import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
@ -57,12 +59,20 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getInstance();
private int filesProcessed = 0;
private boolean filesToFire = false;
private AtomicInteger filesProcessed = new AtomicInteger(0);
private volatile boolean filesToFire = false;
private long jobId;
ExifParserFileIngestModule() {
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
}
@Override
public ProcessResult process(AbstractFile content) {
//skip unalloc
@ -76,8 +86,8 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
}
// update the tree every 1000 files if we have EXIF data that is not being displayed
filesProcessed++;
if ((filesToFire) && (filesProcessed % 1000 == 0)) {
final int filesProcessedValue = filesProcessed.incrementAndGet();
if ((filesToFire) && (filesProcessedValue % 1000 == 0)) {
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
filesToFire = false;
}
@ -187,9 +197,12 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to check for this final event on the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if (filesToFire) {
//send the final new data event
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
}
}
}
}
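Moving the file counter to an AtomicInteger makes the every-1000-files event batching safe without extra locking. A minimal sketch of the same batching idea; the refresh hook is a hypothetical stand-in for firing the module data event:

    import java.util.concurrent.atomic.AtomicInteger;

    class BatchedNotifierSketch {

        private final AtomicInteger filesProcessed = new AtomicInteger(0);

        // Called once per processed file; runs the refresh hook only every
        // 1000 files so listeners are not flooded with events.
        void onFileProcessed(Runnable refreshHook) {
            if (filesProcessed.incrementAndGet() % 1000 == 0) {
                refreshHook.run();
            }
        }
    }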

View File

@ -23,6 +23,7 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -38,7 +39,6 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskException;
/**
@ -50,8 +50,9 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
private final IngestServices services = IngestServices.getInstance();
private final FileExtMismatchDetectorModuleSettings settings;
private HashMap<String, String[]> SigTypeToExtMap = new HashMap<>();
private long processTime = 0;
private long numFiles = 0;
private long jobId;
private static AtomicLong processTime = new AtomicLong(0);
private static AtomicLong numFiles = new AtomicLong(0);
FileExtMismatchIngestModule(FileExtMismatchDetectorModuleSettings settings) {
this.settings = settings;
@ -59,6 +60,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault();
SigTypeToExtMap = xmlLoader.load();
}
@ -82,8 +85,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
boolean mismatchDetected = compareSigTypeToExt(abstractFile);
processTime += (System.currentTimeMillis() - startTime);
numFiles++;
processTime.getAndAdd(System.currentTimeMillis() - startTime);
numFiles.getAndIncrement();
if (mismatchDetected) {
// add artifact
@ -149,15 +152,17 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
StringBuilder detailsSb = new StringBuilder();
detailsSb.append("<table border='0' cellpadding='4' width='280'>");
detailsSb.append("<tr><td>").append(FileExtMismatchDetectorModuleFactory.getModuleName()).append("</td></tr>");
detailsSb.append("<tr><td>").append(
NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalProcTime"))
.append("</td><td>").append(processTime).append("</td></tr>\n");
.append("</td><td>").append(processTime.get()).append("</td></tr>\n");
detailsSb.append("<tr><td>").append(
NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalFiles"))
.append("</td><td>").append(numFiles).append("</td></tr>\n");
.append("</td><td>").append(numFiles.get()).append("</td></tr>\n");
detailsSb.append("</table>");
services.postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileExtMismatchDetectorModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
@ -165,3 +170,4 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
detailsSb.toString()));
}
}
}

View File

@ -61,7 +61,7 @@ class FileExtMismatchXML {
this.filePath = filePath;
try {
boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME);
boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME, false);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error copying default mismatch configuration to user dir ", ex);
}

View File

@ -18,11 +18,12 @@
*/
package org.sleuthkit.autopsy.modules.filetypeid;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.AbstractFile;
@ -44,8 +45,10 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName());
private static final long MIN_FILE_SIZE = 512;
private final FileTypeIdModuleSettings settings;
private long matchTime = 0;
private long numFiles = 0;
private long jobId;
private static AtomicLong matchTime = new AtomicLong(0);
private static AtomicLong numFiles = new AtomicLong(0);
// The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed.
// If desired in the future to be more knowledgeable about weird files or rare formats, we could
// actually have a list of detectors which are called in order until a match is found.
@ -55,6 +58,12 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
this.settings = settings;
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId);
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
// skip non-files
@ -75,8 +84,8 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
try {
long startTime = System.currentTimeMillis();
FileTypeDetectionInterface.FileIdInfo fileId = detector.attemptMatch(abstractFile);
matchTime += (System.currentTimeMillis() - startTime);
numFiles++;
matchTime.getAndAdd(System.currentTimeMillis() - startTime);
numFiles.getAndIncrement();
if (!fileId.type.isEmpty()) {
// add artifact
@ -98,21 +107,24 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
@Override
public void shutDown(boolean ingestJobCancelled) {
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
StringBuilder detailsSb = new StringBuilder();
detailsSb.append("<table border='0' cellpadding='4' width='280'>");
detailsSb.append("<tr><td>").append(FileTypeIdModuleFactory.getModuleName()).append("</td></tr>");
detailsSb.append("<tr><td>")
.append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalProcTime"))
.append("</td><td>").append(matchTime).append("</td></tr>\n");
.append("</td><td>").append(matchTime.get()).append("</td></tr>\n");
detailsSb.append("<tr><td>")
.append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalFiles"))
.append("</td><td>").append(numFiles).append("</td></tr>\n");
.append("</td><td>").append(numFiles.get()).append("</td></tr>\n");
detailsSb.append("</table>");
IngestServices.getInstance().postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileTypeIdModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"FileTypeIdIngestModule.complete.srvMsg.text"),
detailsSb.toString()));
}
}
/**
* Validate if a given mime type is in the detector's registry.

View File

@ -90,6 +90,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
private final byte[] fileHeaderBuffer = new byte[readHeaderSize];
private static final int ZIP_SIGNATURE_BE = 0x504B0304;
private IngestJobContext context;
private long jobId;
SevenZipIngestModule() {
}
@ -97,6 +98,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
jobId = context.getJobId();
final Case currentCase = Case.getCurrentCase();
@ -121,6 +123,8 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
}
// if first instance of this module for this job then check 7zip init
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
try {
SevenZip.initSevenZipFromPlatformJAR();
String platform = SevenZip.getUsedPlatform();
@ -134,6 +138,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
services.postMessage(IngestMessage.createErrorMessage(ArchiveFileExtractorModuleFactory.getModuleName(), msg, details));
throw new RuntimeException(e);
}
}
archiveDepthCountTree = new ArchiveDepthCountTree();
}

View File

@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.hashdatabase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
@ -56,37 +55,24 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
private final HashLookupModuleSettings settings;
private List<HashDb> knownBadHashSets = new ArrayList<>();
private List<HashDb> knownHashSets = new ArrayList<>();
private long jobID;
// Maps a JobId to the count of instances
static HashMap<Long, Long> moduleRefCount = new HashMap<>();
private long jobId;
static AtomicLong totalKnownBadCount = new AtomicLong(0);
static AtomicLong totalCalctime = new AtomicLong(0);
static AtomicLong totalLookuptime = new AtomicLong(0);
private static synchronized void moduleRefCountIncrement(long jobID) {
long count = moduleRefCount.containsKey(jobID) ? moduleRefCount.get(jobID) : 0;
moduleRefCount.put(jobID, count + 1);
}
private static synchronized long moduleRefCountDecrementAndGet(long jobID) {
if (moduleRefCount.containsKey(jobID)) {
long count = moduleRefCount.get(jobID);
moduleRefCount.put(jobID, --count);
return count;
} else {
return 0;
}
}
HashDbIngestModule(HashLookupModuleSettings settings) {
this.settings = settings;
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException {
jobID = context.getJobId();
moduleRefCountIncrement(jobID);
jobId = context.getJobId();
getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets);
getEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets);
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
// if first module for this job then post error msgs if needed
if (knownBadHashSets.isEmpty()) {
services.postMessage(IngestMessage.createWarningMessage(
HashLookupModuleFactory.getModuleName(),
@ -96,7 +82,6 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
"HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
}
getEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets);
if (knownHashSets.isEmpty()) {
services.postMessage(IngestMessage.createWarningMessage(
HashLookupModuleFactory.getModuleName(),
@ -106,6 +91,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
"HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
}
}
}
private void getEnabledHashSets(List<HashDb> hashSets, List<HashDb> enabledHashSets) {
enabledHashSets.clear();
@ -317,7 +303,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
@Override
public void shutDown(boolean ingestJobCancelled) {
if (moduleRefCountDecrementAndGet(jobID) == 0) {
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) {
StringBuilder detailsSb = new StringBuilder();
//details

View File

@ -101,7 +101,13 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of text extraction issues
SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it
};
private Map<Long, IngestStatus> ingestStatus;
private static final Map<Long, IngestStatus> ingestStatus = new HashMap<>(); //guarded by itself
static void putIngestStatus(long id, IngestStatus status) {
synchronized(ingestStatus) {
ingestStatus.put(id, status);
}
}
KeywordSearchIngestModule(KeywordSearchJobSettings settings) {
this.settings = settings;
@ -117,12 +123,14 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
public void startUp(IngestJobContext context) throws IngestModuleException {
logger.log(Level.INFO, "Initializing instance {0}", instanceNum);
initialized = false;
jobId = context.getJobId();
caseHandle = Case.getCurrentCase().getSleuthkitCase();
tikaFormatDetector = new Tika();
ingester = Server.getIngester();
// increment the module reference count
// if first instance of this module for this job then check the server and existence of keywords
if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) {
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
@ -150,6 +158,21 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
ex.getMessage()));
}
// check if this job has any searchable keywords
List<KeywordList> keywordLists = KeywordSearchListsXML.getCurrent().getListsL();
boolean hasKeywordsForSearch = false;
for (KeywordList keywordList : keywordLists) {
if (settings.isKeywordListEnabled(keywordList.getName()) && !keywordList.getKeywords().isEmpty()) {
hasKeywordsForSearch = true;
break;
}
}
if (!hasKeywordsForSearch) {
services.postMessage(IngestMessage.createWarningMessage(KeywordSearchModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
}
}
//initialize extractors
stringExtractor = new AbstractFileStringExtract(this);
stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
@ -167,21 +190,6 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
textExtractors.add(new AbstractFileHtmlExtract(this));
textExtractors.add(new AbstractFileTikaTextExtract(this));
ingestStatus = new HashMap<>();
List<KeywordList> keywordLists = KeywordSearchListsXML.getCurrent().getListsL();
boolean hasKeywordsForSearch = false;
for (KeywordList keywordList : keywordLists) {
if (settings.isKeywordListEnabled(keywordList.getName()) && !keywordList.getKeywords().isEmpty()) {
hasKeywordsForSearch = true;
break;
}
}
if (!hasKeywordsForSearch) {
services.postMessage(IngestMessage.createWarningMessage(KeywordSearchModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
}
indexer = new Indexer();
initialized = true;
}
@ -191,7 +199,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
if (initialized == false) //error initializing indexing/Solr
{
logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName());
ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
putIngestStatus(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
return ProcessResult.OK;
}
try {
@ -247,7 +255,10 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
// Remove from the search list and trigger final commit and final search
SearchRunner.getInstance().endJob(jobId);
// We only need to post the summary msg from the last module per job
if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) {
postIndexSummary();
}
//log number of files / chunks in index
//signal a potential change in number of text_ingested files
@ -276,7 +287,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
* Common cleanup code when module stops or final searcher completes
*/
private void cleanup() {
synchronized(ingestStatus) {
ingestStatus.clear();
}
textExtractors.clear();
textExtractors = null;
@ -297,6 +310,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
int error_text = 0;
int error_index = 0;
int error_io = 0;
synchronized(ingestStatus) {
for (IngestStatus s : ingestStatus.values()) {
switch (s) {
case TEXT_INGESTED:
@ -321,6 +336,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
;
}
}
}
StringBuilder msg = new StringBuilder();
msg.append("<table border=0><tr><td>").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("</td><td>").append(text_ingested).append("</td></tr>");
@ -393,16 +409,16 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
private boolean extractStringsAndIndex(AbstractFile aFile) {
try {
if (stringExtractor.index(aFile)) {
ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED);
putIngestStatus(aFile.getId(), IngestStatus.STRINGS_INGESTED);
return true;
} else {
logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
return false;
}
} catch (IngesterException ex) {
logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex);
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
return false;
}
}
@ -448,9 +464,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
if ((indexContent == false || aFile.isDir() || size == 0)) {
try {
ingester.ingest(aFile, false); //meta-data only
ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED);
putIngestStatus(aFile.getId(), IngestStatus.METADATA_INGESTED);
} catch (IngesterException ex) {
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex);
}
return;
@ -484,9 +500,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) {
try {
ingester.ingest(aFile, false); //meta-data only
ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED);
putIngestStatus(aFile.getId(), IngestStatus.METADATA_INGESTED);
} catch (IngesterException ex) {
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex);
}
return;
@ -499,20 +515,20 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
//logger.log(Level.INFO, "indexing: " + aFile.getName());
if (!extractTextAndIndex(aFile, detectedFormat)) {
logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
} else {
ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED);
putIngestStatus(aFile.getId(), IngestStatus.TEXT_INGESTED);
wasTextAdded = true;
}
} catch (IngesterException e) {
logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", "
+ aFile.getName(), e);
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
} catch (Exception e) {
logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", "
+ aFile.getName(), e);
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
}
}
@ -522,5 +538,4 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
}
}
}
}
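Two concurrency fixes stand out in the keyword search hunks above: the per-file ingestStatus map becomes a static map "guarded by itself" (every access wrapped in synchronized on the map, via the new putIngestStatus helper), and the keyword-list check plus final summary are tied to the first and last instance per job. A small sketch of the guarded-map idiom, with hypothetical names, follows.

    import java.util.EnumMap;
    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch of the "map guarded by itself" idiom: a static HashMap
    // shared by all module instances, with every read and write done inside
    // synchronized (map) { ... }.
    class SharedStatusTracker {

        enum Status { TEXT_INGESTED, METADATA_INGESTED, SKIPPED_ERROR }

        private static final Map<Long, Status> statusByFileId = new HashMap<>(); // guarded by itself

        static void putStatus(long fileId, Status status) {
            synchronized (statusByFileId) {
                statusByFileId.put(fileId, status);
            }
        }

        static Map<Status, Integer> summarize() {
            Map<Status, Integer> counts = new EnumMap<>(Status.class);
            synchronized (statusByFileId) {
                for (Status s : statusByFileId.values()) {
                    counts.merge(s, 1, Integer::sum);
                }
            }
            return counts;
        }

        static void clear() {
            synchronized (statusByFileId) {
                statusByFileId.clear();
            }
        }
    }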

View File

@ -39,7 +39,7 @@ import java.io.FileReader;
import java.io.IOException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -62,27 +62,29 @@ class Chrome extends Extract {
private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id";
private static final String loginQuery = "select origin_url, username_value, signon_realm from logins";
private final Logger logger = Logger.getLogger(this.getClass().getName());
private Content dataSource;
private IngestJobContext context;
Chrome() {
moduleName = NbBundle.getMessage(Chrome.class, "Chrome.moduleName");
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
dataFound = false;
this.getHistory(dataSource, statusHelper);
this.getBookmark(dataSource, statusHelper);
this.getCookie(dataSource, statusHelper);
this.getLogin(dataSource, statusHelper);
this.getDownload(dataSource, statusHelper);
this.getHistory();
this.getBookmark();
this.getCookie();
this.getLogin();
this.getDownload();
}
/**
* Query for history databases and add artifacts
* @param dataSource
* @param controller
*/
private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getHistory() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> historyFiles;
try {
@ -126,7 +128,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
@ -164,10 +166,8 @@ class Chrome extends Extract {
/**
* Search for bookmark files and make artifacts.
* @param dataSource
* @param controller
*/
private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getBookmark() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> bookmarkFiles = null;
try {
@ -204,7 +204,7 @@ class Chrome extends Extract {
logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps});
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
@ -305,10 +305,8 @@ class Chrome extends Extract {
/**
* Queries for cookie files and adds artifacts
* @param dataSource
* @param controller
*/
private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getCookie() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> cookiesFiles;
@ -344,7 +342,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
@ -383,10 +381,8 @@ class Chrome extends Extract {
/**
* Queries for download files and adds artifacts
* @param dataSource
* @param controller
*/
private void getDownload(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getDownload() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> downloadFiles = null;
try {
@ -420,7 +416,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
@ -473,10 +469,8 @@ class Chrome extends Extract {
/**
* Queries for login files and adds artifacts
* @param dataSource
* @param controller
*/
private void getLogin(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getLogin() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> signonFiles;
try {
@ -511,7 +505,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
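Throughout Chrome (and the other extractors below), the DataSourceIngestModuleStatusHelper parameter is replaced by an IngestJobContext field, and long loops poll context.isJobCancelled(), deleting the temporary database copy before breaking out. A generic sketch of that cooperative-cancellation loop is below; BooleanSupplier stands in for the context check and the names are illustrative.

    import java.io.File;
    import java.util.List;
    import java.util.function.BooleanSupplier;

    // Hypothetical sketch of the cancellation pattern the extractors switch to:
    // keep a cancellation check and poll it inside long-running loops, cleaning
    // up temporary files before bailing out.
    class CancellableExtraction {

        static void processCandidates(List<File> tempDbFiles, BooleanSupplier isJobCancelled) {
            for (File dbFile : tempDbFiles) {
                if (isJobCancelled.getAsBoolean()) {
                    dbFile.delete();  // drop the half-processed temp copy
                    break;            // stop all remaining work for this job
                }
                // ... query the copied database and post artifacts ...
            }
        }
    }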

View File

@ -32,7 +32,7 @@ import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.datamodel.*;
@ -40,7 +40,7 @@ abstract class Extract {
protected Case currentCase = Case.getCurrentCase();
protected SleuthkitCase tskCase = currentCase.getSleuthkitCase();
public final Logger logger = Logger.getLogger(this.getClass().getName());
private final Logger logger = Logger.getLogger(this.getClass().getName());
private final ArrayList<String> errorMessages = new ArrayList<>();
String moduleName = "";
boolean dataFound = false;
@ -51,7 +51,7 @@ abstract class Extract {
void init() throws IngestModuleException {
}
abstract void process(Content dataSource, DataSourceIngestModuleStatusHelper controller);
abstract void process(Content dataSource, IngestJobContext context);
void complete() {
}
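The abstract Extract contract itself changes here: process() now takes the data source and the job context, and the concrete extractors stash both in fields so their private helpers (getHistory(), getBookmark(), ...) need no parameters. A schematic sketch of that shape, using placeholder types rather than Content and IngestJobContext, is below.

    // Placeholder types standing in for Content and IngestJobContext.
    interface DataSource {}
    interface JobContext { boolean isJobCancelled(); }

    abstract class ExtractTask {
        // New-style signature: the context travels with the call...
        abstract void process(DataSource dataSource, JobContext context);
    }

    class BrowserExtractTask extends ExtractTask {
        private DataSource dataSource;
        private JobContext context;

        @Override
        void process(DataSource dataSource, JobContext context) {
            this.dataSource = dataSource;  // ...and is stashed in fields once,
            this.context = context;        // so private helpers take no parameters.
            getHistory();
            getBookmarks();
        }

        private void getHistory()   { /* reads this.dataSource, polls this.context */ }
        private void getBookmarks() { /* reads this.dataSource, polls this.context */ }
    }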

View File

@ -52,7 +52,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.*;
/**
@ -66,6 +66,8 @@ class ExtractIE extends Extract {
private String JAVA_PATH;
private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
private ExecUtil execPasco;
private Content dataSource;
private IngestJobContext context;
ExtractIE() {
moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text");
@ -74,19 +76,19 @@ class ExtractIE extends Extract {
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
dataFound = false;
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
this.getHistory(dataSource, controller);
this.getBookmark();
this.getCookie();
this.getHistory();
}
/**
* Finds the files storing bookmarks and creates artifacts
* @param dataSource
* @param controller
*/
private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getBookmark() {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> favoritesFiles;
try {
@ -110,7 +112,7 @@ class ExtractIE extends Extract {
continue;
}
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
@ -180,10 +182,8 @@ class ExtractIE extends Extract {
/**
* Finds files that store cookies and adds artifacts for them.
* @param dataSource
* @param controller
*/
private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getCookie() {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> cookiesFiles;
try {
@ -202,7 +202,7 @@ class ExtractIE extends Extract {
dataFound = true;
for (AbstractFile cookiesFile : cookiesFiles) {
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
if (cookiesFile.getSize() == 0) {
@ -257,11 +257,9 @@ class ExtractIE extends Extract {
/**
* Locates index.dat files, runs Pasco on them, and creates artifacts.
* @param dataSource
* @param controller
*/
private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
logger.log(Level.INFO, "Pasco results path: " + moduleTempResultsDir);
private void getHistory() {
logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir);
boolean foundHistory = false;
final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false);
@ -273,7 +271,7 @@ class ExtractIE extends Extract {
}
final String pascoHome = pascoRoot.getAbsolutePath();
logger.log(Level.INFO, "Pasco2 home: " + pascoHome);
logger.log(Level.INFO, "Pasco2 home: {0}", pascoHome);
PASCO_LIB_PATH = pascoHome + File.separator + "pasco2.jar" + File.pathSeparator
+ pascoHome + File.separator + "*";
@ -283,7 +281,7 @@ class ExtractIE extends Extract {
// get index.dat files
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> indexFiles = null;
List<AbstractFile> indexFiles;
try {
indexFiles = fileManager.findFiles(dataSource, "index.dat");
} catch (TskCoreException ex) {
@ -312,7 +310,7 @@ class ExtractIE extends Extract {
//indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat";
temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName;
File datFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
try {
@ -337,7 +335,7 @@ class ExtractIE extends Extract {
//Delete index<n>.dat file since it was successfully parsed by Pasco
datFile.delete();
} else {
logger.log(Level.WARNING, "pasco execution failed on: " + this.getName());
logger.log(Level.WARNING, "pasco execution failed on: {0}", this.getName());
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getName()));
}
@ -361,7 +359,7 @@ class ExtractIE extends Extract {
Writer writer = null;
try {
final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName;
logger.log(Level.INFO, "Writing pasco results to: " + outputFileFullPath);
logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath);
writer = new FileWriter(outputFileFullPath);
execPasco = new ExecUtil();
execPasco.execute(writer, JAVA_PATH,
@ -402,7 +400,7 @@ class ExtractIE extends Extract {
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getName(),
file.getName()));
logger.log(Level.WARNING, "Pasco Output not found: " + file.getPath());
logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath());
return;
}
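A smaller cleanup repeated through ExtractIE and the files that follow is switching java.util.logging calls from string concatenation to MessageFormat-style placeholders, which defers message assembly until the level is known to be loggable. A short sketch:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Sketch of the logging style applied in these hunks.
    class LoggingStyle {
        private static final Logger logger = Logger.getLogger(LoggingStyle.class.getName());

        static void report(String pascoHome, Object[] details) {
            // Before: logger.log(Level.INFO, "Pasco2 home: " + pascoHome);
            logger.log(Level.INFO, "Pasco2 home: {0}", pascoHome);

            // Multiple arguments go through the Object[] overload, as in the
            // bookmark/cookie log lines elsewhere in this commit.
            logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", details);
        }
    }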

View File

@ -37,7 +37,7 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.recentactivity.UsbDeviceIdMapper.USBInfo;
import org.sleuthkit.datamodel.*;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
@ -64,6 +64,8 @@ class ExtractRegistry extends Extract {
private boolean rrFullFound = false; // true if we found the full version of regripper
final private static String MODULE_VERSION = "1.0";
private ExecUtil execRR;
private Content dataSource;
private IngestJobContext context;
//hide public constructor to prevent from instantiation by ingest module loader
ExtractRegistry() {
@ -77,7 +79,7 @@ class ExtractRegistry extends Extract {
}
final String rrHome = rrRoot.getAbsolutePath();
logger.log(Level.INFO, "RegRipper home: " + rrHome);
logger.log(Level.INFO, "RegRipper home: {0}", rrHome);
if (PlatformUtil.isWindowsOS()) {
RR_PATH = rrHome + File.separator + "rip.exe";
@ -94,7 +96,7 @@ class ExtractRegistry extends Extract {
}
final String rrFullHome = rrFullRoot.getAbsolutePath();
logger.log(Level.INFO, "RegRipper Full home: " + rrFullHome);
logger.log(Level.INFO, "RegRipper Full home: {0}", rrFullHome);
if (PlatformUtil.isWindowsOS()) {
RR_FULL_PATH = rrFullHome + File.separator + "rip.exe";
@ -105,10 +107,8 @@ class ExtractRegistry extends Extract {
/**
* Search for the registry hives on the system.
* @param dataSource Data source to search for hives in.
* @return List of registry hives
*/
private List<AbstractFile> findRegistryFiles(Content dataSource) {
private List<AbstractFile> findRegistryFiles() {
List<AbstractFile> allRegistryFiles = new ArrayList<>();
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
@ -138,12 +138,9 @@ class ExtractRegistry extends Extract {
/**
* Identifies registry files in the database by mtimeItem, runs regripper on them, and parses the output.
*
* @param dataSource
* @param controller
*/
private void analyzeRegistryFiles(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
List<AbstractFile> allRegistryFiles = findRegistryFiles(dataSource);
private void analyzeRegistryFiles() {
List<AbstractFile> allRegistryFiles = findRegistryFiles();
// open the log file
FileWriter logFile = null;
@ -171,7 +168,7 @@ class ExtractRegistry extends Extract {
continue;
}
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
@ -187,7 +184,7 @@ class ExtractRegistry extends Extract {
logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal);
RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
@ -572,8 +569,10 @@ class ExtractRegistry extends Extract {
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
analyzeRegistryFiles(dataSource, controller);
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
analyzeRegistryFiles();
}
@Override

View File

@ -36,7 +36,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
class Firefox extends Extract {
private static final Logger logger = Logger.getLogger(Firefox.class.getName());
private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0";
private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies";
private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies";
@ -59,21 +60,25 @@ class Firefox extends Extract {
private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads";
private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3";
private final IngestServices services = IngestServices.getInstance();
private Content dataSource;
private IngestJobContext context;
Firefox() {
moduleName = NbBundle.getMessage(Firefox.class, "Firefox.moduleName");
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
dataFound = false;
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);
this.getDownload(dataSource, controller);
this.getCookie(dataSource, controller);
this.getHistory();
this.getBookmark();
this.getDownload();
this.getCookie();
}
private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getHistory() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> historyFiles;
try {
@ -111,14 +116,14 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, historyQuery);
logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()});
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),
"Firefox.parentModuleName.noSpace"),
@ -155,14 +160,11 @@ class Firefox extends Extract {
/**
* Queries for bookmark files and adds artifacts
*
* @param dataSource
* @param controller
*/
private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getBookmark() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> bookmarkFiles = null;
List<AbstractFile> bookmarkFiles;
try {
bookmarkFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox");
} catch (TskCoreException ex) {
@ -195,15 +197,15 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, bookmarkQuery);
logger.log(Level.INFO, moduleName + "- Now getting bookmarks from " + temps + " with " + tempList.size() + "artifacts identified.");
logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()});
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),
"Firefox.parentModuleName.noSpace"),
@ -239,13 +241,10 @@ class Firefox extends Extract {
/**
* Queries for cookies file and adds artifacts
*
* @param dataSource
* @param controller
*/
private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getCookie() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> cookiesFiles = null;
List<AbstractFile> cookiesFiles;
try {
cookiesFiles = fileManager.findFiles(dataSource, "cookies.sqlite", "Firefox");
} catch (TskCoreException ex) {
@ -278,12 +277,12 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
boolean checkColumn = Util.checkColumn("creationTime", "moz_cookies", temps);
String query = null;
String query;
if (checkColumn) {
query = cookieQuery;
} else {
@ -291,10 +290,10 @@ class Firefox extends Extract {
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, query);
logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified.");
logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()});
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),
"Firefox.parentModuleName.noSpace"),
@ -339,27 +338,21 @@ class Firefox extends Extract {
/**
* Queries for downloads files and adds artifacts
*
* @param dataSource
* @param controller
*/
private void getDownload(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
getDownloadPreVersion24(dataSource, controller);
getDownloadVersion24(dataSource, controller);
private void getDownload() {
getDownloadPreVersion24();
getDownloadVersion24();
}
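The cookie query selection above (Util.checkColumn choosing cookieQuery vs cookieQueryV3) is a schema-probing pattern: detect which columns the profile database actually has, then run the matching SQL. A hedged JDBC sketch of the same idea follows; the queries are the ones from this file, while the metadata plumbing is illustrative and not necessarily how Util.checkColumn works.

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    // Hypothetical sketch: pick the query that matches the database schema.
    class SchemaAwareQuery {

        static String pickCookieQuery(Connection db) throws SQLException {
            boolean hasCreationTime;
            try (ResultSet cols = db.getMetaData().getColumns(null, null, "moz_cookies", "creationTime")) {
                hasCreationTime = cols.next();
            }
            return hasCreationTime
                    ? "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"
                    : "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies";
        }
    }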
/**
* Finds downloads artifacts from Firefox data from versions before 24.0.
*
* Downloads were stored in a separate downloads database.
*
* @param dataSource
* @param controller
*/
private void getDownloadPreVersion24(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getDownloadPreVersion24() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> downloadsFiles = null;
List<AbstractFile> downloadsFiles;
try {
downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox");
} catch (TskCoreException ex) {
@ -392,7 +385,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
@ -464,13 +457,10 @@ class Firefox extends Extract {
* Gets download artifacts from Firefox data from version 24.
*
* Downloads are stored in the places database.
*
* @param dataSource
* @param controller
*/
private void getDownloadVersion24(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getDownloadVersion24() {
FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> downloadsFiles = null;
List<AbstractFile> downloadsFiles;
try {
downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox");
} catch (TskCoreException ex) {
@ -504,17 +494,17 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, downloadQueryVersion24);
logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified.");
logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()});
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),

View File

@ -30,7 +30,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
@ -48,6 +48,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
private final List<Extract> extracters = new ArrayList<>();
private final List<Extract> browserExtracters = new ArrayList<>();
private IngestServices services = IngestServices.getInstance();
private IngestJobContext context;
private StringBuilder subCompleted = new StringBuilder();
RAImageIngestModule() {
@ -55,6 +56,8 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
Extract registry = new ExtractRegistry();
Extract iexplore = new ExtractIE();
Extract recentDocuments = new RecentDocumentsByLnk();
@ -79,22 +82,22 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
}
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
services.postMessage(IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName()));
controller.switchToDeterminate(extracters.size());
controller.progress(0);
progressBar.switchToDeterminate(extracters.size());
progressBar.progress(0);
ArrayList<String> errors = new ArrayList<>();
for (int i = 0; i < extracters.size(); i++) {
Extract extracter = extracters.get(i);
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName());
break;
}
try {
extracter.process(dataSource, controller);
extracter.process(dataSource, context);
} catch (Exception ex) {
logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex);
subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed",
@ -102,7 +105,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
errors.add(
NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName()));
}
controller.progress(i + 1);
progressBar.progress(i + 1);
errors.addAll(extracter.getErrorMessages());
}
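RAImageIngestModule shows the renamed progress API in use: DataSourceIngestModuleProgress is switched to determinate mode with one work unit per extractor, advanced after each one, and the loop now checks context.isJobCancelled() between extractors. A generic sketch of that loop, with a stand-in ProgressBar interface, is below.

    import java.util.List;
    import java.util.function.BooleanSupplier;

    // Hypothetical sketch of determinate progress reporting with cancellation.
    class DeterminateProgressLoop {

        interface ProgressBar {
            void switchToDeterminate(int totalWorkUnits);
            void progress(int workUnitsCompleted);
        }

        static void runAll(List<Runnable> extractors, ProgressBar progressBar, BooleanSupplier isJobCancelled) {
            progressBar.switchToDeterminate(extractors.size());
            progressBar.progress(0);
            for (int i = 0; i < extractors.size(); i++) {
                if (isJobCancelled.getAsBoolean()) {
                    break;  // leave the bar where it is; the job was cancelled
                }
                extractors.get(i).run();
                progressBar.progress(i + 1);
            }
        }
    }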

View File

@ -32,7 +32,8 @@ import java.util.Collection;
import org.sleuthkit.autopsy.coreutils.JLNK;
import org.sleuthkit.autopsy.coreutils.JLnkParser;
import org.sleuthkit.autopsy.coreutils.JLnkParserException;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -49,13 +50,15 @@ import org.sleuthkit.datamodel.*;
class RecentDocumentsByLnk extends Extract {
private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName());
private IngestServices services = IngestServices.getInstance();
private Content dataSource;
private IngestJobContext context;
/**
* Find the documents that Windows stores about recent documents and make artifacts.
* @param dataSource
* @param controller
*/
private void getRecentDocuments(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getRecentDocuments() {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> recentFiles;
@ -76,14 +79,14 @@ class RecentDocumentsByLnk extends Extract {
dataFound = true;
for (AbstractFile recentFile : recentFiles) {
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break;
}
if (recentFile.getSize() == 0) {
continue;
}
JLNK lnk = null;
JLNK lnk;
JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize());
try {
lnk = lnkParser.parse();
@ -100,7 +103,7 @@ class RecentDocumentsByLnk extends Extract {
continue;
}
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
String path = lnk.getBestPath();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(),
NbBundle.getMessage(this.getClass(),
@ -122,8 +125,10 @@ class RecentDocumentsByLnk extends Extract {
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
dataFound = false;
this.getRecentDocuments(dataSource, controller);
this.getRecentDocuments();
}
}
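RecentDocumentsByLnk keeps its per-item error handling: each .lnk file is parsed inside its own try/catch so one corrupt shortcut is logged and skipped instead of ending the pass, and zero-length files are skipped up front. A generic sketch of that skip-and-continue loop, with hypothetical Parser and item types, follows.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Hypothetical sketch: parse each item independently and keep going when
    // one of them is unparsable.
    class SkipBadItems {
        private static final Logger logger = Logger.getLogger(SkipBadItems.class.getName());

        interface Parser<T> { T parse(byte[] raw) throws Exception; }

        static <T> List<T> parseAll(List<byte[]> items, Parser<T> parser) {
            List<T> parsed = new ArrayList<>();
            for (byte[] raw : items) {
                if (raw.length == 0) {
                    continue;  // mirrors the size-zero skip in the loop above
                }
                try {
                    parsed.add(parser.parse(raw));
                } catch (Exception ex) {
                    logger.log(Level.WARNING, "Could not parse item, skipping", ex);
                }
            }
            return parsed;
        }
    }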

View File

@ -31,11 +31,11 @@ import java.util.logging.Level;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
@ -62,6 +62,7 @@ import org.xml.sax.SAXException;
*/
class SearchEngineURLQueryAnalyzer extends Extract {
private static final Logger logger = Logger.getLogger(SearchEngineURLQueryAnalyzer.class.getName());
private static final String XMLFILE = "SEUQAMappings.xml";
private static final String XSDFILE = "SearchEngineSchema.xsd";
private static String[] searchEngineNames;
@ -71,6 +72,8 @@ class SearchEngineURLQueryAnalyzer extends Extract {
NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.engineName.none"),
NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.domainSubStr.none"),
new HashMap<String,String>());
private Content dataSource;
private IngestJobContext context;
SearchEngineURLQueryAnalyzer() {
}
@ -223,7 +226,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
return basereturn;
}
private void getURLs(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
private void getURLs() {
int totalQueries = 0;
try {
//from blackboard_artifacts
@ -255,7 +258,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID());
getAttributes:
for (BlackboardAttribute attribute : listAttributes) {
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
break getAll; //User cancelled the process.
}
if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) {
@ -292,13 +295,13 @@ class SearchEngineURLQueryAnalyzer extends Extract {
} catch (TskException e) {
logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e);
} finally {
if (controller.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
logger.info("Operation terminated by user.");
}
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(
NbBundle.getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.parentModuleName.noSpace"),
BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY));
logger.info("Extracted " + totalQueries + " queries from the blackboard");
logger.log(Level.INFO, "Extracted {0} queries from the blackboard", totalQueries);
}
}
@ -314,15 +317,17 @@ class SearchEngineURLQueryAnalyzer extends Extract {
}
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
this.getURLs(dataSource, controller);
public void process(Content dataSource, IngestJobContext context) {
this.dataSource = dataSource;
this.context = context;
this.getURLs();
logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals());
}
@Override
void init() throws IngestModuleException {
try {
PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE);
PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE, false);
init2();
} catch (IOException e) {
String message = "Unable to find " + XMLFILE;

View File

@ -87,7 +87,7 @@ class UsbDeviceIdMapper {
*/
private void loadDeviceMap() throws FileNotFoundException, IOException {
devices = new HashMap<>();
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile);
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile, false);
try (Scanner dat = new Scanner(new FileInputStream(new java.io.File(PlatformUtil.getUserConfigDirectory() + File.separator + "USB_DATA.txt")))) {
/* Syntax of file:
*

View File

@ -53,7 +53,7 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName());
private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver";
private String moduleOutputDirPath;
private String configFileName = "scalpel.conf";
private final String configFileName = "scalpel.conf";
private String configFilePath;
private boolean initialized = false;
private ScalpelCarver carver;
@ -100,7 +100,7 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
// copy the default config file to the user's home directory if one
// is not already there
try {
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName, false);
} catch (IOException ex) {
String message = "Could not obtain the path to the Scalpel configuration file.";
logger.log(Level.SEVERE, message, ex);

View File

@ -24,7 +24,7 @@ import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestServices;
@ -53,12 +53,14 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
private boolean skipped = false;
private String calculatedHash = "";
private String storedHash = "";
private IngestJobContext context;
EwfVerifyIngestModule() {
}
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
verified = false;
skipped = false;
img = null;
@ -79,7 +81,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
}
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) {
imgName = dataSource.getName();
try {
img = dataSource.getImage();
@ -145,7 +147,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
// Read in byte size chunks and update the hash value with the data.
for (int i = 0; i < totalChunks; i++) {
if (statusHelper.isIngestJobCancelled()) {
if (context.isJobCancelled()) {
return ProcessResult.OK;
}
data = new byte[(int) chunkSize];
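The EwfVerify hunk swaps the cancellation check inside the chunked hashing loop over to context.isJobCancelled(). The overall shape (read the image in fixed-size chunks, feed each chunk to a MessageDigest, bail out early on cancellation) can be sketched in plain Java as below; the read callback and names are hypothetical.

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.function.BooleanSupplier;

    // Hypothetical sketch of chunked hashing with cooperative cancellation.
    // ChunkReader stands in for reading a slice of the image.
    class ChunkedHasher {

        interface ChunkReader {
            int read(byte[] buffer, long offset, long length);  // returns bytes read
        }

        static byte[] hash(ChunkReader image, long totalSize, int chunkSize,
                           BooleanSupplier isJobCancelled) throws NoSuchAlgorithmException {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            byte[] data = new byte[chunkSize];
            for (long offset = 0; offset < totalSize; offset += chunkSize) {
                if (isJobCancelled.getAsBoolean()) {
                    return null;  // cancelled: stop without reporting a verdict
                }
                long remaining = Math.min(chunkSize, totalSize - offset);
                int read = image.read(data, offset, remaining);
                md5.update(data, 0, read);
            }
            return md5.digest();
        }
    }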

git-daemon-export-okay (new, empty normal file)
View File

View File

@ -54,7 +54,6 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName());
private IngestServices services = IngestServices.getInstance();
private int messageId = 0; // RJCTODO: Not thread safe
private FileManager fileManager;
private IngestJobContext context;
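The excerpt above still carries the "RJCTODO: Not thread safe" note on messageId. One conventional fix, consistent with the AtomicLong counters introduced elsewhere in this commit, would be an AtomicInteger; a hypothetical sketch (not part of the commit) is below.

    import java.util.concurrent.atomic.AtomicInteger;

    // Hypothetical follow-up for the RJCTODO above: replace the int field with
    // an AtomicInteger so concurrent file-ingest threads get unique message ids.
    class MessageIdSource {
        private final AtomicInteger messageId = new AtomicInteger(0);

        int nextMessageId() {
            return messageId.incrementAndGet();
        }
    }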