Pull request prep and merge bug fixes

Richard Cordovano 2014-03-20 12:49:34 -04:00
parent ff0b814225
commit 8c433c12a1
50 changed files with 938 additions and 1017 deletions

View File

@ -74,7 +74,7 @@ class AddImageWizardIngestConfigPanel implements WizardDescriptor.Panel<WizardDe
this.dataSourcePanel = dsPanel;
ingestConfig = new IngestJobLauncher(AddImageWizardIngestConfigPanel.class.getCanonicalName());
List<String> messages = ingestConfig.getMissingIngestModuleMessages();
List<String> messages = ingestConfig.getContextSettingsWarnings();
if (messages.isEmpty() == false) {
StringBuilder warning = new StringBuilder();
for (String message : messages) {

View File

@ -57,7 +57,7 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
private static final Logger logger = Logger.getLogger(SampleDataSourceIngestModule.class);
@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
Case case1 = Case.getCurrentCase();
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
@ -76,9 +76,9 @@ class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSo
} catch (TskCoreException ex) {
logger.fatal("Error retrieving files from database: " + ex.getLocalizedMessage());
return IngestModule.ResultCode.OK;
return IngestModule.ProcessResult.OK;
}
return IngestModule.ResultCode.OK;
return IngestModule.ProcessResult.OK;
}
}

View File

@ -89,16 +89,16 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
}
@Override
public IngestModule.ResultCode process(AbstractFile abstractFile) {
public IngestModule.ProcessResult process(AbstractFile abstractFile) {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
return IngestModule.ResultCode.OK;
return IngestModule.ProcessResult.OK;
}
// skip NSRL / known files
if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) {
return IngestModule.ResultCode.OK;
return IngestModule.ProcessResult.OK;
}
/* Do a non-sensical calculation of the number of 0x00 bytes
@ -117,7 +117,7 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
if (attrId != -1) {
// Make an attribute using the ID for the private type that we previously created.
BlackboardAttribute attr = new BlackboardAttribute(attrId, "SampleFileIngestModule", count); // RJCTODO: Set up with name as exmaple
BlackboardAttribute attr = new BlackboardAttribute(attrId, "SampleFileIngestModule", count); // RJCTODO: Set up with module name as example
/* add it to the general info artifact. In real modules, you would likely have
* more complex data types and be making more specific artifacts.
@ -126,10 +126,10 @@ class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestMo
art.addAttribute(attr);
}
return IngestModule.ResultCode.OK;
return IngestModule.ProcessResult.OK;
} catch (TskCoreException ex) {
Exceptions.printStackTrace(ex);
return IngestModule.ResultCode.ERROR;
return IngestModule.ProcessResult.ERROR;
}
}

View File

@ -24,9 +24,6 @@ IngestJobLauncher.modName.tbirdParser.text=Thunderbird Parser
IngestJobLauncher.modName.mboxParser.text=MBox Parser
IngestJobLauncher.modName.emailParser.text=Email Parser
IngestJobLauncher.enabledMods.notFound.msg={0} was previously enabled, but could not be found
DataSourceTaskWorker.displayName.text={0} dataSource id\:{1}
DataSourceTaskWorker.progress.pending={0} (Pending...)
DataSourceTaskWorker.progress.cancelling={0} (Cancelling...)
IngestDialog.title.text=Ingest Modules
IngestDialog.startButton.title=Start
IngestDialog.closeButton.title=Close
@ -49,10 +46,13 @@ IngestManager.toHtmlStr.totalErrs.text=Total errors\: {0}
IngestManager.toHtmlStr.module.text=Module
IngestManager.toHtmlStr.time.text=Time
IngestManager.toHtmlStr.errors.text=Errors
IngestManager.IngestAbstractFileProcessor.displayName=File Ingest
IngestManager.IngestAbstractFileProcessor.process.cancelling={0} (Cancelling...)
IngestManager.FileTaskWorker.displayName=File Ingest
IngestManager.FileTaskWorker.process.cancelling={0} (Cancelling...)
IngestManager.EnqueueWorker.displayName.text=Queueing Ingest
IngestManager.EnqueueWorker.process.cancelling={0} (Cancelling...)
IngestManager.DataSourceTaskWorker.displayName.text={0} dataSource id\:{1}
IngestManager.DataSourceTaskWorker.progress.pending={0} (Pending...)
IngestManager.DataSourceTaskWorker.progress.cancelling={0} (Cancelling...)
IngestManager.datatSourceIngest.progress.text=DataSource Ingest {0}
IngestManager.fileIngest.progress.text=File Ingest {0}
IngestMessage.toString.type.text=type\: {0}
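
(The relocated worker keys keep their MessageFormat-style placeholders. As an illustrative sketch, not part of the commit, a caller would resolve the data source worker's display name roughly as below; the class name of the sketch and the argument values are made up.)

import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.ingest.IngestManager;

class BundleLookupSketch {
    // Resolves to something like "image1.dd dataSource id:42" via the relocated key.
    static String dataSourceWorkerDisplayName() {
        return NbBundle.getMessage(IngestManager.class,
                "IngestManager.DataSourceTaskWorker.displayName.text",
                "image1.dd", // fills {0}, the data source name (made-up value)
                42L);        // fills {1}, the data source id (made-up value)
    }
}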

View File

@ -36,5 +36,5 @@ public interface DataSourceIngestModule extends IngestModule {
* detect cancellation.
* @return A result code indicating success or failure of the processing.
*/
ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper);
ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper);
}

View File

@ -33,5 +33,5 @@ public interface FileIngestModule extends IngestModule {
* @param file The file
* @return A result code indicating success or failure.
*/
ResultCode process(AbstractFile file);
ProcessResult process(AbstractFile file);
}

View File

@ -35,9 +35,6 @@ import org.sleuthkit.datamodel.VolumeSystem;
/**
* Abstract visitor for getting all the files from content.
*/
// RJCTODO: Could this be moved to utility package, is there another version of this
// somewhere? An old comment said something about circular dependencies. Note: will use
// this for per ingest job progress bars.
abstract class GetFilesContentVisitor implements ContentVisitor<Collection<AbstractFile>> {
private static final Logger logger = Logger.getLogger(GetFilesContentVisitor.class.getName());

View File

@ -36,7 +36,6 @@ import javax.swing.JPanel;
import org.openide.util.NbBundle;
import org.sleuthkit.datamodel.Content;
// RJCTODO: Rename to RunIngestModulesDialog after internationalization.
/**
* Dialog box that allows ingest modules to be run on a data source.
* Used outside of the wizards.
@ -50,7 +49,7 @@ public final class IngestDialog extends JDialog {
public IngestDialog(JFrame frame, String title, boolean modal) {
super(frame, title, modal);
ingestConfigurator = new IngestJobLauncher(IngestDialog.class.getCanonicalName());
List<String> messages = ingestConfigurator.getMissingIngestModuleMessages();
List<String> messages = ingestConfigurator.getContextSettingsWarnings();
if (messages.isEmpty() == false) {
StringBuilder warning = new StringBuilder();
for (String message : messages) {

View File

@ -92,7 +92,7 @@ final class IngestJob {
dataSourceIngestPipelines.put(threadId, pipeline);
} else if (!dataSourceIngestPipelines.containsKey(threadId)) {
pipeline = new DataSourceIngestPipeline(this, ingestModuleTemplates);
pipeline.startUp(); // RJCTODO: Get errors and log
pipeline.startUp();
dataSourceIngestPipelines.put(threadId, pipeline);
} else {
pipeline = dataSourceIngestPipelines.get(threadId);
@ -138,7 +138,6 @@ final class IngestJob {
return (dataSourceIngestPipelines.isEmpty() && fileIngestPipelines.isEmpty());
}
// RJCTODO: Write story in JIRA for removing code dunplication
/**
* A data source ingest pipeline composed of a sequence of data source ingest
* modules constructed from ingest module templates.
@ -250,12 +249,12 @@ final class IngestJob {
}
@Override
public void startUp(IngestJobContext context) throws Exception {
public void startUp(IngestJobContext context) throws IngestModuleException {
module.startUp(context);
}
@Override
public IngestModule.ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
return module.process(dataSource, statusHelper);
}
@ -376,12 +375,12 @@ final class IngestJob {
}
@Override
public void startUp(IngestJobContext context) throws Exception {
public void startUp(IngestJobContext context) throws IngestModuleException {
module.startUp(context);
}
@Override
public IngestModule.ResultCode process(AbstractFile file) {
public IngestModule.ProcessResult process(AbstractFile file) {
return module.process(file);
}

View File

@ -58,7 +58,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
for (IngestModuleModel module : modules) {
IngestModuleTemplate moduleTemplate = module.getIngestModuleTemplate();
if (module.hasIngestOptionsPanel()) {
IngestModuleSettings options = module.getIngestOptionsPanel().getIngestJobOptions();
IngestModuleSettings options = module.getIngestOptionsPanel().getSettings();
moduleTemplate.setIngestOptions(options);
}
moduleTemplates.add(moduleTemplate);
@ -295,7 +295,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
private final IngestModuleTemplate moduleTemplate;
private IngestModuleGlobalSetttingsPanel resourcesConfigPanel = null;
private IngestModuleJobSettingsPanel ingestJobOptionsPanel = null;
private IngestModuleSettingsPanel ingestJobOptionsPanel = null;
IngestModuleModel(IngestModuleTemplate moduleTemplate) {
this.moduleTemplate = moduleTemplate;
@ -334,7 +334,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
return moduleTemplate.getIngestModuleFactory().providesModuleSettingsPanel();
}
IngestModuleJobSettingsPanel getIngestOptionsPanel() {
IngestModuleSettingsPanel getIngestOptionsPanel() {
return ingestJobOptionsPanel;
}
@ -347,7 +347,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel {
}
void saveResourcesConfig() {
resourcesConfigPanel.store();
resourcesConfigPanel.saveSettings();
}
}

View File

@ -18,109 +18,28 @@
*/
package org.sleuthkit.autopsy.ingest;
import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
* Acts as a facade for the parts of the ingest framework that make up the
* processing context of an ingest module.
* Provides an instance of an ingest module with services specific to the ingest
* job and the ingest pipeline of which the module is a part.
*/
public final class IngestJobContext {
private final IngestJob ingestJob;
private final IngestModuleFactory moduleFactory;
private final IngestManager ingestManager;
private final IngestScheduler scheduler;
private final Case autopsyCase;
private final SleuthkitCase sleuthkitCase;
IngestJobContext(IngestJob ingestJob, IngestModuleFactory moduleFactory) {
this.ingestJob = ingestJob;
this.moduleFactory = moduleFactory;
ingestManager = IngestManager.getDefault();
scheduler = IngestScheduler.getInstance();
autopsyCase = Case.getCurrentCase();
sleuthkitCase = this.autopsyCase.getSleuthkitCase();
}
public boolean isIngestJobCancelled() {
return this.ingestJob.isCancelled();
}
public Case getCase() {
return autopsyCase;
}
public SleuthkitCase getSleuthkitCase() {
return sleuthkitCase;
}
public String getOutputDirectoryAbsolutePath() {
return autopsyCase.getCaseDirectory() + File.separator + Case.getModulesOutputDirRelPath() + File.separator + moduleFactory.getModuleDisplayName();
}
public String getOutputDirectoryRelativePath() {
return "ModuleOutput" + File.separator + moduleFactory.getModuleDisplayName();
}
public void submitFilesForIngest(List<AbstractFile> files) {
public void addFilesToPipeline(List<AbstractFile> files) {
for (AbstractFile file : files) {
ingestManager.scheduleFileTask(ingestJob.getId(), file);
IngestManager.getDefault().scheduleFile(ingestJob.getId(), file); // RJCTODO: Should this API be just AbstractFile?
}
}
public void postIngestMessage(long ID, IngestMessage.MessageType messageType, String subject, String detailsHtml) {
IngestMessage message = IngestMessage.createMessage(ID, messageType, moduleFactory.getModuleDisplayName(), subject, detailsHtml);
ingestManager.postIngestMessage(message);
}
public void postIngestMessage(long ID, IngestMessage.MessageType messageType, String subject) {
IngestMessage message = IngestMessage.createMessage(ID, messageType, moduleFactory.getModuleDisplayName(), subject);
ingestManager.postIngestMessage(message);
}
public void postErrorIngestMessage(long ID, String subject, String detailsHtml) {
IngestMessage message = IngestMessage.createErrorMessage(ID, moduleFactory.getModuleDisplayName(), subject, detailsHtml);
ingestManager.postIngestMessage(message);
}
public void postWarningIngestMessage(long ID, String subject, String detailsHtml) {
IngestMessage message = IngestMessage.createWarningMessage(ID, moduleFactory.getModuleDisplayName(), subject, detailsHtml);
ingestManager.postIngestMessage(message);
}
public void postDataMessage(long ID, String subject, String detailsHtml, String uniqueKey, BlackboardArtifact data) {
IngestMessage message = IngestMessage.createDataMessage(ID, moduleFactory.getModuleDisplayName(), subject, detailsHtml, uniqueKey, data);
ingestManager.postIngestMessage(message);
}
public void fireDataEvent(BlackboardArtifact.ARTIFACT_TYPE artifactType) {
ModuleDataEvent event = new ModuleDataEvent(moduleFactory.getModuleDisplayName(), artifactType);
IngestManager.fireModuleDataEvent(event);
}
public void fireDataEvent(BlackboardArtifact.ARTIFACT_TYPE artifactType, Collection<BlackboardArtifact> artifactIDs) {
ModuleDataEvent event = new ModuleDataEvent(moduleFactory.getModuleDisplayName(), artifactType, artifactIDs);
IngestManager.fireModuleDataEvent(event);
}
// RJCTODO: Make story to convert existing core modules to use logging methods, address sloppy use of level...
public void logInfo(Class moduleClass, String message, Throwable ex) {
Logger.getLogger(moduleClass.getName()).log(Level.INFO, message, ex);
}
public void logWarning(Class moduleClass, String message, Throwable ex) {
Logger.getLogger(moduleClass.getName()).log(Level.WARNING, message, ex);
}
public void logError(Class moduleClass, String message, Throwable ex) {
Logger.getLogger(moduleClass.getName()).log(Level.SEVERE, message, ex);
}
}
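
(A module receives this trimmed-down context in startUp() and keeps it for the life of the job. The sketch below is illustrative only: the module and helper names are invented, and it assumes isIngestJobCancelled() survives the trimming shown above alongside addFilesToPipeline().)

import java.util.Collections;
import java.util.List;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.datamodel.AbstractFile;

class ArchiveExpanderIngestModule extends IngestModuleAdapter implements FileIngestModule {
    private IngestJobContext context;

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        this.context = context; // keep the job-specific context for later scheduling
    }

    @Override
    public ProcessResult process(AbstractFile file) {
        if (context.isIngestJobCancelled()) {
            return ProcessResult.OK; // stop scheduling new work once the job is cancelled
        }
        List<AbstractFile> derivedFiles = extractChildren(file); // hypothetical helper
        if (!derivedFiles.isEmpty()) {
            context.addFilesToPipeline(derivedFiles); // feed derived files back into file ingest
        }
        return ProcessResult.OK;
    }

    private List<AbstractFile> extractChildren(AbstractFile file) {
        return Collections.emptyList(); // placeholder for real archive expansion
    }
}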

View File

@ -25,16 +25,27 @@ import javax.swing.JPanel;
import org.sleuthkit.autopsy.coreutils.ModuleSettings;
import org.sleuthkit.datamodel.Content;
/**
* Provides a mechanism for creating and persisting a context-sensitive ingest
* pipeline configuration and launching ingest jobs to process one or more data
* sources.
*/
public final class IngestJobLauncher {
private static final String ENABLED_INGEST_MODULES_KEY = "Enabled_Ingest_Modules";
private static final String DISABLED_INGEST_MODULES_KEY = "Disabled_Ingest_Modules";
private static final String PARSE_UNALLOC_SPACE_KEY = "Process_Unallocated_Space";
private final String launcherContext;
private final List<String> missingIngestModuleErrorMessages = new ArrayList<>();
private final List<String> contextSettingsWarnings = new ArrayList<>();
private final List<Content> dataSourcesToIngest = new ArrayList<>();
private IngestJobConfigurationPanel ingestConfigPanel;
/**
* Constructs an ingest job launcher that loads and updates the ingest job
* and ingest pipeline for a particular context.
*
* @param launcherContext The context identifier.
*/
public IngestJobLauncher(String launcherContext) {
this.launcherContext = launcherContext;
@ -48,12 +59,30 @@ public final class IngestJobLauncher {
}
// Get the enabled and disabled ingest modules settings for the current
// context. The default settings make all ingest modules enabled.
// context. Observe that the default settings make all loaded ingest
// modules enabled.
HashSet<String> enabledModuleNames = getModulesNamesFromSetting(ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(loadedModuleNames));
HashSet<String> disabledModuleNames = getModulesNamesFromSetting(DISABLED_INGEST_MODULES_KEY, "");
// Create ingest module templates for the current context.
HashSet<String> knownModuleNames = new HashSet<>();
// Check for missing modules.
List<String> missingModuleNames = new ArrayList<>();
for (String moduleName : enabledModuleNames) {
if (!loadedModuleNames.contains(moduleName)) {
missingModuleNames.add(moduleName);
}
}
for (String moduleName : disabledModuleNames) {
if (!loadedModuleNames.contains(moduleName)) {
missingModuleNames.add(moduleName);
}
}
for (String moduleName : missingModuleNames) {
enabledModuleNames.remove(moduleName);
disabledModuleNames.remove(moduleName);
contextSettingsWarnings.add(String.format("Previously loaded %s module could not be found", moduleName));
}
// Create ingest module templates.
List<IngestModuleTemplate> moduleTemplates = new ArrayList<>();
for (IngestModuleFactory moduleFactory : moduleFactories) {
// RJCTODO: Make sure there is a story in JIRA for this.
@ -75,18 +104,10 @@ public final class IngestJobLauncher {
enabledModuleNames.add(moduleName);
}
moduleTemplates.add(moduleTemplate);
knownModuleNames.add(moduleName);
}
// Check for missing modules and update the enabled/disabled ingest
// module settings for any missing modules.
for (String moduleName : enabledModuleNames) {
if (!knownModuleNames.contains(moduleName)) {
missingIngestModuleErrorMessages.add(moduleName + " was previously enabled, but could not be found");
enabledModuleNames.remove(moduleName);
disabledModuleNames.add(moduleName);
}
}
// Update the enabled/disabled ingest module settings to reflect any
// missing modules or newly discovered modules.
ModuleSettings.setConfigSetting(launcherContext, ENABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(enabledModuleNames));
ModuleSettings.setConfigSetting(launcherContext, DISABLED_INGEST_MODULES_KEY, makeCommaSeparatedList(disabledModuleNames));
@ -97,12 +118,12 @@ public final class IngestJobLauncher {
}
boolean processUnallocatedSpace = Boolean.parseBoolean(ModuleSettings.getConfigSetting(launcherContext, PARSE_UNALLOC_SPACE_KEY));
// Make the configuration panel for the current context (view).
// Make the configuration panel for the context.
ingestConfigPanel = new IngestJobConfigurationPanel(moduleTemplates, processUnallocatedSpace);
}
public List<String> getMissingIngestModuleMessages() {
return missingIngestModuleErrorMessages;
public List<String> getContextSettingsWarnings() {
return contextSettingsWarnings;
}
public JPanel getIngestJobConfigPanel() {
@ -134,7 +155,7 @@ public final class IngestJobLauncher {
// options for each ingest module for the current launch context.
}
public void setDataSourcesToIngest(List<Content> dataSourcesToIngest) {
public void setDataSourcesToIngest(List<Content> dataSourcesToIngest) { // RJCTODO: This should really be handled by passing the data sources to startIngestJobs()
this.dataSourcesToIngest.clear();
this.dataSourcesToIngest.addAll(dataSourcesToIngest);
}
@ -180,7 +201,7 @@ public final class IngestJobLauncher {
if (!modulesSetting.isEmpty()) {
String[] settingNames = modulesSetting.split(", ");
for (String name : settingNames) {
// Map some old core module names to the current core module names.
// Map some old core module names to the current core module names. // RJCTODO: Do we have the right names?
switch (name) {
case "Thunderbird Parser":
case "MBox Parser":

View File

@ -273,7 +273,7 @@ public class IngestManager {
* @param pipelineContext ingest context used to ingest parent of the file
* to be scheduled
*/
void scheduleFileTask(long ingestJobId, AbstractFile file) { // RJCTODO: Consider renaming method
void scheduleFile(long ingestJobId, AbstractFile file) {
IngestJob job = this.ingestJobs.get(ingestJobId);
if (job == null) {
// RJCTODO: Handle severe error
@ -497,19 +497,18 @@ public class IngestManager {
@Override
protected Void doInBackground() throws Exception {
logger.log(Level.INFO, String.format("Data source ingest thread {0} started", this.id));
logger.log(Level.INFO, "Data source ingest thread (id={0}) started", this.id);
// Set up a progress bar that can be used to cancel all of the
// ingest jobs currently being performed.
final String displayName = NbBundle
.getMessage(this.getClass(), "IngestManager.IngestAbstractFileProcessor.displayName");
final String displayName = NbBundle.getMessage(this.getClass(), "IngestManager.DataSourceTaskWorker.displayName");
progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
logger.log(Level.INFO, "Data source ingest thread {0} cancelled", DataSourceTaskWorker.this.id);
if (progress != null) {
progress.setDisplayName(NbBundle.getMessage(this.getClass(),
"IngestManager.IngestAbstractFileProcessor.process.cancelling",
"IngestManager.DataSourceTaskWorker.process.cancelling",
displayName));
}
IngestManager.getDefault().stopAll();
@ -522,7 +521,7 @@ public class IngestManager {
IngestScheduler.DataSourceScheduler scheduler = IngestScheduler.getInstance().getDataSourceScheduler();
while (scheduler.hasNext()) {
if (isCancelled()) {
logger.log(Level.INFO, "Data source ingest thread {0} cancelled", this.id);
logger.log(Level.INFO, "Data source ingest thread (id={0}) cancelled", this.id);
return null;
}
@ -531,7 +530,7 @@ public class IngestManager {
pipeline.process(this, this.progress);
}
logger.log(Level.INFO, "Data source ingest thread {0} completed", this.id);
logger.log(Level.INFO, "Data source ingest thread (id={0}) completed", this.id);
IngestManager.getDefault().reportThreadDone(this.id);
return null;
}
@ -541,10 +540,10 @@ public class IngestManager {
try {
super.get();
} catch (CancellationException | InterruptedException e) {
logger.log(Level.INFO, "Data source ingest thread {0} cancelled", this.id);
logger.log(Level.INFO, "Data source ingest thread (id={0}) cancelled", this.id);
IngestManager.getDefault().reportThreadDone(this.id);
} catch (Exception ex) {
String message = String.format("Data source ingest thread {0} experienced a fatal error", this.id);
String message = String.format("Data source ingest thread (id=%d) experienced a fatal error", this.id);
logger.log(Level.SEVERE, message, ex);
IngestManager.getDefault().reportThreadDone(this.id);
} finally {
@ -568,7 +567,7 @@ public class IngestManager {
@Override
protected Object doInBackground() throws Exception {
logger.log(Level.INFO, String.format("File ingest thread {0} started", this.id));
logger.log(Level.INFO, "File ingest thread (id={0}) started", this.id);
// Set up a progress bar that can be used to cancel all of the
// ingest jobs currently being performed.
@ -577,10 +576,10 @@ public class IngestManager {
progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
logger.log(Level.INFO, "File ingest thread {0} cancelled", FileTaskWorker.this.id);
logger.log(Level.INFO, "File ingest thread (id={0}) cancelled", FileTaskWorker.this.id);
if (progress != null) {
progress.setDisplayName(
NbBundle.getMessage(this.getClass(), "IngestManager.EnqueueWorker.process.cancelling",
NbBundle.getMessage(this.getClass(), "IngestManager.FileTaskWorker.process.cancelling",
displayName));
}
IngestManager.getDefault().stopAll();
@ -597,7 +596,7 @@ public class IngestManager {
while (fileScheduler.hasNext()) {
if (isCancelled()) {
IngestManager.getDefault().reportThreadDone(this.id);
logger.log(Level.INFO, "File ingest thread {0} cancelled", this.id);
logger.log(Level.INFO, "File ingest thread (id={0}) cancelled", this.id);
return null;
}
@ -619,7 +618,7 @@ public class IngestManager {
}
}
logger.log(Level.INFO, "File ingest thread {0} completed", this.id);
logger.log(Level.INFO, "File ingest thread (id={0}) completed", this.id);
IngestManager.getDefault().reportThreadDone(this.id);
return null;
}
@ -630,7 +629,7 @@ public class IngestManager {
try {
super.get();
} catch (CancellationException | InterruptedException e) {
logger.log(Level.INFO, "File ingest thread {0} cancelled", this.id);
logger.log(Level.INFO, "File ingest thread (id={0}) cancelled", this.id);
IngestManager.getDefault().reportThreadDone(this.id);
} catch (Exception ex) {
String message = String.format("File ingest thread {0} experienced a fatal error", this.id);
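
(The logging changes above fix a placeholder mismatch: "{0}" is a MessageFormat-style placeholder that java.util.logging expands but String.format does not, so the old messages logged the literal "{0}". The corrected lines either pass the template and argument to the logger directly or switch String.format to printf-style "%d". A standalone illustration, not taken from the commit:)

import java.util.logging.Level;
import java.util.logging.Logger;

class PlaceholderStyles {
    private static final Logger logger = Logger.getLogger(PlaceholderStyles.class.getName());

    static void demo(long id) {
        // Broken: String.format uses %-style conversions, so "{0}" is emitted verbatim.
        logger.log(Level.INFO, String.format("Data source ingest thread {0} started", id));

        // Fixed option 1: let the logger expand the MessageFormat-style placeholder.
        logger.log(Level.INFO, "Data source ingest thread (id={0}) started", id);

        // Fixed option 2: use printf-style formatting when String.format is needed anyway.
        String message = String.format("Data source ingest thread (id=%d) experienced a fatal error", id);
        logger.log(Level.SEVERE, message);
    }
}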

View File

@ -23,13 +23,24 @@ package org.sleuthkit.autopsy.ingest;
*/
public interface IngestModule {
public enum ResultCode {
public enum ProcessResult {
OK,
ERROR,
ERROR
};
public class IngestModuleException extends Exception {
public IngestModuleException(String message) {
super(message);
}
}
// RJCTODO: Write header comment, make sure to mention "one module instance per thread"
void startUp(IngestJobContext context) throws Exception;
/**
* Invoked by the ingest frame
* @param context
* @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException
*/
void startUp(IngestJobContext context) throws IngestModuleException;
// RJCTODO: Write header comment, make sure to mention "one module instance per thread"
void shutDown(boolean ingestJobWasCancelled);
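
(The renamed enum and the new checked exception tighten the module contract: startUp() reports initialization failure with IngestModuleException instead of a bare Exception, and process() returns a ProcessResult. A hypothetical data source module against that contract might look like the following; the class name and failure condition are invented for illustration.)

import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.datamodel.Content;

class MinimalDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {

    @Override
    public void startUp(IngestJobContext context) throws IngestModuleException {
        if (!resourcesAvailable()) { // hypothetical precondition
            // Initialization problems now surface through the framework's checked exception.
            throw new IngestModuleException("Required resources are not available");
        }
    }

    @Override
    public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
        // Per-data-source work goes here; success or failure is reported with the renamed enum.
        return ProcessResult.OK;
    }

    private boolean resourcesAvailable() {
        return true; // placeholder
    }
}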

View File

@ -25,7 +25,7 @@ package org.sleuthkit.autopsy.ingest;
public abstract class IngestModuleAdapter implements IngestModule {
@Override
public void startUp(IngestJobContext context) throws Exception {
public void startUp(IngestJobContext context) throws IngestModuleException {
}
@Override

View File

@ -152,7 +152,7 @@ public interface IngestModuleFactory {
* @param ingestOptions Per ingest job options to initialize the panel.
* @return A user interface panel.
*/
IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions);
IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions);
/**
* Queries the factory to determine if it is capable of creating file ingest

View File

@ -54,7 +54,7 @@ public abstract class IngestModuleFactoryAdapter implements IngestModuleFactory
}
@Override
public IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
public IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
throw new UnsupportedOperationException();
}

View File

@ -19,16 +19,11 @@
package org.sleuthkit.autopsy.ingest;
import javax.swing.JPanel;
import org.sleuthkit.autopsy.corecomponents.OptionsPanel;
/**
* Base class for ingest module resources configuration panels.
*/
public abstract class IngestModuleGlobalSetttingsPanel extends JPanel implements OptionsPanel {
public abstract class IngestModuleGlobalSetttingsPanel extends JPanel {
@Override
public abstract void load();
@Override
public abstract void store();
public abstract void saveSettings();
}
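
(With OptionsPanel no longer baked into the base class, a global settings panel only has to provide saveSettings(); panels that still appear in the options UI opt back into OptionsPanel and delegate store() to saveSettings(), as FileExtMismatchConfigPanel and HashDbConfigPanel do below. A schematic example with an invented panel name:)

import org.sleuthkit.autopsy.corecomponents.OptionsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;

class ExampleGlobalSettingsPanel extends IngestModuleGlobalSetttingsPanel implements OptionsPanel {

    @Override
    public void saveSettings() {
        // Persist the module's global (not per-job) settings here.
    }

    @Override
    public void load() {
        // Populate the panel from the persisted settings.
    }

    @Override
    public void store() {
        saveSettings(); // keep the OptionsPanel contract by delegating to the new method
    }
}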

View File

@ -21,14 +21,14 @@ package org.sleuthkit.autopsy.ingest;
import javax.swing.JPanel;
/**
* Abstract base class for ingest module per ingest job options panels.
* Abstract base class for ingest module job settings panels.
*/
public abstract class IngestModuleJobSettingsPanel extends JPanel {
public abstract class IngestModuleSettingsPanel extends JPanel {
/**
* Gets the ingest options for an ingest module.
* Gets the ingest job settings for an ingest module.
*
* @return The ingest options.
* @return The ingest settings.
*/
public abstract IngestModuleSettings getIngestJobOptions();
public abstract IngestModuleSettings getSettings();
}

View File

@ -134,7 +134,7 @@ public final class IngestServices {
*/
public void scheduleFile(long dataSourceTaskId, AbstractFile file) {
logger.log(Level.INFO, "Scheduling file: {0}", file.getName());
manager.scheduleFileTask(dataSourceTaskId, file);
manager.scheduleFile(dataSourceTaskId, file);
}
/**

View File

@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.exifparser;
import com.drew.imaging.ImageMetadataReader;
@ -56,8 +55,8 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
*/
public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
private IngestServices services;
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private final IngestServices services = IngestServices.getDefault();
private int filesProcessed = 0;
private boolean filesToFire = false;
@ -65,24 +64,15 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws Exception {
services = IngestServices.getDefault();
logger.log(Level.INFO, "init() {0}", this.toString());
filesProcessed = 0;
filesToFire = false;
}
@Override
public ResultCode process(AbstractFile content) {
public ProcessResult process(AbstractFile content) {
//skip unalloc
if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
// skip known
if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
return ResultCode.OK;
return ProcessResult.OK;
}
// update the tree every 1000 files if we have EXIF data that is not being being displayed
@ -94,13 +84,13 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
//skip unsupported
if (!parsableFormat(content)) {
return ResultCode.OK;
return ProcessResult.OK;
}
return processFile(content);
}
ResultCode processFile(AbstractFile f) {
ProcessResult processFile(AbstractFile f) {
InputStream in = null;
BufferedInputStream bin = null;
@ -158,21 +148,17 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
filesToFire = true;
}
return ResultCode.OK;
}
catch (TskCoreException ex) {
return ProcessResult.OK;
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage());
return ResultCode.ERROR;
}
catch (ImageProcessingException ex) {
return ProcessResult.ERROR;
} catch (ImageProcessingException ex) {
logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()});
return ResultCode.ERROR;
}
catch (IOException ex) {
return ProcessResult.ERROR;
} catch (IOException ex) {
logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
return ResultCode.ERROR;
}
finally {
return ProcessResult.ERROR;
} finally {
try {
if (in != null) {
in.close();
@ -182,7 +168,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
}
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to close InputStream.", ex);
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
}
}
@ -201,7 +187,6 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
@Override
public void shutDown(boolean ingestJobCancelled) {
logger.log(Level.INFO, "completed exif parsing {0}", this.toString());
if (filesToFire) {
//send the final new data event
services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));

View File

@ -26,9 +26,9 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.openide.util.NbBundle;
/**
* An factory that creates file ingest modules that do hash database lookups.
* An factory that creates file ingest modules that parse EXIF meta data
* associated with media files (e.g., JPEG format files).
*/
@ServiceProvider(service = IngestModuleFactory.class)
public class ExifParserModuleFactory extends IngestModuleFactoryAdapter {
@ -39,7 +39,7 @@ public class ExifParserModuleFactory extends IngestModuleFactoryAdapter {
}
static String getModuleName() {
return NbBundle.getMessage(ExifParserFileIngestModule.class, // RJCTODO: Change bundles?
return NbBundle.getMessage(ExifParserFileIngestModule.class,
"ExifParserFileIngestModule.moduleName.text");
}

View File

@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.fileextmismatch;
import java.awt.Color;
@ -34,11 +33,14 @@ import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.filetypeid.FileTypeIdIngestModule;
import org.sleuthkit.autopsy.corecomponents.OptionsPanel;
/**
* Container panel for File Extension Mismatch Ingest Module advanced configuration options
* Container panel for File Extension Mismatch Ingest Module advanced
* configuration options
*/
final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel {
final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel implements OptionsPanel {
private static Logger logger = Logger.getLogger(FileExtMismatchConfigPanel.class.getName());
private HashMap<String, String[]> editableMap = new HashMap<>();
private ArrayList<String> mimeList = null;
@ -65,7 +67,6 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
// Handle selections on the left table
lsm = mimeTable.getSelectionModel();
lsm.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
ListSelectionModel listSelectionModel = (ListSelectionModel) e.getSource();
@ -96,7 +97,6 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
// Handle selections on the right table
ListSelectionModel extLsm = extTable.getSelectionModel();
extLsm.addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
ListSelectionModel listSelectionModel = (ListSelectionModel) e.getSource();
@ -518,16 +518,7 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
}
@Override
public void load() {
// Load the XML into a buffer that the user can modify. They can choose
// to save it back to the file after making changes.
editableMap = FileExtMismatchXML.getDefault().load();
updateMimeList();
updateExtList();
}
@Override
public void store() {
public void saveSettings() {
if (FileExtMismatchXML.getDefault().save(editableMap)) {
mimeErrLabel.setText(" ");
mimeRemoveErrLabel.setText(" ");
@ -547,6 +538,20 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
}
}
@Override
public void load() {
// Load the XML into a buffer that the user can modify. They can choose
// to save it back to the file after making changes.
editableMap = FileExtMismatchXML.getDefault().load();
updateMimeList();
updateExtList();
}
@Override
public void store() {
saveSettings();
}
private void setIsModified() {
saveButton.setEnabled(true);
saveMsgLabel.setText(" ");
@ -577,7 +582,6 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
boolean valid() {
return true;
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton addExtButton;
private javax.swing.JButton addTypeButton;
@ -730,5 +734,4 @@ final class FileExtMismatchConfigPanel extends IngestModuleGlobalSetttingsPanel
fireTableDataChanged();
}
}
}

View File

@ -25,7 +25,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
/**
@ -35,16 +35,16 @@ import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
@ServiceProvider(service = IngestModuleFactory.class)
public class FileExtMismatchDetectorModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return NbBundle.getMessage(FileExtMismatchIngestModule.class,
"FileExtMismatchIngestModule.moduleName");
}
@Override
public String getModuleDisplayName() {
return getModuleName();
}
@Override
public String getModuleDescription() {
return NbBundle.getMessage(FileExtMismatchIngestModule.class,
@ -67,7 +67,7 @@ public class FileExtMismatchDetectorModuleFactory extends IngestModuleFactoryAda
}
@Override
public IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
public IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
FileExtMismatchSimpleConfigPanel ingestOptionsPanel = new FileExtMismatchSimpleConfigPanel((FileExtMismatchDetectorOptions) ingestOptions);
return ingestOptionsPanel;
}
@ -91,6 +91,6 @@ public class FileExtMismatchDetectorModuleFactory extends IngestModuleFactoryAda
@Override
public FileIngestModule createFileIngestModule(IngestModuleSettings ingestOptions) {
return new FileExtMismatchIngestModule();
return new FileExtMismatchIngestModule(); // RJCTODO: Update to pass in options
}
}

View File

@ -47,42 +47,40 @@ import org.sleuthkit.datamodel.TskException;
*/
public class FileExtMismatchIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final Logger logger = Logger.getLogger(FileExtMismatchIngestModule.class.getName());
private static long processTime = 0;
private static int messageId = 0;
private static long numFiles = 0;
private static long processTime = 0; // RJCTODO: This is not thread safe
private static int messageId = 0; // RJCTODO: This is not thread safe
private static long numFiles = 0; // RJCTODO: This is not thread safe
private final IngestServices services = IngestServices.getDefault();
private boolean skipKnown = false;
private boolean skipNoExt = true;
private boolean skipTextPlain = false;
private IngestServices services;
private HashMap<String, String[]> SigTypeToExtMap = new HashMap<>();
FileExtMismatchIngestModule() {
}
@Override
public void startUp(IngestJobContext context) throws Exception {
super.startUp(context);
services = IngestServices.getDefault();
public void startUp(IngestJobContext context) throws IngestModuleException {
FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault();
SigTypeToExtMap = xmlLoader.load();
}
@Override
public ResultCode process(AbstractFile abstractFile) {
public ProcessResult process(AbstractFile abstractFile) {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
(abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
// deleted files often have content that was not theirs and therefor causes mismatch
if ((abstractFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC)) ||
(abstractFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.UNALLOC))) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (skipKnown && (abstractFile.getKnown() == FileKnown.KNOWN)) {
return ResultCode.OK;
return ProcessResult.OK;
}
try
@ -100,10 +98,10 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
services.fireModuleDataEvent(new ModuleDataEvent(FileExtMismatchDetectorModuleFactory.getModuleName(), ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED, Collections.singletonList(bart)));
}
return ResultCode.OK;
return ProcessResult.OK;
} catch (TskException ex) {
logger.log(Level.WARNING, "Error matching file signature", ex);
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
}
@ -161,7 +159,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
//details
detailsSb.append("<table border='0' cellpadding='4' width='280'>");
detailsSb.append("<tr><td>" + FileExtMismatchDetectorModuleFactory.getModuleName() + "</td></tr>");
detailsSb.append("<tr><td>").append(FileExtMismatchDetectorModuleFactory.getModuleName()).append("</td></tr>");
detailsSb.append("<tr><td>").append(
NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalProcTime"))
@ -177,17 +175,17 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
detailsSb.toString()));
}
// RJCTODO: Ingest setting
// RJCTODO: Ingest job setting
public void setSkipKnown(boolean flag) {
skipKnown = flag;
}
// RJCTODO: Ingest setting
// RJCTODO: Ingest job setting
public void setSkipNoExt(boolean flag) {
skipNoExt = flag;
}
// RJCTODO: Ingest setting
// RJCTODO: Ingest job setting
public void setSkipTextPlain(boolean flag) {
skipTextPlain = flag;
}
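
(The new RJCTODO notes flag that the static processTime, messageId, and numFiles counters are shared by every module instance, one per ingest thread, without synchronization. One conventional way to address that, sketched here rather than taken from the commit, is to switch to java.util.concurrent.atomic counters:)

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

class MismatchCounters { // illustrative holder for the shared counters
    private static final AtomicLong processTime = new AtomicLong(0);
    private static final AtomicLong numFiles = new AtomicLong(0);
    private static final AtomicInteger messageId = new AtomicInteger(0);

    static void recordFile(long elapsedMillis) {
        numFiles.incrementAndGet();            // safe concurrent increment
        processTime.addAndGet(elapsedMillis);  // safe concurrent accumulation
    }

    static int nextMessageId() {
        return messageId.incrementAndGet();    // unique ids across ingest threads
    }
}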

View File

@ -19,13 +19,13 @@
package org.sleuthkit.autopsy.fileextmismatch;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
/**
* UI component used to set ingest job options for file extension mismatch
* detector ingest modules.
*/
class FileExtMismatchSimpleConfigPanel extends IngestModuleJobSettingsPanel {
class FileExtMismatchSimpleConfigPanel extends IngestModuleSettingsPanel {
private FileExtMismatchDetectorOptions ingestJobOptions;
@ -41,7 +41,7 @@ class FileExtMismatchSimpleConfigPanel extends IngestModuleJobSettingsPanel {
}
@Override
public IngestModuleSettings getIngestJobOptions() {
public IngestModuleSettings getSettings() {
return ingestJobOptions;
}

View File

@ -32,7 +32,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.FileKnown;
import org.sleuthkit.datamodel.TskException;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
/**
@ -45,7 +45,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
private static final long MIN_FILE_SIZE = 512;
private final FileTypeIdentifierIngestJobOptions ingestJobOptions;
private long matchTime = 0;
private int messageId = 0;
private int messageId = 0; // RJCTODO: If this is not made a thread safe static, duplicate message ids will be used
private long numFiles = 0;
// The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed.
// If desired in the future to be more knowledgable about weird files or rare formats, we could
@ -57,20 +57,20 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
}
@Override
public ResultCode process(AbstractFile abstractFile) {
public ProcessResult process(AbstractFile abstractFile) {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)
|| (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (ingestJobOptions.shouldSkipKnownFiles() && (abstractFile.getKnown() == FileKnown.KNOWN)) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (abstractFile.getSize() < MIN_FILE_SIZE) {
return ResultCode.OK;
return ProcessResult.OK;
}
try {
@ -87,13 +87,13 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
// we don't fire the event because we just updated TSK_GEN_INFO, which isn't displayed in the tree and is vague.
}
return ResultCode.OK;
return ProcessResult.OK;
} catch (TskException ex) {
logger.log(Level.WARNING, "Error matching file signature", ex);
return ResultCode.ERROR;
return ProcessResult.ERROR;
} catch (Exception e) {
logger.log(Level.WARNING, "Error matching file signature", e);
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
}
@ -120,6 +120,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI
/**
* Validate if a given mime type is in the detector's registry.
*
* @param mimeType Full string of mime type, e.g. "text/html"
* @return true if detectable
*/

View File

@ -19,13 +19,13 @@
package org.sleuthkit.autopsy.filetypeid;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
/**
* UI component used to set ingest job options for file type identifier ingest
* modules.
*/
class FileTypeIdSimpleConfigPanel extends IngestModuleJobSettingsPanel {
class FileTypeIdSimpleConfigPanel extends IngestModuleSettingsPanel {
private final FileTypeIdentifierIngestJobOptions ingestJobOptions;
@ -40,7 +40,7 @@ class FileTypeIdSimpleConfigPanel extends IngestModuleJobSettingsPanel {
}
@Override
public IngestModuleSettings getIngestJobOptions() {
public IngestModuleSettings getSettings() {
return ingestJobOptions;
}

View File

@ -25,7 +25,7 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
/**
* An factory that creates file ingest modules that determine the types of
@ -66,7 +66,7 @@ public class FileTypeIdentifierModuleFactory extends IngestModuleFactoryAdapter
}
@Override
public IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestJobOptions) {
public IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestJobOptions) {
return new FileTypeIdSimpleConfigPanel((FileTypeIdentifierIngestJobOptions) ingestJobOptions);
}

View File

@ -38,6 +38,7 @@ import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableCellRenderer;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.corecomponents.OptionsPanel;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.TskCoreException;
@ -46,9 +47,11 @@ import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb.KnownFilesType;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
/**
* Instances of this class provide a comprehensive UI for managing the hash sets configuration.
* Instances of this class provide a comprehensive UI for managing the hash sets
* configuration.
*/
public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel implements OptionsPanel {
private static final String NO_SELECTION_TEXT = NbBundle
.getMessage(HashDbConfigPanel.class, "HashDbConfigPanel.noSelectionText");
private static final String ERROR_GETTING_PATH_TEXT = NbBundle
@ -96,8 +99,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
HashDb db = ((HashSetTable) hashSetTable).getSelection();
if (db != null) {
updateComponentsForSelection(db);
}
else {
} else {
updateComponentsForNoSelection();
}
}
@ -141,16 +143,14 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
try {
hashDbLocationLabel.setText(shortenPath(db.getDatabasePath()));
}
catch (TskCoreException ex) {
} catch (TskCoreException ex) {
Logger.getLogger(HashDbConfigPanel.class.getName()).log(Level.SEVERE, "Error getting database path of " + db.getHashSetName() + " hash database", ex);
hashDbLocationLabel.setText(ERROR_GETTING_PATH_TEXT);
}
try {
indexPathLabel.setText(shortenPath(db.getIndexPath()));
}
catch (TskCoreException ex) {
} catch (TskCoreException ex) {
Logger.getLogger(HashDbConfigPanel.class.getName()).log(Level.SEVERE, "Error getting index path of " + db.getHashSetName() + " hash database", ex);
indexPathLabel.setText(ERROR_GETTING_PATH_TEXT);
}
@ -164,13 +164,11 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexStatusText.indexGen"));
hashDbIndexStatusLabel.setForeground(Color.black);
indexButton.setEnabled(false);
}
else if (db.hasIndex()) {
} else if (db.hasIndex()) {
if (db.hasIndexOnly()) {
hashDbIndexStatusLabel.setText(
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexStatusText.indexOnly"));
}
else {
} else {
hashDbIndexStatusLabel.setText(
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexStatusText.indexed"));
}
@ -179,21 +177,18 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
indexButton.setText(
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexButtonText.reIndex"));
indexButton.setEnabled(true);
}
else {
} else {
indexButton.setText(NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexButtonText.index"));
indexButton.setEnabled(false);
}
}
else {
} else {
hashDbIndexStatusLabel.setText(
NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexStatusText.noIndex"));
hashDbIndexStatusLabel.setForeground(Color.red);
indexButton.setText(NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.indexButtonText.index"));
indexButton.setEnabled(true);
}
}
catch (TskCoreException ex) {
} catch (TskCoreException ex) {
Logger.getLogger(HashDbConfigPanel.class.getName()).log(Level.SEVERE, "Error getting index state of hash database", ex);
hashDbIndexStatusLabel.setText(ERROR_GETTING_INDEX_STATUS_TEXT);
hashDbIndexStatusLabel.setForeground(Color.red);
@ -234,13 +229,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
}
@Override
public void load() {
hashSetTable.clearSelection();
hashSetTableModel.refreshModel();
}
@Override
public void store() {
public void saveSettings() {
//Checking for for any unindexed databases
List<HashDb> unindexed = new ArrayList<>();
for (HashDb hashSet : hashSetManager.getAllHashSets()) {
@ -248,8 +237,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
if (!hashSet.hasIndex()) {
unindexed.add(hashSet);
}
}
catch (TskCoreException ex) {
} catch (TskCoreException ex) {
Logger.getLogger(HashDbConfigPanel.class.getName()).log(Level.SEVERE, "Error getting index info for hash database", ex);
}
}
@ -257,14 +245,24 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
//If unindexed ones are found, show a popup box that will either index them, or remove them.
if (unindexed.size() == 1) {
showInvalidIndex(false, unindexed);
}
else if (unindexed.size() > 1){
} else if (unindexed.size() > 1) {
showInvalidIndex(true, unindexed);
}
hashSetManager.save();
}
@Override
public void load() {
hashSetTable.clearSelection();
hashSetTableModel.refreshModel();
}
@Override
public void store() {
saveSettings();
}
public void cancel() {
HashDbManager.getInstance().loadLastSavedConfiguration();
}
@ -277,8 +275,10 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
}
/**
* Displays the popup box that tells user that some of his databases are unindexed, along with solutions.
* This method is related to ModalNoButtons, to be removed at a later date.
* Displays the popup box that tells user that some of his databases are
* unindexed, along with solutions. This method is related to
* ModalNoButtons, to be removed at a later date.
*
* @param plural Whether or not there are multiple unindexed databases
* @param unindexed The list of unindexed databases. Can be of size 1.
*/
@ -290,8 +290,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
}
if (plural) {
message = NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.dbsNotIndexedMsg", total);
}
else{
} else {
message = NbBundle.getMessage(this.getClass(), "HashDbConfigPanel.dbNotIndexedMsg", total);
}
int res = JOptionPane.showConfirmDialog(this, message,
@ -320,6 +319,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
* This class implements a table for displaying configured hash sets.
*/
private class HashSetTable extends JTable {
@Override
public Component prepareRenderer(TableCellRenderer renderer, int row, int column) {
// Use the hash set name as the cell text.
@ -331,8 +331,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
// in red.
if (hashSetTableModel.indexExists(row)) {
cellRenderer.setForeground(Color.black);
}
else{
} else {
cellRenderer.setForeground(Color.red);
}
@ -359,6 +358,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
* configured hash sets.
*/
private class HashSetTableModel extends AbstractTableModel {
List<HashDb> hashSets = HashDbManager.getInstance().getAllHashSets();
@Override
@ -384,8 +384,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
private boolean indexExists(int rowIndex) {
try {
return hashSets.get(rowIndex).hasIndex();
}
catch (TskCoreException ex) {
} catch (TskCoreException ex) {
Logger.getLogger(HashSetTableModel.class.getName()).log(Level.SEVERE, "Error getting index info for hash database", ex);
return false;
}
@ -410,8 +409,7 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
HashDb getHashSetAt(int index) {
if (!hashSets.isEmpty() && index >= 0 && index < hashSets.size()) {
return hashSets.get(index);
}
else {
} else {
return null;
}
}
@ -768,7 +766,6 @@ public final class HashDbConfigPanel extends IngestModuleGlobalSetttingsPanel {
((HashSetTable) hashSetTable).selectRowByName(hashDb.getHashSetName());
}
}//GEN-LAST:event_createDatabaseButtonActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton createDatabaseButton;
private javax.swing.JButton deleteDatabaseButton;

View File

@ -63,7 +63,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws Exception {
public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException {
super.startUp(context);
services = IngestServices.getDefault();
skCase = Case.getCurrentCase().getSleuthkitCase();
@ -110,15 +110,15 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
}
@Override
public ResultCode process(AbstractFile file) {
public ProcessResult process(AbstractFile file) {
// Skip unallocated space files.
if (file.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
// bail out if we have no hashes set
if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) {
return ResultCode.OK;
return ProcessResult.OK;
}
// calc hash value
@ -139,13 +139,13 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.calcHashValueErr",
name)));
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
}
// look up in known bad first
boolean foundBad = false;
ResultCode ret = ResultCode.OK;
ProcessResult ret = ProcessResult.OK;
for (HashDb db : knownBadHashSets) {
try {
long lookupstart = System.currentTimeMillis();
@ -165,7 +165,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.settingKnownBadStateErr",
name)));
ret = ResultCode.ERROR;
ret = ProcessResult.ERROR;
}
String hashSetName = db.getHashSetName();
@ -196,7 +196,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.lookingUpKnownBadHashValueErr",
name)));
ret = ResultCode.ERROR;
ret = ProcessResult.ERROR;
}
}
@ -213,7 +213,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
break;
} catch (TskException ex) {
logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex);
ret = ResultCode.ERROR;
ret = ProcessResult.ERROR;
}
}
lookuptime += (System.currentTimeMillis() - lookupstart);
@ -227,7 +227,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.lookingUpKnownHashValueErr",
name)));
ret = ResultCode.ERROR;
ret = ProcessResult.ERROR;
}
}
}

View File

@ -33,12 +33,12 @@ import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.autopsy.hashdatabase.HashDbManager.HashDb;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
/**
* Instances of this class provide a simplified UI for managing the hash sets configuration.
*/
public class HashDbSimpleConfigPanel extends IngestModuleJobSettingsPanel {
public class HashDbSimpleConfigPanel extends IngestModuleSettingsPanel {
private HashDatabasesTableModel knownTableModel;
private HashDatabasesTableModel knownBadTableModel;
@ -76,7 +76,7 @@ public class HashDbSimpleConfigPanel extends IngestModuleJobSettingsPanel {
}
@Override
public IngestModuleSettings getIngestJobOptions() {
public IngestModuleSettings getSettings() {
HashDbManager hashDbManager = HashDbManager.getInstance();
List<HashDbManager.HashDb> knownFileHashSets = hashDbManager.getKnownFileHashSets();
List<HashDbManager.HashDb> knownBadFileHashSets = hashDbManager.getKnownBadFileHashSets();

View File

@ -25,7 +25,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
/**
@ -64,7 +64,7 @@ public class HashLookupModuleFactory extends IngestModuleFactoryAdapter {
}
@Override
public IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
public IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestOptions) {
HashDbSimpleConfigPanel ingestOptionsPanel = new HashDbSimpleConfigPanel();
ingestOptionsPanel.load();
return ingestOptionsPanel;

View File

@ -29,6 +29,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
// Note: This is a first step towards a keyword lists manager; it consists of
// the portion of the keyword list management code that resided in the keyword
// search file ingest module.
// RJCTODO: How do keyword lists get initialized?
final class KeywordListsManager {
private static KeywordListsManager instance = null;
@ -47,6 +48,7 @@ final class KeywordListsManager {
}
private KeywordListsManager() {
addKeywordListsForFileIngest(null);
}
/**
@ -56,7 +58,7 @@ final class KeywordListsManager {
*
* @param listNames The names of disabled lists to temporarily enable
*/
void addKeywordListsForFileIngest(List<String> listNames) {
synchronized void addKeywordListsForFileIngest(List<String> listNames) {
keywords.clear();
keywordListNames.clear();
@ -64,7 +66,7 @@ final class KeywordListsManager {
KeywordSearchListsXML globalKeywordSearchOptions = KeywordSearchListsXML.getCurrent();
for (KeywordList list : globalKeywordSearchOptions.getListsL()) {
String listName = list.getName();
if ((list.getUseForIngest() == true) || (null != listNames && listNames.contains(listName))) {
if ((list.getUseForIngest() == true) || (listNames != null && listNames.contains(listName))) {
keywordListNames.add(listName);
logMessage.append(listName).append(" ");
}
@ -84,7 +86,7 @@ final class KeywordListsManager {
*
* @return The names of the enabled keyword lists
*/
List<String> getNamesOfKeywordListsForFileIngest() {
synchronized List<String> getNamesOfKeywordListsForFileIngest() {
return new ArrayList<>(keywordListNames);
}
@ -94,7 +96,7 @@ final class KeywordListsManager {
*
* @return True if there are no keywords specified, false otherwise
*/
boolean hasNoKeywordsForSearch() {
synchronized boolean hasNoKeywordsForSearch() {
return (keywords.isEmpty());
}
}
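The synchronized accessors added above follow the usual synchronized-singleton shape: all reads and writes of the shared list state go through synchronized methods, and callers get defensive copies. A minimal sketch, with illustrative names rather than the real manager's API:

import java.util.ArrayList;
import java.util.List;

final class ListsManagerSketch {
    private static ListsManagerSketch instance;
    private final List<String> listNames = new ArrayList<>();

    private ListsManagerSketch() {
    }

    static synchronized ListsManagerSketch getInstance() {
        if (instance == null) {
            instance = new ListsManagerSketch();
        }
        return instance;
    }

    // All access to the shared list is synchronized, matching the
    // synchronized addKeywordListsForFileIngest()/getNames...() methods above.
    synchronized void setEnabledLists(List<String> names) {
        listNames.clear();
        if (names != null) {
            listNames.addAll(names);
        }
    }

    synchronized List<String> getEnabledLists() {
        return new ArrayList<>(listNames); // defensive copy, as in the real manager
    }
}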

View File

@ -19,13 +19,14 @@
package org.sleuthkit.autopsy.keywordsearch;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.corecomponents.OptionsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
/**
* Global options panel for keyword searching.
*/
// RJCTODO: Why is this a public class?
public final class KeywordSearchConfigurationPanel extends IngestModuleGlobalSetttingsPanel {
public final class KeywordSearchConfigurationPanel extends IngestModuleGlobalSetttingsPanel implements OptionsPanel {
private KeywordSearchConfigurationPanel1 listsPanel;
private KeywordSearchConfigurationPanel3 languagesPanel;
@ -84,12 +85,17 @@ public final class KeywordSearchConfigurationPanel extends IngestModuleGlobalSet
}
@Override
public void store() {
public void saveSettings() {
listsPanel.store();
languagesPanel.store();
generalPanel.store();
}
@Override
public void store() {
saveSettings();
}
public void cancel() {
KeywordSearchListsXML.getCurrent().reload();
}
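The panel now satisfies two contracts by having the legacy store() delegate to the new saveSettings(), so both entry points persist the same state. A minimal sketch of that delegation, with simplified stand-in interfaces:

interface GlobalSettingsPanelLike { void saveSettings(); }
interface OptionsPanelLike { void store(); }

final class ConfigPanelSketch implements GlobalSettingsPanelLike, OptionsPanelLike {
    @Override
    public void saveSettings() {
        // persist each sub-panel's settings here
        System.out.println("settings saved");
    }

    @Override
    public void store() {
        saveSettings(); // keep the older entry point working
    }
}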

View File

@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.awt.event.ActionEvent;
@ -76,11 +75,13 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
* on currently configured lists for ingest and writes results to blackboard
* Reports interesting events to Inbox and to viewers
*
* Registered as a module in layer.xml RJCTODO: Track this down, does not seem to be true
* Registered as a module in layer.xml. RJCTODO: Track this down; it does not
* seem to be true.
*/
public final class KeywordSearchIngestModule extends IngestModuleAdapter implements FileIngestModule {
enum UpdateFrequency {
FAST(20),
AVG(10),
SLOW(5),
@ -96,9 +97,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
return time;
}
};
private static final Logger logger = Logger.getLogger(KeywordSearchIngestModule.class.getName());
private IngestServices services;
private IngestServices services = IngestServices.getDefault();
private Ingester ingester = null;
private volatile boolean commitIndex = false; //whether to commit index next time
private volatile boolean runSearcher = false; //whether to run searcher next time
@ -114,7 +114,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
private Set<Long> curDataSourceIds;
private static final ReentrantReadWriteLock rwLock = new ReentrantReadWriteLock(true); //use fairness policy
private static final Lock searcherLock = rwLock.writeLock();
private volatile int messageID = 0;
private volatile int messageID = 0; // RJCTODO: Despite volatile, this is not thread safe; the increment is not atomic
private boolean processedFiles;
private volatile boolean finalSearcherDone = true; //mark as done, until it's inited
private SleuthkitCase caseHandle = null;
@ -124,6 +124,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
private Tika tikaFormatDetector;
private enum IngestStatus {
TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested
STRINGS_INGESTED, ///< Strings were extracted from file
METADATA_INGESTED, ///< No content, so we just text_ingested metadata
@ -136,14 +137,105 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
KeywordSearchIngestModule() {
}
/**
* Initializes the module for a new ingest run: sets up threads and timers and
* retrieves the settings and keyword lists to run on.
*
*/
@Override
public ResultCode process(AbstractFile abstractFile) {
public void startUp(IngestJobContext context) throws IngestModuleException {
logger.log(Level.INFO, "init()");
initialized = false;
caseHandle = Case.getCurrentCase().getSleuthkitCase();
tikaFormatDetector = new Tika();
ingester = Server.getIngester();
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
logger.log(Level.SEVERE, msg);
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), msg, details));
throw new IngestModuleException(msg);
}
} catch (KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
//this means Solr is not properly initialized
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), msg, details));
throw new IngestModuleException(msg);
}
try {
// make an actual query to verify that server is responding
// we had cases where getStatus was OK, but the connection resulted in a 404
server.queryNumIndexedDocuments();
} catch (KeywordSearchModuleException | NoOpenCoreException ex) {
throw new IngestModuleException(
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.exception.errConnToSolr.msg",
ex.getMessage()));
}
//initialize extractors
stringExtractor = new AbstractFileStringExtract(this);
stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
//log the scripts used for debugging
final StringBuilder sbScripts = new StringBuilder();
for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
sbScripts.append(s.name()).append(" ");
}
logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString());
textExtractors = new ArrayList<>();
//order matters, more specific extractors first
textExtractors.add(new AbstractFileHtmlExtract(this));
textExtractors.add(new AbstractFileTikaTextExtract(this));
ingestStatus = new HashMap<>();
if (KeywordListsManager.getInstance().hasNoKeywordsForSearch()) {
services.postMessage(IngestMessage.createWarningMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
}
processedFiles = false;
finalSearcherDone = false;
searcherDone = true; //make sure to start the initial currentSearcher
//keeps track of all results per run not to repeat reporting the same hits
currentResults = new HashMap<>();
curDataSourceIds = new HashSet<>();
indexer = new Indexer();
final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs);
logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs);
commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
initialized = true;
commitTimer.start();
searchTimer.start();
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
if (initialized == false) //error initializing indexing/Solr
{
logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName());
ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
return ResultCode.OK;
return ProcessResult.OK;
}
try {
//add data source id of the file to the set, keeping track of images being ingested
@ -156,13 +248,13 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) {
//skip indexing of virtual dirs (no content, no real name) - will index children files
return ResultCode.OK;
return ProcessResult.OK;
}
if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) {
//index meta-data only
indexer.indexFile(abstractFile, false);
return ResultCode.OK;
return ProcessResult.OK;
}
processedFiles = true;
@ -173,7 +265,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
//index the file and content (if the content is supported)
indexer.indexFile(abstractFile, true);
return ResultCode.OK;
return ProcessResult.OK;
}
/**
@ -281,98 +373,6 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
initialized = false;
}
/**
* Initializes the module for new ingest run Sets up threads, timers,
* retrieves settings, keyword lists to run on
*
*/
@Override
public void startUp(IngestJobContext context) throws Exception {
logger.log(Level.INFO, "init()");
services = IngestServices.getDefault();
initialized = false;
caseHandle = Case.getCurrentCase().getSleuthkitCase();
tikaFormatDetector = new Tika();
ingester = Server.getIngester();
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
logger.log(Level.SEVERE, msg);
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), msg, details));
throw new Exception(msg);
}
} catch (KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
//this means Solr is not properly initialized
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), msg, details));
throw new Exception(msg);
}
try {
// make an actual query to verify that server is responding
// we had cases where getStatus was OK, but the connection resulted in a 404
server.queryNumIndexedDocuments();
} catch (KeywordSearchModuleException | NoOpenCoreException ex) {
throw new Exception(
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.exception.errConnToSolr.msg",
ex.getMessage()));
}
//initialize extractors
stringExtractor = new AbstractFileStringExtract(this);
stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
//log the scripts used for debugging
final StringBuilder sbScripts = new StringBuilder();
for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
sbScripts.append(s.name()).append(" ");
}
logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString());
textExtractors = new ArrayList<>();
//order matters, more specific extractors first
textExtractors.add(new AbstractFileHtmlExtract(this));
textExtractors.add(new AbstractFileTikaTextExtract(this));
ingestStatus = new HashMap<>();
// RJCTODO: Fetch lists for job and check?
// if (keywords.isEmpty() || keywordLists.isEmpty()) {
// services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
// }
processedFiles = false;
finalSearcherDone = false;
searcherDone = true; //make sure to start the initial currentSearcher
//keeps track of all results per run not to repeat reporting the same hits
currentResults = new HashMap<>();
curDataSourceIds = new HashSet<>();
indexer = new Indexer();
final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs);
logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs);
commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
initialized = true;
commitTimer.start();
searchTimer.start();
}
/**
* Commits index and notifies listeners of index update
*/
@ -435,8 +435,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
if (error_index > 0) {
MessageNotifyUtil.Notify.error(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrsTitle"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxErrMsgFiles", error_index));
}
else if (error_io + error_text > 0) {
} else if (error_io + error_text > 0) {
MessageNotifyUtil.Notify.warn(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxWarnMsgTitle"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.idxErrReadFilesMsg"));
}
@ -597,8 +596,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
* Adds the file to the index. Detects file type, calls extractors, etc.
*
* @param aFile File to analyze
* @param indexContent False if only metadata should be text_ingested. True if
* content and metadata should be index.
* @param indexContent False if only metadata should be text_ingested.
* True if content and metadata should be indexed.
*/
private void indexFile(AbstractFile aFile, boolean indexContent) {
//logger.log(Level.INFO, "Processing AbstractFile: " + abstractFile.getName());
@ -616,8 +615,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
try {
ingester.ingest(aFile, false); //meta-data only
ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED);
}
catch (IngesterException ex) {
} catch (IngesterException ex) {
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex);
}
@ -630,11 +628,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
try {
is = new ReadContentInputStream(aFile);
detectedFormat = tikaFormatDetector.detect(is, aFile.getName());
}
catch (Exception e) {
} catch (Exception e) {
logger.log(Level.WARNING, "Could not detect format using tika for file: " + aFile, e);
}
finally {
} finally {
if (is != null) {
try {
is.close();
@ -655,8 +651,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
try {
ingester.ingest(aFile, false); //meta-data only
ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED);
}
catch (IngesterException ex) {
} catch (IngesterException ex) {
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex);
}
@ -732,10 +727,10 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
logger.log(Level.INFO, "Pending start of new searcher");
}
final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName") +
(finalRun ? (" - "+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : "");
progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("+
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") +")"), null, new Cancellable() {
final String displayName = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.displayName")
+ (finalRun ? (" - " + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.finalizeMsg")) : "");
progressGroup = AggregateProgressFactory.createSystemHandle(displayName + (" ("
+ NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.doInBackGround.pendingMsg") + ")"), null, new Cancellable() {
@Override
public boolean cancel() {
logger.log(Level.INFO, "Cancelling the searcher by user.");
@ -800,8 +795,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
boolean isRegex = !keywordQuery.isLiteral();
if (isRegex) {
del = new TermComponentQuery(keywordQuery);
}
else {
} else {
del = new LuceneQuery(keywordQuery);
del.escape();
}
@ -1006,8 +1000,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
// call get to see if there were any errors
try {
get();
}
catch (InterruptedException | ExecutionException e) {
} catch (InterruptedException | ExecutionException e) {
logger.log(Level.SEVERE, "Error performing keyword search: " + e.getMessage());
services.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchModuleFactory.getModuleName(), "Error performing keyword search", e.getMessage()));
}
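The rewritten startUp() above fails fast by throwing a checked module exception when the Solr server cannot be reached, instead of leaving the module half-initialized. A minimal sketch of that style, assuming hypothetical ServerCheck and ModuleStartupException names:

final class StartupSketch {
    static final class ModuleStartupException extends Exception {
        ModuleStartupException(String msg) { super(msg); }
        ModuleStartupException(String msg, Throwable cause) { super(msg, cause); }
    }

    interface ServerCheck {
        boolean isRunning() throws Exception;
    }

    void startUp(ServerCheck server) throws ModuleStartupException {
        try {
            if (!server.isRunning()) {
                throw new ModuleStartupException("Search server is not running");
            }
        } catch (ModuleStartupException ex) {
            throw ex;
        } catch (Exception ex) {
            // Wrap lower-level failures so callers only see the module exception.
            throw new ModuleStartupException("Could not query search server", ex);
        }
        // ...continue with extractor/timer setup only after the check passes.
    }
}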

View File

@ -28,13 +28,13 @@ import javax.swing.table.AbstractTableModel;
import javax.swing.table.TableColumn;
import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
import org.sleuthkit.autopsy.ingest.NoIngestModuleSettings;
/**
* Ingest job options panel for the keyword search file ingest module.
*/
public class KeywordSearchIngestSimplePanel extends IngestModuleJobSettingsPanel {
public class KeywordSearchIngestSimplePanel extends IngestModuleSettingsPanel {
private final static Logger logger = Logger.getLogger(KeywordSearchIngestSimplePanel.class.getName());
public static final String PROP_OPTIONS = "Keyword Search_Options";
@ -72,7 +72,7 @@ public class KeywordSearchIngestSimplePanel extends IngestModuleJobSettingsPanel
}
@Override
public IngestModuleSettings getIngestJobOptions() {
public IngestModuleSettings getSettings() {
return new NoIngestModuleSettings();
}

View File

@ -26,7 +26,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
import org.sleuthkit.autopsy.ingest.IngestModuleJobSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleSettingsPanel;
import org.sleuthkit.autopsy.ingest.IngestModuleGlobalSetttingsPanel;
/**
@ -60,7 +60,7 @@ public class KeywordSearchModuleFactory extends IngestModuleFactoryAdapter {
}
@Override
public IngestModuleJobSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestJobOptions) {
public IngestModuleSettingsPanel getModuleSettingsPanel(IngestModuleSettings ingestJobOptions) {
KeywordSearchIngestSimplePanel ingestOptionsPanel = new KeywordSearchIngestSimplePanel();
ingestOptionsPanel.load();
return ingestOptionsPanel;

View File

@ -33,6 +33,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.datamodel.*;
abstract class Extract {
@ -47,7 +48,7 @@ abstract class Extract {
Extract() {
}
void init() throws Exception {
void init() throws IngestModuleException {
}
abstract void process(Content dataSource, DataSourceIngestModuleStatusHelper controller);

View File

@ -35,7 +35,7 @@ import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
@ -48,7 +48,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
private static int messageId = 0;
private final List<Extract> extracters = new ArrayList<>();
private final List<Extract> browserExtracters = new ArrayList<>();
private IngestServices services;
private IngestServices services = IngestServices.getDefault();
private StringBuilder subCompleted = new StringBuilder();
RAImageIngestModule() {
@ -59,7 +59,32 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
}
@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void startUp(IngestJobContext context) throws IngestModuleException {
Extract registry = new ExtractRegistry();
Extract iexplore = new ExtractIE();
Extract recentDocuments = new RecentDocumentsByLnk();
Extract chrome = new Chrome();
Extract firefox = new Firefox();
Extract SEUQA = new SearchEngineURLQueryAnalyzer();
extracters.add(chrome);
extracters.add(firefox);
extracters.add(iexplore);
extracters.add(recentDocuments);
extracters.add(SEUQA); // this needs to run after the web browser modules
extracters.add(registry); // this runs last because it is slowest
browserExtracters.add(chrome);
browserExtracters.add(firefox);
browserExtracters.add(iexplore);
for (Extract extracter : extracters) {
extracter.init();
}
}
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
services.postMessage(IngestMessage.createMessage(getNextMessageId(), MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName()));
controller.switchToDeterminate(extracters.size());
@ -134,7 +159,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
historyMsg.toString());
services.postMessage(inboxMsg);
return ResultCode.OK;
return ProcessResult.OK;
}
@Override
@ -156,33 +181,6 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
}
}
@Override
public void startUp(IngestJobContext context) throws Exception {
services = IngestServices.getDefault();
Extract registry = new ExtractRegistry();
Extract iexplore = new ExtractIE();
Extract recentDocuments = new RecentDocumentsByLnk();
Extract chrome = new Chrome();
Extract firefox = new Firefox();
Extract SEUQA = new SearchEngineURLQueryAnalyzer();
extracters.add(chrome);
extracters.add(firefox);
extracters.add(iexplore);
extracters.add(recentDocuments);
extracters.add(SEUQA); // this needs to run after the web browser modules
extracters.add(registry); // this runs last because it is slowest
browserExtracters.add(chrome);
browserExtracters.add(firefox);
browserExtracters.add(iexplore);
for (Extract extracter : extracters) {
extracter.init();
}
}
private void stop() {
for (Extract extracter : extracters) {
try {

View File

@ -18,6 +18,7 @@
*/
package org.sleuthkit.autopsy.recentactivity;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
@ -26,25 +27,24 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
/**
* A factory that creates data source ingest modules that extract recent activity artifacts.
* A factory that creates data source ingest modules that extract recent
* activity artifacts from data sources.
*/
@ServiceProvider(service = IngestModuleFactory.class)
public class RecentActivityExtracterModuleFactory extends IngestModuleFactoryAdapter {
public final static String MODULE_NAME = "Recent Activity";
public final static String MODULE_DESCRIPTION = "Extracts recent user activity, such as Web browsing, recently used documents and installed programs.";
static String getModuleName() {
return NbBundle.getMessage(RAImageIngestModule.class, "RAImageIngestModule.getName");
}
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return MODULE_NAME;
}
@Override
public String getModuleDescription() {
return MODULE_DESCRIPTION;
return NbBundle.getMessage(RAImageIngestModule.class, "RAImageIngestModule.getDesc");
}
@Override
@ -62,4 +62,3 @@ public class RecentActivityExtracterModuleFactory extends IngestModuleFactoryAda
return new RAImageIngestModule();
}
}

View File

@ -36,6 +36,7 @@ import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
@ -315,16 +316,18 @@ class SearchEngineURLQueryAnalyzer extends Extract {
@Override
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
this.getURLs(dataSource, controller);
logger.info("Search Engine stats: \n" + getTotals());
logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals());
}
@Override
void init() throws Exception {
void init() throws IngestModuleException {
try {
PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE);
init2();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to find " + XMLFILE, e);
String message = "Unable to find " + XMLFILE;
logger.log(Level.SEVERE, message, e);
throw new IngestModuleException(message);
}
}

View File

@ -23,7 +23,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
@ -64,17 +63,65 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
}
@Override
public ResultCode process(AbstractFile abstractFile) {
ScalpelCarver.init();
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
// make sure this is Windows
String os = System.getProperty("os.name");
if (!os.startsWith("Windows")) {
String message = "Scalpel carving module is not compatible with non-Windows OS's at this time.";
logger.log(Level.SEVERE, message);
throw new IngestModuleException(message);
}
carver = new ScalpelCarver();
if (!carver.isInitialized()) {
String message = "Error initializing scalpel carver.";
logger.log(Level.SEVERE, message);
throw new IngestModuleException(message); // RJCTODO: Needs additional internationalization
}
// make sure module output directory exists; create it if it doesn't
moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath()
+ File.separator + MODULE_OUTPUT_DIR_NAME;
File moduleOutputDir = new File(moduleOutputDirPath);
if (!moduleOutputDir.exists()) {
if (!moduleOutputDir.mkdir()) {
String message = "Could not create the output directory for the Scalpel module.";
logger.log(Level.SEVERE, message);
throw new IngestModuleException(message);
}
}
// create path to scalpel config file in user's home directory
configFilePath = PlatformUtil.getUserConfigDirectory()
+ File.separator + configFileName;
// copy the default config file to the user's home directory if one
// is not already there
try {
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
} catch (IOException ex) {
String message = "Could not obtain the path to the Scalpel configuration file.";
logger.log(Level.SEVERE, message, ex);
throw new IngestModuleException(message);
}
initialized = true;
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
ScalpelCarver.init(); // RJCTODO: Is this ScalpelCarver class thread-safe?
if (!initialized) {
return ResultCode.OK;
return ProcessResult.OK;
}
// only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType();
if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
return ResultCode.OK;
return ProcessResult.OK;
}
// create the output directory for this run
@ -82,8 +129,8 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
File scalpelOutputDir = new File(scalpelOutputDirPath);
if (!scalpelOutputDir.exists()) {
if (!scalpelOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath);
return ResultCode.OK;
logger.log(Level.SEVERE, "Could not create Scalpel output directory: {0}", scalpelOutputDirPath);
return ProcessResult.OK;
}
}
@ -96,9 +143,9 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex);
}
while (parent != null) {
if (parent instanceof FileSystem ||
parent instanceof Volume ||
parent instanceof Image) {
if (parent instanceof FileSystem
|| parent instanceof Volume
|| parent instanceof Image) {
id = parent.getId();
break;
}
@ -112,7 +159,7 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
// make sure we have a valid systemID
if (id == -1) {
logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile.");
return ResultCode.OK;
return ProcessResult.OK;
}
// carve the AbstractFile
@ -120,18 +167,8 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
try {
output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath);
} catch (ScalpelException ex) {
logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId());
return ResultCode.OK;
}
// get the image's size
long imageSize = Long.MAX_VALUE;
try {
imageSize = abstractFile.getImage().getSize();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not obtain the image's size.");
logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID {0}", abstractFile.getId());
return ProcessResult.OK;
}
// add a carved file to the DB for each file that scalpel carved
@ -144,7 +181,7 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
try {
byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")");
logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile ({0})", abstractFile.getName());
break;
}
@ -184,53 +221,8 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges
}
// reschedule carved files
context.submitFilesForIngest(new ArrayList<AbstractFile>(carvedFiles));
context.addFilesToPipeline(new ArrayList<AbstractFile>(carvedFiles));
return ResultCode.OK;
}
@Override
public void startUp(IngestJobContext context) throws Exception {
this.context = context;
// make sure this is Windows
String os = System.getProperty("os.name");
if (!os.startsWith("Windows")) {
logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time.");
return;
}
carver = new ScalpelCarver();
if (! carver.isInitialized()) {
logger.log(Level.SEVERE, "Error initializing scalpel carver. ");
return;
}
// make sure module output directory exists; create it if it doesn't
moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() +
File.separator + MODULE_OUTPUT_DIR_NAME;
File moduleOutputDir = new File(moduleOutputDirPath);
if (!moduleOutputDir.exists()) {
if (!moduleOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module.");
return;
}
}
// create path to scalpel config file in user's home directory
configFilePath = PlatformUtil.getUserConfigDirectory()
+ File.separator + configFileName;
// copy the default config file to the user's home directory if one
// is not already there
try {
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex);
return;
}
initialized = true;
return ProcessResult.OK;
}
}
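The startUp() above now treats a missing module output directory as a fatal setup error rather than a silent return. A compact sketch of that step using only java.io.File, with a generic exception and placeholder paths standing in for the module's own types and strings:

import java.io.File;

final class OutputDirSetupSketch {
    static String ensureOutputDir(String baseDir, String moduleName) throws Exception {
        String path = baseDir + File.separator + moduleName;
        File dir = new File(path);
        if (!dir.exists() && !dir.mkdirs()) {
            // Fail fast, as the rewritten startUp() does, instead of silently returning.
            throw new Exception("Could not create module output directory: " + path);
        }
        return path;
    }
}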

View File

@ -18,7 +18,8 @@
*/
package org.sleuthkit.autopsy.scalpel;
// TODO: Uncomment the following line to allow the ingest framework to use this module
// TODO: Uncomment the following line to allow the ingest framework to use this
// module. The dependency has already been added to the project.
//import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;

View File

@ -56,18 +56,20 @@ import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
/**
* 7Zip ingest module Extracts supported archives, adds extracted DerivedFiles,
* 7Zip ingest module extracts supported archives, adds extracted DerivedFiles,
* reschedules extracted DerivedFiles for ingest.
*/
public final class SevenZipIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final Logger logger = Logger.getLogger(SevenZipIngestModule.class.getName());
private IngestServices services;
private volatile int messageID = 0;
private IngestServices services = IngestServices.getDefault();
private volatile int messageID = 0; // RJCTODO: This is not actually thread safe
static final String[] SUPPORTED_EXTENSIONS = {"zip", "rar", "arj", "7z", "7zip", "gzip", "gz", "bzip2", "tar", "tgz",}; // "iso"};
private String unpackDir; //relative to the case, to store in db
private String unpackDirPath; //absolute, to extract to
@ -94,11 +96,15 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
@Override
public void startUp(IngestJobContext context) throws Exception{
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
unpackDir = context.getOutputDirectoryRelativePath();
unpackDirPath = context.getOutputDirectoryAbsolutePath();
fileManager = context.getCase().getServices().getFileManager();
final Case currentCase = Case.getCurrentCase();
unpackDir = Case.getModulesOutputDirRelPath() + File.separator + ArchiveFileExtractorModuleFactory.getModuleName();
unpackDirPath = currentCase.getModulesOutputDirAbsPath() + File.separator + ArchiveFileExtractorModuleFactory.getModuleName();
fileManager = currentCase.getServices().getFileManager();
File unpackDirPathFile = new File(unpackDirPath);
if (!unpackDirPathFile.exists()) {
@ -126,7 +132,6 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
ArchiveFileExtractorModuleFactory.getModuleName());
String details = NbBundle.getMessage(this.getClass(), "SevenZipIngestModule.init.errCantInitLib",
e.getMessage());
//MessageNotifyUtil.Notify.error(msg, details);
services.postMessage(IngestMessage.createErrorMessage(++messageID, ArchiveFileExtractorModuleFactory.getModuleName(), msg, details));
throw new RuntimeException(e);
}
@ -135,17 +140,17 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
@Override
public ResultCode process(AbstractFile abstractFile) {
public ProcessResult process(AbstractFile abstractFile) {
if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (abstractFile.isFile() == false || !isSupported(abstractFile)) {
return ResultCode.OK;
return ProcessResult.OK;
}
//check if already has derived files, skip
@ -156,12 +161,12 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
final String localRootAbsPath = getLocalRootAbsPath(uniqueFileName);
if (new File(localRootAbsPath).exists()) {
logger.log(Level.INFO, "File already has been processed as it has children and local unpacked file, skipping: {0}", abstractFile.getName());
return ResultCode.OK;
return ProcessResult.OK;
}
}
} catch (TskCoreException e) {
logger.log(Level.INFO, "Error checking if file already has been processed, skipping: {0}", abstractFile.getName());
return ResultCode.OK;
return ProcessResult.OK;
}
logger.log(Level.INFO, "Processing with archive extractor: {0}", abstractFile.getName());
@ -169,10 +174,10 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
List<AbstractFile> unpackedFiles = unpack(abstractFile);
if (!unpackedFiles.isEmpty()) {
sendNewFilesEvent(abstractFile, unpackedFiles);
context.submitFilesForIngest(unpackedFiles);
context.addFilesToPipeline(unpackedFiles);
}
return ResultCode.OK;
return ProcessResult.OK;
}
private void sendNewFilesEvent(AbstractFile archive, List<AbstractFile> unpackedFiles) {
@ -231,8 +236,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
if (cRatio >= MAX_COMPRESSION_RATIO) {
String itemName = archiveFileItem.getPath();
logger.log(Level.INFO, "Possible zip bomb detected, compression ration: " + cRatio
+ " for in archive item: " + itemName);
logger.log(Level.INFO, "Possible zip bomb detected, compression ration: {0} for in archive item: {1}", new Object[]{cRatio, itemName});
String msg = NbBundle.getMessage(this.getClass(),
"SevenZipIngestModule.isZipBombCheck.warnMsg", archiveName, itemName);
String details = NbBundle.getMessage(this.getClass(),
@ -400,7 +404,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
"SevenZipIngestModule.unpack.notEnoughDiskSpace.details");
//MessageNotifyUtil.Notify.error(msg, details);
services.postMessage(IngestMessage.createErrorMessage(++messageID, ArchiveFileExtractorModuleFactory.getModuleName(), msg, details));
logger.log(Level.INFO, "Skipping archive item due not sufficient disk space for this item: " + archiveFile.getName() + ", " + fileName);
logger.log(Level.INFO, "Skipping archive item due not sufficient disk space for this item: {0}, {1}", new Object[]{archiveFile.getName(), fileName});
continue; //skip this file
} else {
//update est. disk space during this archive, so we don't need to poll for every file extracted
@ -517,9 +521,10 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
//close progress bar
if (progressStarted)
if (progressStarted) {
progress.finish();
}
}
//create artifact and send user message
if (hasEncrypted) {
@ -527,7 +532,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
try {
BlackboardArtifact artifact = archiveFile.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
artifact.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), ArchiveFileExtractorModuleFactory.getModuleName(), encryptionType));
context.fireDataEvent(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED);
services.fireModuleDataEvent(new ModuleDataEvent(ArchiveFileExtractorModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error creating blackboard artifact for encryption detected for file: " + archiveFile, ex);
}
@ -536,8 +541,6 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
String details = NbBundle.getMessage(this.getClass(),
"SevenZipIngestModule.unpack.encrFileDetected.details",
archiveFile.getName(), ArchiveFileExtractorModuleFactory.getModuleName());
// MessageNotifyUtil.Notify.info(msg, details);
services.postMessage(IngestMessage.createWarningMessage(++messageID, ArchiveFileExtractorModuleFactory.getModuleName(), msg, details));
}
@ -565,14 +568,12 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
}
} catch (TskCoreException ex) {
}
// if no blackboard entry for file type, do it manually for ZIP files:
if (attributeFound) {
return false;
}
else {
} else {
return isZipFileHeader(file);
}
}
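The zip-bomb guard above compares an item's unpacked size to its packed size against MAX_COMPRESSION_RATIO and skips suspicious entries. An illustrative, self-contained version of that check; the threshold value here is a placeholder, not the module's actual constant:

final class ZipBombCheckSketch {
    private static final int MAX_COMPRESSION_RATIO = 600;

    static boolean looksLikeZipBomb(long packedSize, long unpackedSize) {
        if (packedSize <= 0) {
            return false; // nothing meaningful to compare
        }
        long ratio = unpackedSize / packedSize;
        return ratio >= MAX_COMPRESSION_RATIO;
    }

    public static void main(String[] args) {
        System.out.println(looksLikeZipBomb(1024, 10L * 1024 * 1024 * 1024)); // true
        System.out.println(looksLikeZipBomb(1024, 4096));                     // false
    }
}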

View File

@ -1,5 +1,5 @@
OpenIDE-Module-Name=ewfVerify
EwfVerifyIngestModule.moduleName.text=EWF Verify
EwfVerifyIngestModule.moduleName.text=EWF Verifier
EwfVerifyIngestModule.moduleDesc.text=Validates the integrity of E01 files.
EwfVerifyIngestModule.process.errProcImg=Error processing {0}
EwfVerifyIngestModule.process.skipNonEwf=Skipping non-ewf image {0}

View File

@ -27,7 +27,8 @@ import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
/**
* An factory that creates file ingest modules that do hash database lookups.
* A factory that creates data source ingest modules that verify the integrity
* of Expert Witness Format (EWF) files (.e01).
*/
@ServiceProvider(service = IngestModuleFactory.class)
public class EwfVerifierModuleFactory extends IngestModuleFactoryAdapter {

View File

@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ewfverify;
import java.security.MessageDigest;
@ -47,7 +46,6 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
private static final Logger logger = Logger.getLogger(EwfVerifyIngestModule.class.getName());
private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
private static final IngestServices services = IngestServices.getDefault();
private IngestJobContext context;
private Image img;
private String imgName;
private MessageDigest messageDigest;
@ -61,8 +59,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
}
@Override
public void startUp(IngestJobContext context) throws Exception {
this.context = context;
public void startUp(IngestJobContext context) throws IngestModuleException {
verified = false;
skipped = false;
img = null;
@ -83,7 +80,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
}
@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
imgName = dataSource.getName();
try {
img = dataSource.getImage();
@ -94,35 +91,33 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
NbBundle.getMessage(this.getClass(),
"EwfVerifyIngestModule.process.errProcImg",
imgName)));
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
// Skip images that are not E01
if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
img = null;
logger.log(Level.INFO, "Skipping non-ewf image " + imgName);
logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"EwfVerifyIngestModule.process.skipNonEwf",
imgName)));
skipped = true;
return ResultCode.OK;
return ProcessResult.OK;
}
if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
{
if ((img.getMd5() != null) && !img.getMd5().isEmpty()) {
storedHash = img.getMd5().toLowerCase();
logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
}
else {
} else {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"EwfVerifyIngestModule.process.noStoredHash",
imgName)));
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
logger.log(Level.INFO, "Starting ewf verification of " + img.getName());
logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"EwfVerifyIngestModule.process.startingImg",
@ -130,7 +125,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
long size = img.getSize();
if (size == 0) {
logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried.");
logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(),
NbBundle.getMessage(this.getClass(),
"EwfVerifyIngestModule.process.errGetSizeOfImg",
@ -152,7 +147,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
// Read in byte size chunks and update the hash value with the data.
for (int i = 0; i < totalChunks; i++) {
if (statusHelper.isCancelled()) {
return ResultCode.OK;
return ProcessResult.OK;
}
data = new byte[(int) chunkSize];
try {
@ -162,7 +157,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
"EwfVerifyIngestModule.process.errReadImgAtChunk", imgName, i);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(), msg));
logger.log(Level.SEVERE, msg, ex);
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
messageDigest.update(data);
statusHelper.progress(i);
@ -172,7 +167,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
verified = calculatedHash.equals(storedHash);
logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash});
return ResultCode.OK;
return ProcessResult.OK;
}
@Override

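The verification loop above reads the image in fixed-size chunks, updates the hash with each chunk, and compares the final digest to the stored value. A self-contained sketch of the same idea using only java.security.MessageDigest; the chunk size and hex formatting are illustrative choices:

import java.io.InputStream;
import java.security.MessageDigest;

final class ChunkedMd5Sketch {
    static boolean verify(InputStream in, String storedHashHex) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] chunk = new byte[32 * 1024];
        int read;
        while ((read = in.read(chunk)) != -1) {
            md5.update(chunk, 0, read); // only hash the bytes actually read
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
            hex.append(String.format("%02x", b));
        }
        return hex.toString().equals(storedHashHex.toLowerCase());
    }
}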
View File

@ -49,7 +49,7 @@ public class EmailParserModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleDescription() {
return "This module detects and parses mbox and pst/ost files and populates email artifacts in the blackboard.";
return NbBundle.getMessage(ThunderbirdMboxFileIngestModule.class, "ThunderbirdMboxFileIngestModule.getDesc.text");
}
@Override

View File

@ -30,7 +30,7 @@ import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModule.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestServices;
@ -47,12 +47,13 @@ import org.sleuthkit.datamodel.TskException;
/**
* File-level ingest module that detects MBOX files based on signature.
* Understands Thunderbird folder layout to provide additional structure and metadata.
* Understands Thunderbird folder layout to provide additional structure and
* metadata.
*/
public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName());
private IngestServices services;
private IngestServices services = IngestServices.getDefault();
private int messageId = 0; // RJCTODO: Not thread safe
private FileManager fileManager;
private IngestJobContext context;
@ -61,20 +62,26 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
}
@Override
public ResultCode process(AbstractFile abstractFile) {
public void startUp(IngestJobContext context) throws IngestModuleException {
this.context = context;
fileManager = Case.getCurrentCase().getServices().getFileManager();
}
@Override
public ProcessResult process(AbstractFile abstractFile) {
// skip known
if (abstractFile.getKnown().equals(TskData.FileKnown.KNOWN)) {
return ResultCode.OK;
return ProcessResult.OK;
}
//skip unalloc
if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
return ResultCode.OK;
return ProcessResult.OK;
}
if (abstractFile.isVirtual()) {
return ResultCode.OK;
return ProcessResult.OK;
}
// check its signature
@ -99,7 +106,7 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
return processPst(abstractFile);
}
return ResultCode.OK;
return ProcessResult.OK;
}
/**
@ -108,7 +115,7 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
* @param abstractFile The pst/ost data file to process.
* @return
*/
private ResultCode processPst(AbstractFile abstractFile) {
private ProcessResult processPst(AbstractFile abstractFile) {
String fileName = getTempPath() + File.separator + abstractFile.getName()
+ "-" + String.valueOf(abstractFile.getId());
File file = new File(fileName);
@ -120,14 +127,14 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
"ThunderbirdMboxFileIngestModule.processPst.errMsg.outOfDiskSpace",
abstractFile.getName()));
services.postMessage(msg);
return ResultCode.OK;
return ProcessResult.OK;
}
try {
ContentUtils.writeToFile(abstractFile, file);
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed writing pst file to disk.", ex);
return ResultCode.OK;
return ProcessResult.OK;
}
PstParser parser = new PstParser(services);
@ -154,7 +161,7 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
NbBundle.getMessage(this.getClass(),
"ThunderbirdMboxFileIngestModule.processPst.errProcFile.details"));
logger.log(Level.INFO, "PSTParser failed to parse {0}", abstractFile.getName());
return ResultCode.ERROR;
return ProcessResult.ERROR;
}
if (file.delete() == false) {
@ -168,16 +175,17 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
abstractFile.getName()), errors);
}
return ResultCode.OK;
return ProcessResult.OK;
}
/**
* Parse and extract email messages and attachments from an MBox file.
*
* @param abstractFile
* @param ingestContext
* @return
*/
private ResultCode processMBox(AbstractFile abstractFile) {
private ProcessResult processMBox(AbstractFile abstractFile) {
String mboxFileName = abstractFile.getName();
String mboxParentDir = abstractFile.getParentPath();
// use the local path to determine the e-mail folder structure
@ -185,8 +193,7 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
// email folder is everything after "Mail" or ImapMail
if (mboxParentDir.contains("/Mail/")) {
emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/Mail/") + 5);
}
else if (mboxParentDir.contains("/ImapMail/")) {
} else if (mboxParentDir.contains("/ImapMail/")) {
emailFolder = mboxParentDir.substring(mboxParentDir.indexOf("/ImapMail/") + 9);
}
emailFolder = emailFolder + mboxFileName;
@ -203,14 +210,14 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
abstractFile.getName()),
NbBundle.getMessage(this.getClass(),
"ThunderbirdMboxFileIngestModule.processMBox.errProfFile.details"));
return ResultCode.OK;
return ProcessResult.OK;
}
try {
ContentUtils.writeToFile(abstractFile, file);
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed writing mbox file to disk.", ex);
return ResultCode.OK;
return ProcessResult.OK;
}
MboxParser parser = new MboxParser(services, emailFolder);
@ -229,11 +236,12 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
abstractFile.getName()), errors);
}
return ResultCode.OK;
return ProcessResult.OK;
}
/**
* Get a path to a temporary folder.
*
* @return
*/
public static String getTempPath() {
@ -247,8 +255,8 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
}
public static String getModuleOutputPath() {
String outDir = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator +
EmailParserModuleFactory.getModuleName();
String outDir = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator
+ EmailParserModuleFactory.getModuleName();
File dir = new File(outDir);
if (dir.exists() == false) {
dir.mkdirs();
@ -257,20 +265,14 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
}
public static String getRelModuleOutputPath() {
return Case.getModulesOutputDirRelPath() + File.separator +
EmailParserModuleFactory.getModuleName();
}
@Override
public void startUp(IngestJobContext context) throws Exception {
this.context = context;
services = IngestServices.getDefault();
fileManager = Case.getCurrentCase().getServices().getFileManager();
return Case.getModulesOutputDirRelPath() + File.separator
+ EmailParserModuleFactory.getModuleName();
}
/**
* Take the extracted information in the email messages and add the
* appropriate artifacts and derived files.
*
* @param emails
* @param abstractFile
* @param ingestContext
@ -289,12 +291,14 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
services.fireModuleContentEvent(new ModuleContentEvent(derived));
}
}
context.submitFilesForIngest(derivedFiles);
context.addFilesToPipeline(derivedFiles);
services.fireModuleDataEvent(new ModuleDataEvent(EmailParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG));
}
/**
* Add the given attachments as derived files and reschedule them for ingest.
* Add the given attachments as derived files and reschedule them for
* ingest.
*
* @param attachments
* @param abstractFile
* @return
@ -329,6 +333,7 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i
/**
* Add a blackboard artifact for the given email message.
*
* @param email
* @param abstractFile
*/