Check in current state of new ingest framework for safekeeping

This commit is contained in:
Richard Cordovano 2014-03-17 17:45:59 -04:00
parent 631838ff02
commit e99925fb7d
42 changed files with 1261 additions and 1183 deletions

View File

@ -1,108 +1,85 @@
/*
* Sample module in the public domain. Feel free to use this as a template
* for your modules.
*
* Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package org.sleuthkit.autopsy.examples;
// RJCTODO: Rework this module for the new interfaces
//import java.util.List;
//import org.apache.log4j.Logger;
//import org.openide.util.Exceptions;
//import org.sleuthkit.autopsy.casemodule.Case;
//import org.sleuthkit.autopsy.casemodule.services.FileManager;
//import org.sleuthkit.autopsy.casemodule.services.Services;
//import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
//import org.sleuthkit.datamodel.AbstractFile;
//import org.sleuthkit.datamodel.Content;
//import org.sleuthkit.datamodel.FsContent;
//import org.sleuthkit.datamodel.SleuthkitCase;
//import org.sleuthkit.datamodel.TskCoreException;
import java.util.List;
import org.apache.log4j.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.casemodule.services.Services;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
// RJCTODO: Remove inheritance from IngestModuleAdapter and provide better documentation.
/**
* Sample DataSource-level ingest module that doesn't do much at all.
* Just exists to show the basic idea of these modules.
* Sample data source ingest module that doesn't do much. Note that the
* IngestModuleAdapter abstract class could have been used as a base class to
* obtain default implementations of many of the DataSourceIngestModule methods.
*/
// class SampleDataSourceIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleDataSource {
//
// /* Data Source modules operate on a disk or set of logical files. They
// * are passed in the data source reference and query it for things they want.
// */
// @Override
// public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, DataSourceIngestModuleStatusHelper controller) {
//
// Case case1 = Case.getCurrentCase();
// SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
//
// Services services = new Services(sleuthkitCase);
// FileManager fm = services.getFileManager();
// try {
// /* you can use the findFiles method in FileManager (or similar ones in
// * SleuthkitCase) to find files based only on their name. This
// * one finds files that have a .doc extension. */
// List<AbstractFile> docFiles = fm.findFiles(dataSource, "%.doc");
// for (AbstractFile file : docFiles) {
// // do something with each doc file
// }
//
// /* We can also do more general queries with findFilesWhere, which
// * allows us to make our own WHERE clause in the database.
// */
// long currentTime = System.currentTimeMillis()/1000;
// // go back 2 weeks
// long minTime = currentTime - (14 * 24 * 60 * 60);
// List<FsContent> otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime);
// // do something with these files...
//
// } catch (TskCoreException ex) {
// Logger log = Logger.getLogger(SampleDataSourceIngestModule.class);
// log.fatal("Error retrieving files from database: " + ex.getLocalizedMessage());
// return;
// }
// }
//
// @Override
// public void init(IngestModuleInit initContext) {
// // do nothing
// }
//
// @Override
// public void complete() {
// // do nothing
// }
//
// @Override
// public void stop() {
// // do nothing
// }
//
// RJCTODO: Add service provider annotation (commented out)
class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
private static final Logger logger = Logger.getLogger(SampleDataSourceIngestModule.class);
@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
Case case1 = Case.getCurrentCase();
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
Services services = new Services(sleuthkitCase);
FileManager fileManager = services.getFileManager();
try {
List<AbstractFile> docFiles = fileManager.findFiles(dataSource, "%.doc");
for (AbstractFile file : docFiles) {
// do something with each doc file
}
long currentTime = System.currentTimeMillis() / 1000;
long minTime = currentTime - (14 * 24 * 60 * 60); // Go back two weeks.
List<FsContent> otherFiles = sleuthkitCase.findFilesWhere("crtime > " + minTime);
// do something with these files...
} catch (TskCoreException ex) {
logger.fatal("Error retrieving files from database: " + ex.getLocalizedMessage());
return IngestModule.ResultCode.ERROR;
}
return IngestModule.ResultCode.OK;
}
// @Override
// public String getName() {
// return "SampleDataSourceIngestModule";
@ -117,9 +94,4 @@ package org.sleuthkit.autopsy.examples;
// public String getDescription() {
// return "Doesn't do much";
// }
//
// @Override
// public boolean hasBackgroundJobsRunning() {
// return false;
// }
//}
}

View File

@ -30,17 +30,21 @@
package org.sleuthkit.autopsy.examples;
//import org.apache.log4j.Logger;
//import org.openide.util.Exceptions;
//import org.sleuthkit.autopsy.casemodule.Case;
//import org.sleuthkit.datamodel.AbstractFile;
//import org.sleuthkit.datamodel.BlackboardArtifact;
//import org.sleuthkit.datamodel.BlackboardAttribute;
//import org.sleuthkit.datamodel.TskCoreException;
//import org.sleuthkit.datamodel.SleuthkitCase;
//import org.sleuthkit.datamodel.TskData;
import org.apache.log4j.Logger;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleContext;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskData;
// RJCTODO
// RJCTODO: Remove inheritance from IngestModuleAdapter and provide better documentation.
/**
* This is a sample and simple module. It is a file-level ingest module, meaning
* that it will get called on each file in the disk image / logical file set.
@ -51,108 +55,85 @@ package org.sleuthkit.autopsy.examples;
* IngestModuleLoader will not load things from the org.sleuthkit.autopsy.examples package.
* Either change the package or the loading code to make this module actually run.
*/
// class SampleFileIngestModule extends org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile {
// private int attrId = -1;
// private static SampleFileIngestModule defaultInstance = null;
//
// // Private to ensure Singleton status
// private SampleFileIngestModule() {
// }
//
// // File-level ingest modules are currently singleton -- this is required
// public static synchronized SampleFileIngestModule getDefault() {
// //defaultInstance is a private static class variable
// if (defaultInstance == null) {
// defaultInstance = new SampleFileIngestModule();
// }
// return defaultInstance;
// }
//
//
// @Override
// public void init(IngestModuleInit initContext) {
// /* For this demo, we are going to make a private attribute to post our
// * results to the blackboard with. There are many standard blackboard artifact
// * and attribute types and you should first consider using one of those before
// * making private ones because other modules won't know about private ones.
// * Because our demo has results that have no real value, we do not have an
// * official attribute for them.
// */
// Case case1 = Case.getCurrentCase();
// SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
//
// // see if the type already exists in the blackboard.
// try {
// attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE");
// } catch (TskCoreException ex) {
// // create it if not
// try {
// attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute");
// } catch (TskCoreException ex1) {
// Logger log = Logger.getLogger(SampleFileIngestModule.class);
// log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage());
// attrId = -1;
// }
// }
// }
//
// @Override
// public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
// // skip non-files
// if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
// (abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
// return ProcessResult.OK;
// }
//
// // skip NSRL / known files
// if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) {
// return ProcessResult.OK;
// }
//
//
// /* Do a nonsensical calculation of the number of 0x00 bytes
// * in the first 1024-bytes of the file. This is for demo
// * purposes only.
// */
// try {
// byte buffer[] = new byte[1024];
// int len = abstractFile.read(buffer, 0, 1024);
// int count = 0;
// for (int i = 0; i < len; i++) {
// if (buffer[i] == 0x00) {
// count++;
// }
// }
//
// if (attrId != -1) {
// // Make an attribute using the ID for the private type that we previously created.
// BlackboardAttribute attr = new BlackboardAttribute(attrId, getName(), count);
//
// /* add it to the general info artifact. In real modules, you would likely have
// * more complex data types and be making more specific artifacts.
// */
// BlackboardArtifact art = abstractFile.getGenInfoArtifact();
// art.addAttribute(attr);
// }
//
// return ProcessResult.OK;
// } catch (TskCoreException ex) {
// Exceptions.printStackTrace(ex);
// return ProcessResult.ERROR;
// }
// }
//
//
// @Override
// public void complete() {
//
// }
//
// @Override
// public void stop() {
//
// }
//
// RJCTODO: Add service provider annotation (commented out)
class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
private int attrId = -1;
@Override
public void startUp(IngestModuleContext initContext) {
/* For this demo, we are going to make a private attribute to post our
* results to the blackboard with. There are many standard blackboard artifact
* and attribute types and you should first consider using one of those before
* making private ones because other modules won't know about private ones.
* Because our demo has results that have no real value, we do not have an
* official attribute for them.
*/
Case case1 = Case.getCurrentCase();
SleuthkitCase sleuthkitCase = case1.getSleuthkitCase();
// see if the type already exists in the blackboard.
try {
attrId = sleuthkitCase.getAttrTypeID("ATTR_SAMPLE");
} catch (TskCoreException ex) {
// create it if not
try {
attrId = sleuthkitCase.addAttrType("ATTR_SAMPLE", "Sample Attribute");
} catch (TskCoreException ex1) {
Logger log = Logger.getLogger(SampleFileIngestModule.class);
log.fatal("Error adding attribute type: " + ex1.getLocalizedMessage());
attrId = -1;
}
}
}
@Override
public IngestModule.ResultCode process(AbstractFile abstractFile) {
// skip non-files
if ((abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) ||
(abstractFile.getType() == TskData.TSK_DB_FILES_TYPE_ENUM.UNUSED_BLOCKS)) {
return IngestModule.ResultCode.OK;
}
// skip NSRL / known files
if (abstractFile.getKnown() == TskData.FileKnown.KNOWN) {
return IngestModule.ResultCode.OK;
}
/* Do a nonsensical calculation of the number of 0x00 bytes
* in the first 1024-bytes of the file. This is for demo
* purposes only.
*/
try {
byte buffer[] = new byte[1024];
int len = abstractFile.read(buffer, 0, 1024);
int count = 0;
for (int i = 0; i < len; i++) {
if (buffer[i] == 0x00) {
count++;
}
}
if (attrId != -1) {
// Make an attribute using the ID for the private type that we previously created.
BlackboardAttribute attr = new BlackboardAttribute(attrId, "SampleFileIngestModule", count); // RJCTODO: Set up with name as example
/* add it to the general info artifact. In real modules, you would likely have
* more complex data types and be making more specific artifacts.
*/
BlackboardArtifact art = abstractFile.getGenInfoArtifact();
art.addAttribute(attr);
}
return IngestModule.ResultCode.OK;
} catch (TskCoreException ex) {
Exceptions.printStackTrace(ex);
return IngestModule.ResultCode.ERROR;
}
}
// RJCTODO: Add a module factory
// @Override
// public String getVersion() {
// return "1.0";
@ -167,10 +148,5 @@ package org.sleuthkit.autopsy.examples;
// public String getDescription() {
// return "Doesn't do much";
// }
//
// @Override
// public boolean hasBackgroundJobsRunning() {
// // we're single threaded...
// return false;
// }
//}
}

View File

@ -21,7 +21,10 @@ package org.sleuthkit.autopsy.ingest;
import org.sleuthkit.datamodel.Content;
/**
* Interface that must be implemented by all data source ingest modules.
* Interface that must be implemented by all data source ingest modules. Data
* source ingest modules work at the granularity of data sources, while file
* ingest modules work at the granularity of individual files from a data
* source.
*/
public interface DataSourceIngestModule extends IngestModule {
@ -30,7 +33,7 @@ public interface DataSourceIngestModule extends IngestModule {
*
* @param dataSource The data source to process.
* @param statusHelper A status helper to be used to report progress and
* detect ingest job cancellation.
* detect cancellation.
* @return A result code indicating success or failure of the processing.
*/
ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper);
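A complete do-nothing implementation of this interface is short. The following is a hypothetical sketch, not part of this commit: the class name is invented, and the startUp/shutDown signatures are taken from the module decorators elsewhere in this diff.

class NoOpDataSourceIngestModule implements DataSourceIngestModule {
    // Hypothetical example for illustration only.

    @Override
    public void startUp(IngestModuleContext context) throws Exception {
        // Acquire any per-instance resources needed for the ingest job.
    }

    @Override
    public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
        // A real module would examine the data source and report progress here.
        return ResultCode.OK;
    }

    @Override
    public void shutDown(boolean ingestJobWasCancelled) {
        // Release resources; discard partial results if the job was cancelled.
    }
}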

View File

@ -23,8 +23,8 @@ import org.netbeans.api.progress.ProgressHandle;
import org.sleuthkit.datamodel.Content;
/**
* Used by data source ingest modules to report progress and check for ingest
* job cancellation.
* Used by data source ingest modules to report progress and check for data
* source ingest task cancellation.
*/
public class DataSourceIngestModuleStatusHelper {
@ -40,12 +40,12 @@ public class DataSourceIngestModuleStatusHelper {
/**
* Checks for ingest job cancellation. This should be polled by the module
* in its process() method. If the ingest job is canceled, the module should
* return from its process() method as quickly as possible.
* in its process() method. If the ingest task is canceled, the module
* should return from its process() method as quickly as possible.
*
* @return True if the task has been canceled, false otherwise
*/
public boolean isCanceled() {
public boolean isCancelled() {
return worker.isCancelled();
}
@ -67,7 +67,7 @@ public class DataSourceIngestModuleStatusHelper {
* Switches the progress bar to indeterminate mode. This should be called if
* the total work units to process the data source is unknown.
*/
public void switchToInDeterminate() {
public void switchToIndeterminate() {
if (progress != null) {
progress.switchToIndeterminate();
}
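Together, isCancelled() and switchToIndeterminate() imply a polling pattern inside a module's process() method. A hypothetical usage sketch follows; only those two helper methods are taken from this class, and moreWorkRemains() is an invented placeholder for module-specific logic.

@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
    // Total work for this data source is unknown, so use indeterminate mode.
    statusHelper.switchToIndeterminate();
    while (moreWorkRemains()) {
        if (statusHelper.isCancelled()) {
            // The task was cancelled; return as quickly as possible.
            return ResultCode.OK;
        }
        // ... process the next piece of the data source ...
    }
    return ResultCode.OK;
}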

View File

@ -1,101 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.ProgressHandle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.Content;
/**
* A data source ingest pipeline composed of a sequence of data source ingest
* modules constructed from ingest module templates. The pipeline is specific to
* a single ingest job.
*/
class DataSourceIngestPipeline {
private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
private final IngestJob ingestJob;
private final List<IngestModuleTemplate> moduleTemplates;
private List<DataSourceIngestModule> modules = new ArrayList<>();
DataSourceIngestPipeline(IngestJob ingestJob, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJob = ingestJob;
this.moduleTemplates = moduleTemplates;
}
List<IngestModuleError> startUp() throws Exception {
List<IngestModuleError> errors = new ArrayList<>();
for (IngestModuleTemplate template : moduleTemplates) {
IngestModuleFactory factory = template.getIngestModuleFactory();
if (factory.isDataSourceIngestModuleFactory()) {
IngestModuleSettings ingestOptions = template.getIngestOptions();
DataSourceIngestModule module = factory.createDataSourceIngestModule(ingestOptions);
IngestModuleContext context = new IngestModuleContext(this.ingestJob, factory);
try {
module.startUp(context);
this.modules.add(module);
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName());
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
}
}
}
return errors;
}
List<IngestModuleError> ingestDataSource(SwingWorker worker, ProgressHandle progress) {
List<IngestModuleError> errors = new ArrayList<>();
Content dataSource = this.ingestJob.getDataSource();
logger.log(Level.INFO, "Ingesting data source {0}", dataSource.getName());
for (DataSourceIngestModule module : this.modules) {
try {
progress.start();
progress.switchToIndeterminate();
module.process(dataSource, new DataSourceIngestModuleStatusHelper(worker, progress, dataSource));
progress.finish();
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
}
IngestModuleContext context = module.getContext();
if (context.isIngestJobCancelled()) {
break;
}
}
return errors;
}
List<IngestModuleError> shutDown(boolean ingestJobCancelled) {
List<IngestModuleError> errors = new ArrayList<>();
for (DataSourceIngestModule module : this.modules) {
try {
module.shutDown(ingestJobCancelled);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
} finally {
IngestModuleContext context = module.getContext();
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), context.getModuleDisplayName());
}
}
return errors;
}
}

View File

@ -0,0 +1,394 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.ProgressHandle;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* Encapsulates a data source and the ingest module pipelines to be used to
* ingest the data source.
*/
final class DataSourceIngestTask {
private final long id;
private final Content dataSource;
private final List<IngestModuleTemplate> ingestModuleTemplates;
private final boolean processUnallocatedSpace;
private final HashMap<Long, FileIngestPipeline> fileIngestPipelines = new HashMap<>();
private final HashMap<Long, DataSourceIngestPipeline> dataSourceIngestPipelines = new HashMap<>();
private FileIngestPipeline initialFileIngestPipeline = null;
private DataSourceIngestPipeline initialDataSourceIngestPipeline = null;
private boolean cancelled;
DataSourceIngestTask(long id, Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) {
this.id = id;
this.dataSource = dataSource;
this.ingestModuleTemplates = ingestModuleTemplates;
this.processUnallocatedSpace = processUnallocatedSpace;
this.cancelled = false;
}
long getId() {
return id;
}
Content getDataSource() {
return dataSource;
}
boolean shouldProcessUnallocatedSpace() {
return processUnallocatedSpace;
}
synchronized void cancel() {
cancelled = true;
}
synchronized boolean isCancelled() {
return cancelled;
}
synchronized List<IngestModuleError> startUpIngestPipelines() {
// Create a per thread instance of each pipeline type right now to make
// (reasonably) sure that the ingest modules can be started.
initialDataSourceIngestPipeline = new DataSourceIngestPipeline(this, ingestModuleTemplates);
initialFileIngestPipeline = new FileIngestPipeline(this, ingestModuleTemplates);
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(initialDataSourceIngestPipeline.startUp());
errors.addAll(initialFileIngestPipeline.startUp());
return errors;
}
synchronized DataSourceIngestPipeline getDataSourceIngestPipelineForThread(long threadId) {
DataSourceIngestPipeline pipeline;
if (initialDataSourceIngestPipeline != null) {
pipeline = initialDataSourceIngestPipeline;
initialDataSourceIngestPipeline = null;
dataSourceIngestPipelines.put(threadId, pipeline);
} else if (!dataSourceIngestPipelines.containsKey(threadId)) {
pipeline = new DataSourceIngestPipeline(this, ingestModuleTemplates);
pipeline.startUp(); // RJCTODO: If time permits, return possible errors with pipeline or some such thing
dataSourceIngestPipelines.put(threadId, pipeline);
} else {
pipeline = dataSourceIngestPipelines.get(threadId);
}
return pipeline;
}
synchronized FileIngestPipeline getFileIngestPipelineForThread(long threadId) {
FileIngestPipeline pipeline;
if (initialFileIngestPipeline != null) {
pipeline = initialFileIngestPipeline;
initialFileIngestPipeline = null;
fileIngestPipelines.put(threadId, pipeline);
} else if (!fileIngestPipelines.containsKey(threadId)) {
pipeline = new FileIngestPipeline(this, ingestModuleTemplates);
pipeline.startUp();
fileIngestPipelines.put(threadId, pipeline);
} else {
pipeline = fileIngestPipelines.get(threadId);
}
return pipeline;
}
synchronized List<IngestModuleError> releaseIngestPipelinesForThread(long threadId) {
List<IngestModuleError> errors = new ArrayList<>();
DataSourceIngestPipeline dataSourceIngestPipeline = dataSourceIngestPipelines.get(threadId);
if (dataSourceIngestPipeline != null) {
errors.addAll(dataSourceIngestPipeline.shutDown(cancelled));
}
this.dataSourceIngestPipelines.remove(threadId);
FileIngestPipeline fileIngestPipeline = fileIngestPipelines.get(threadId);
if (fileIngestPipeline != null) {
errors.addAll(fileIngestPipeline.shutDown(cancelled));
}
this.fileIngestPipelines.remove(threadId);
return errors;
}
synchronized boolean areIngestPipelinesShutDown() {
return (dataSourceIngestPipelines.isEmpty() && fileIngestPipelines.isEmpty());
}
// RJCTODO: Write story in JIRA for removing code duplication
/**
* A data source ingest pipeline composed of a sequence of data source ingest
* modules constructed from ingest module templates.
*/
static final class DataSourceIngestPipeline {
private static final Logger logger = Logger.getLogger(DataSourceIngestPipeline.class.getName());
private final DataSourceIngestTask task;
private final List<IngestModuleTemplate> moduleTemplates;
private List<DataSourceIngestModuleDecorator> modules = new ArrayList<>();
private DataSourceIngestPipeline(DataSourceIngestTask task, List<IngestModuleTemplate> moduleTemplates) {
this.task = task;
this.moduleTemplates = moduleTemplates;
}
private List<IngestModuleError> startUp() {
List<IngestModuleError> errors = new ArrayList<>();
// Create an ingest module instance from each ingest module template
// that has an ingest module factory capable of making data source
// ingest modules. Map the module class names to the module instance
// to allow the modules to be put in the sequence indicated by the
// ingest pipelines configuration.
Map<String, DataSourceIngestModuleDecorator> modulesByClass = new HashMap<>();
for (IngestModuleTemplate template : moduleTemplates) {
IngestModuleFactory factory = template.getIngestModuleFactory();
if (factory.isDataSourceIngestModuleFactory()) {
IngestModuleSettings ingestOptions = template.getIngestOptions();
DataSourceIngestModuleDecorator module = new DataSourceIngestModuleDecorator(factory.createDataSourceIngestModule(ingestOptions), factory.getModuleDisplayName());
IngestModuleContext context = new IngestModuleContext(task, factory);
try {
module.startUp(context);
modulesByClass.put(module.getClassName(), module);
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName());
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
}
}
// Establish the module sequence of the core ingest modules
// indicated by the ingest pipeline configuration, adding any
// additional modules found in the global lookup to the end of the
// pipeline in arbitrary order.
List<String> pipelineConfig = IngestPipelinesConfiguration.getInstance().getDataSourceIngestPipelineConfig();
for (String moduleClassName : pipelineConfig) {
if (modulesByClass.containsKey(moduleClassName)) {
modules.add(modulesByClass.remove(moduleClassName));
}
}
for (DataSourceIngestModuleDecorator module : modulesByClass.values()) {
modules.add(module);
}
return errors;
}
List<IngestModuleError> process(SwingWorker worker, ProgressHandle progress) {
List<IngestModuleError> errors = new ArrayList<>();
Content dataSource = this.task.getDataSource();
logger.log(Level.INFO, "Processing data source {0}", dataSource.getName());
for (DataSourceIngestModuleDecorator module : this.modules) {
try {
progress.start();
progress.switchToIndeterminate();
module.process(dataSource, new DataSourceIngestModuleStatusHelper(worker, progress, dataSource));
progress.finish();
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
if (task.isCancelled()) {
break;
}
}
return errors;
}
private List<IngestModuleError> shutDown(boolean ingestJobCancelled) {
List<IngestModuleError> errors = new ArrayList<>();
for (DataSourceIngestModuleDecorator module : this.modules) {
try {
module.shutDown(ingestJobCancelled);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
} finally {
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
}
}
return errors;
}
private static class DataSourceIngestModuleDecorator implements DataSourceIngestModule {
private final DataSourceIngestModule module;
private final String displayName;
DataSourceIngestModuleDecorator(DataSourceIngestModule module, String displayName) {
this.module = module;
this.displayName = displayName;
}
String getClassName() {
return module.getClass().getCanonicalName();
}
String getDisplayName() {
return displayName;
}
@Override
public void startUp(IngestModuleContext context) throws Exception {
module.startUp(context);
}
@Override
public IngestModule.ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
return module.process(dataSource, statusHelper);
}
@Override
public void shutDown(boolean ingestJobWasCancelled) {
module.shutDown(ingestJobWasCancelled);
}
}
}
/**
* A file ingest pipeline composed of a sequence of file ingest modules
* constructed from ingest module templates.
*/
static final class FileIngestPipeline {
private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName());
private final DataSourceIngestTask task;
private final List<IngestModuleTemplate> moduleTemplates;
private List<FileIngestModuleDecorator> modules = new ArrayList<>();
private FileIngestPipeline(DataSourceIngestTask task, List<IngestModuleTemplate> moduleTemplates) {
this.task = task;
this.moduleTemplates = moduleTemplates;
}
private List<IngestModuleError> startUp() {
List<IngestModuleError> errors = new ArrayList<>();
// Create an ingest module instance from each ingest module template
// that has an ingest module factory capable of making file ingest
// modules. Map the module class names to the module instance
// to allow the modules to be put in the sequence indicated by the
// ingest pipelines configuration.
Map<String, FileIngestModuleDecorator> modulesByClass = new HashMap<>();
for (IngestModuleTemplate template : moduleTemplates) {
IngestModuleFactory factory = template.getIngestModuleFactory();
if (factory.isFileIngestModuleFactory()) {
IngestModuleSettings ingestOptions = template.getIngestOptions();
FileIngestModuleDecorator module = new FileIngestModuleDecorator(factory.createFileIngestModule(ingestOptions), factory.getModuleDisplayName());
IngestModuleContext context = new IngestModuleContext(task, factory);
try {
module.startUp(context);
modulesByClass.put(module.getClassName(), module);
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName());
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
}
}
// Establish the module sequence of the core ingest modules
// indicated by the ingest pipeline configuration, adding any
// additional modules found in the global lookup to the end of the
// pipeline in arbitrary order.
List<String> pipelineConfig = IngestPipelinesConfiguration.getInstance().getFileIngestPipelineConfig();
for (String moduleClassName : pipelineConfig) {
if (modulesByClass.containsKey(moduleClassName)) {
modules.add(modulesByClass.remove(moduleClassName));
}
}
for (FileIngestModuleDecorator module : modulesByClass.values()) {
modules.add(module);
}
return errors;
}
List<IngestModuleError> process(AbstractFile file) {
List<IngestModuleError> errors = new ArrayList<>();
Content dataSource = this.task.getDataSource();
logger.log(Level.INFO, "Processing {0} from {1}", new Object[]{file.getName(), dataSource.getName()});
for (FileIngestModuleDecorator module : this.modules) {
try {
module.process(file);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
}
if (task.isCancelled()) {
break;
}
}
file.close();
IngestManager.fireFileDone(file.getId());
return errors;
}
private List<IngestModuleError> shutDown(boolean ingestJobCancelled) {
List<IngestModuleError> errors = new ArrayList<>();
for (FileIngestModuleDecorator module : this.modules) {
try {
module.shutDown(ingestJobCancelled);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getDisplayName(), ex));
} finally {
IngestManager.fireModuleEvent(IngestManager.IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
}
}
return errors;
}
private static class FileIngestModuleDecorator implements FileIngestModule {
private final FileIngestModule module;
private final String displayName;
FileIngestModuleDecorator(FileIngestModule module, String displayName) {
this.module = module;
this.displayName = displayName;
}
String getClassName() {
return module.getClass().getCanonicalName();
}
String getDisplayName() {
return displayName;
}
@Override
public void startUp(IngestModuleContext context) throws Exception {
module.startUp(context);
}
@Override
public IngestModule.ResultCode process(AbstractFile file) {
return module.process(file);
}
@Override
public void shutDown(boolean ingestJobWasCancelled) {
module.shutDown(ingestJobWasCancelled);
}
}
}
}

View File

@ -21,7 +21,9 @@ package org.sleuthkit.autopsy.ingest;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Interface that must be implemented by all file ingest modules.
* Interface that must be implemented by all file ingest modules. File ingest
* modules work at the granularity of individual files from a data source, while
* data source ingest modules work at the granularity of data sources.
*/
public interface FileIngestModule extends IngestModule {
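Based on the FileIngestModuleDecorator later in this commit, this interface adds a process(AbstractFile) method. A minimal hypothetical implementation, with an invented class name, might look like this:

class NoOpFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
    // Hypothetical example; the process(AbstractFile) signature is inferred
    // from the FileIngestModuleDecorator elsewhere in this commit.

    @Override
    public IngestModule.ResultCode process(AbstractFile file) {
        // A real module would examine the file here.
        return IngestModule.ResultCode.OK;
    }
}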

View File

@ -1,100 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
/**
* A file ingest pipeline composed of a sequence of file ingest modules
* constructed from ingest module templates. The pipeline is specific to a
* single ingest job.
*/
class FileIngestPipeline {
private static final Logger logger = Logger.getLogger(FileIngestPipeline.class.getName());
private final IngestJob ingestJob;
private final List<IngestModuleTemplate> moduleTemplates;
private List<FileIngestModule> modules = new ArrayList<>();
FileIngestPipeline(IngestJob ingestJob, List<IngestModuleTemplate> moduleTemplates) {
this.ingestJob = ingestJob;
this.moduleTemplates = moduleTemplates;
}
List<IngestModuleError> startUp() {
List<IngestModuleError> errors = new ArrayList<>();
for (IngestModuleTemplate template : moduleTemplates) {
IngestModuleFactory factory = template.getIngestModuleFactory();
if (factory.isFileIngestModuleFactory()) {
IngestModuleSettings ingestOptions = template.getIngestOptions();
FileIngestModule module = factory.createFileIngestModule(ingestOptions);
IngestModuleContext context = new IngestModuleContext(this.ingestJob, factory);
try {
module.startUp(context);
this.modules.add(module);
IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), factory.getModuleDisplayName());
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
}
}
}
return errors;
}
List<IngestModuleError> ingestFile(AbstractFile file) {
List<IngestModuleError> errors = new ArrayList<>();
Content dataSource = this.ingestJob.getDataSource();
logger.log(Level.INFO, "Ingesting {0} from {1}", new Object[]{file.getName(), dataSource.getName()});
for (FileIngestModule module : this.modules) {
try {
module.process(file);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
}
IngestModuleContext context = module.getContext();
if (context.isIngestJobCancelled()) {
break;
}
}
file.close();
IngestManager.fireFileDone(file.getId());
return errors;
}
List<IngestModuleError> shutDown(boolean ingestJobCancelled) {
List<IngestModuleError> errors = new ArrayList<>();
for (FileIngestModule module : this.modules) {
try {
module.shutDown(ingestJobCancelled);
} catch (Exception ex) {
errors.add(new IngestModuleError(module.getContext().getModuleDisplayName(), ex));
} finally {
IngestModuleContext context = module.getContext();
IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), context.getModuleDisplayName());
}
}
return errors;
}
}

View File

@ -33,9 +33,10 @@ import org.sleuthkit.datamodel.Volume;
import org.sleuthkit.datamodel.VolumeSystem;
/**
* Abstract visitor for getting all the files from content
* TODO should be moved to utility module (needs to resolve cyclic deps)
* Abstract visitor for getting all the files from content.
*/
// TODO Could be moved to utility package, is there another version of this
// somewhere?
abstract class GetFilesContentVisitor implements ContentVisitor<Collection<AbstractFile>> {
private static final Logger logger = Logger.getLogger(GetFilesContentVisitor.class.getName());

View File

@ -52,7 +52,7 @@ public class IngestConfigurator {
// Get the ingest module factories discovered by the ingest module
// loader.
// RJCTODO: Put in name uniqueness test/solution in loader!
List<IngestModuleFactory> moduleFactories = IngestModuleLoader.getDefault().getIngestModuleFactories();
List<IngestModuleFactory> moduleFactories = IngestModuleLoader.getInstance().getIngestModuleFactories();
HashSet<String> loadedModuleNames = new HashSet<>();
for (IngestModuleFactory moduleFactory : moduleFactories) {
loadedModuleNames.add(moduleFactory.getModuleDisplayName());
@ -209,6 +209,7 @@ public class IngestConfigurator {
return csvList.toString();
}
// RJCTODO: May need additional mappings
private HashSet<String> getModulesNamesFromSetting(String key, String defaultSetting) {
// Get the ingest modules setting from the user's config file.
// If there is no such setting yet, create the default setting.
@ -227,8 +228,7 @@ public class IngestConfigurator {
moduleNames.add("Email Parser");
break;
case "File Extension Mismatch Detection":
case "Extension Mismatch Detector":
moduleNames.add("File Extension Mismatch Detector");
moduleNames.add("Extension Mismatch Detector");
break;
default:
moduleNames.add(name);

View File

@ -1,131 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Objects;
import org.sleuthkit.datamodel.Content;
/**
* Encapsulates a data source and the ingest module pipelines to be used to
* ingest the data source.
*/
class IngestJob {
private final long id;
private final Content dataSource;
private final List<IngestModuleTemplate> ingestModuleTemplates;
private final boolean processUnallocatedSpace;
private final HashMap<Long, FileIngestPipeline> fileIngestPipelines = new HashMap<>();
private final HashMap<Long, DataSourceIngestPipeline> dataSourceIngestPipelines = new HashMap<>();
private FileIngestPipeline initialFileIngestPipeline = null;
private DataSourceIngestPipeline initialDataSourceIngestPipeline = null;
private volatile boolean cancelled;
IngestJob(long id, Content dataSource, List<IngestModuleTemplate> ingestModuleTemplates, boolean processUnallocatedSpace) {
this.id = id;
this.dataSource = dataSource;
this.ingestModuleTemplates = ingestModuleTemplates;
this.processUnallocatedSpace = processUnallocatedSpace;
this.cancelled = false;
}
long getId() {
return id;
}
Content getDataSource() {
return dataSource;
}
boolean getProcessUnallocatedSpace() {
return this.processUnallocatedSpace;
}
synchronized void cancel() {
this.cancelled = true;
}
synchronized boolean isCancelled() { // RJCTODO: It seems like this is only used in the pipelines, where it no longer belongs, I think...
return this.cancelled;
}
synchronized List<IngestModuleError> startUpIngestPipelines() throws Exception {
// Create at least one instance of each pipeline type now to make
// reasonably sure the ingest modules can be started.
List<IngestModuleError> errors = new ArrayList<>();
errors.addAll(this.initialDataSourceIngestPipeline.startUp());
errors.addAll(this.initialFileIngestPipeline.startUp());
return errors;
}
synchronized FileIngestPipeline getFileIngestPipelineForThread(long threadId) {
FileIngestPipeline pipeline;
if (null != this.initialFileIngestPipeline) {
pipeline = this.initialFileIngestPipeline;
this.initialDataSourceIngestPipeline = null;
fileIngestPipelines.put(threadId, pipeline);
} else if (!fileIngestPipelines.containsKey(threadId)) {
pipeline = new FileIngestPipeline(this, this.ingestModuleTemplates);
fileIngestPipelines.put(threadId, pipeline);
} else {
pipeline = fileIngestPipelines.get(threadId);
}
return pipeline;
}
synchronized DataSourceIngestPipeline getDataSourceIngestPipelineForThread(long threadId) {
DataSourceIngestPipeline pipeline;
if (null != this.initialDataSourceIngestPipeline) {
pipeline = this.initialDataSourceIngestPipeline;
this.initialDataSourceIngestPipeline = null;
dataSourceIngestPipelines.put(threadId, pipeline);
} else if (!dataSourceIngestPipelines.containsKey(threadId)) {
pipeline = new DataSourceIngestPipeline(this, this.ingestModuleTemplates);
dataSourceIngestPipelines.put(threadId, pipeline);
} else {
pipeline = dataSourceIngestPipelines.get(threadId);
}
return pipeline;
}
synchronized List<IngestModuleError> releaseIngestPipelinesForThread(long threadId) {
List<IngestModuleError> errors = new ArrayList<>();
DataSourceIngestPipeline dataSourceIngestPipeline = dataSourceIngestPipelines.get(threadId);
if (dataSourceIngestPipeline != null) {
errors.addAll(dataSourceIngestPipeline.shutDown(this.cancelled));
}
this.dataSourceIngestPipelines.remove(threadId);
FileIngestPipeline fileIngestPipeline = fileIngestPipelines.get(threadId);
if (fileIngestPipeline != null) {
errors.addAll(fileIngestPipeline.shutDown(this.cancelled));
}
this.fileIngestPipelines.remove(threadId);
return errors;
}
synchronized boolean arePipelinesShutDown() {
return (dataSourceIngestPipelines.isEmpty() && fileIngestPipelines.isEmpty());
}
}

View File

@ -50,7 +50,7 @@ public class IngestManager {
private static IngestManager instance;
private final IngestScheduler scheduler;
private final IngestMonitor ingestMonitor = new IngestMonitor();
private final HashMap<Long, IngestJob> ingestJobs = new HashMap<>();
private final HashMap<Long, DataSourceIngestTask> ingestJobs = new HashMap<>();
private TaskSchedulingWorker taskSchedulingWorker;
private FileTaskWorker fileTaskWorker;
private DataSourceTaskWorker dataSourceTaskWorker;
@ -261,8 +261,8 @@ public class IngestManager {
* @param pipelineContext ingest context used to ingest parent of the file
* to be scheduled
*/
void scheduleFileTask(long ingestJobId, AbstractFile file) {
IngestJob job = this.ingestJobs.get(ingestJobId); // RJCTODO: Consider renaming
void scheduleFileTask(long ingestJobId, AbstractFile file) { // RJCTODO: With the module context, this can be passed the task itself
DataSourceIngestTask job = this.ingestJobs.get(ingestJobId); // RJCTODO: Consider renaming
if (job == null) {
// RJCTODO: Handle severe error
}
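As the IngestModule javadoc later in this diff explains, a module that derives new files (for example, by extracting them from an archive) should pass its ingest job ID to this method so the derived files are processed as part of the same job. A hypothetical call site, assuming code running inside the ingest package; ingestJobId and extractedFile are invented names:

// Inside a file ingest module that has just extracted a file:
IngestManager.getDefault().scheduleFileTask(ingestJobId, extractedFile);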
@ -303,35 +303,45 @@ public class IngestManager {
}
synchronized void reportThreadDone(long threadId) {
for (IngestJob job : ingestJobs.values()) {
for (DataSourceIngestTask job : ingestJobs.values()) {
job.releaseIngestPipelinesForThread(threadId);
// RJCTODO: Add logging of errors or send ingest messages
if (job.arePipelinesShutDown()) {
if (job.areIngestPipelinesShutDown()) {
ingestJobs.remove(job.getId());
}
}
}
synchronized void stopAll() {
for (IngestJob job : ingestJobs.values()) {
job.cancel();
}
// First get the task scheduling worker to stop.
if (taskSchedulingWorker != null) {
taskSchedulingWorker.cancel(true);
while (!taskSchedulingWorker.isDone()) {
// Wait.
// RJCTODO: Add sleep?
}
taskSchedulingWorker = null;
}
scheduler.getFileScheduler().empty();
scheduler.getDataSourceScheduler().empty();
// Now mark all of the ingest jobs as cancelled. This way the ingest
// modules will know they are being shut down due to cancellation when
// the ingest worker threads release their pipelines.
for (DataSourceIngestTask job : ingestJobs.values()) {
job.cancel();
}
// Cancel the worker threads.
if (dataSourceTaskWorker != null) {
dataSourceTaskWorker.cancel(true);
}
if (fileTaskWorker != null) {
fileTaskWorker.cancel(true);
}
// Jettison the remaining tasks. This will dispose of any tasks that
// the scheduling worker queued up before it was cancelled.
scheduler.getFileScheduler().empty();
scheduler.getDataSourceScheduler().empty();
}
/**
@ -378,6 +388,7 @@ public class IngestManager {
private final List<Content> dataSources;
private final List<IngestModuleTemplate> moduleTemplates;
private final boolean processUnallocatedSpace;
private final List<Long> scheduledJobIds = new ArrayList<>();
private ProgressHandle progress;
TaskSchedulingWorker(List<Content> dataSources, List<IngestModuleTemplate> moduleTemplates, boolean processUnallocatedSpace) {
@ -388,6 +399,8 @@ public class IngestManager {
@Override
protected Object doInBackground() throws Exception {
// Set up a progress bar that can be used to cancel all of the
// ingest jobs currently being performed.
final String displayName = "Queueing ingest tasks";
progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
@Override
@ -396,20 +409,25 @@ public class IngestManager {
if (progress != null) {
progress.setDisplayName(displayName + " (Cancelling...)");
}
return TaskSchedulingWorker.this.cancel(true);
IngestManager.getDefault().stopAll();
return true;
}
});
progress.start(2 * dataSources.size());
int processed = 0;
for (Content dataSource : dataSources) {
if (isCancelled()) {
logger.log(Level.INFO, "Task scheduling thread cancelled");
return null;
}
final String inputName = dataSource.getName();
IngestJob ingestJob = new IngestJob(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace);
DataSourceIngestTask ingestJob = new DataSourceIngestTask(IngestManager.this.getNextDataSourceTaskId(), dataSource, moduleTemplates, processUnallocatedSpace);
List<IngestModuleError> errors = ingestJob.startUpIngestPipelines();
if (!errors.isEmpty()) {
// RJCTODO: Log all errors. Provide a list of all of the modules
// that failed.
// RJCTODO: Log all errors, not just the first one. Provide a list of all of the modules that failed.
MessageNotifyUtil.Message.error(
"Failed to load " + errors.get(0).getModuleDisplayName() + " ingest module.\n\n"
+ "No ingest modules will be run. Please disable the module "
@ -423,13 +441,11 @@ public class IngestManager {
ingestJobs.put(ingestJob.getId(), ingestJob);
// Queue the data source ingest tasks for the ingest job.
logger.log(Level.INFO, "Queueing data source tasks: {0}", ingestJob);
progress.progress("DataSource Ingest" + " " + inputName, processed);
scheduler.getDataSourceScheduler().schedule(ingestJob);
progress.progress("DataSource Ingest" + " " + inputName, ++processed);
// Queue the file ingest tasks for the ingest job.
logger.log(Level.INFO, "Queuing file ingest tasks: {0}", ingestJob);
progress.progress("File Ingest" + " " + inputName, processed);
scheduler.getFileScheduler().scheduleIngestOfFiles(ingestJob);
progress.progress("File Ingest" + " " + inputName, ++processed);
@ -443,25 +459,17 @@ public class IngestManager {
try {
super.get();
} catch (CancellationException | InterruptedException ex) {
handleInterruption(ex);
// IngestManager.stopAll() will dispose of all tasks.
} catch (Exception ex) {
logger.log(Level.SEVERE, "Error while enqueuing files. ", ex);
handleInterruption(ex);
logger.log(Level.SEVERE, "Error while scheduling ingest jobs", ex);
// RJCTODO: On EDT, report error, cannot dump all tasks since multiple data source tasks can be submitted. Would get partial results either way.
} finally {
if (this.isCancelled()) {
handleInterruption(new Exception());
} else {
if (!isCancelled()) {
startAll();
}
progress.finish();
}
}
private void handleInterruption(Exception ex) {
// RJCTODO: This seems broken, should empty only for current job?
scheduler.getFileScheduler().empty();
scheduler.getDataSourceScheduler().empty();
}
}
/**
@ -481,10 +489,9 @@ public class IngestManager {
protected Void doInBackground() throws Exception {
logger.log(Level.INFO, "Data source ingest thread {0} started", this.id);
// Set up a progress bar with cancel capability. This is one of two
// ways that the worker can be canceled. The other way is via a call
// to IngestManager.stopAll().
final String displayName = "Data Source";
// Set up a progress bar that can be used to cancel all of the
// ingest jobs currently being performed.
final String displayName = "Data source ingest"; // RJCTODO: Need reset
progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
@ -492,7 +499,8 @@ public class IngestManager {
if (progress != null) {
progress.setDisplayName(displayName + " (Cancelling...)");
}
return DataSourceTaskWorker.this.cancel(true);
IngestManager.getDefault().stopAll();
return true;
}
});
progress.start();
@ -505,9 +513,9 @@ public class IngestManager {
return null;
}
IngestJob ingestJob = scheduler.next();
DataSourceIngestPipeline pipeline = ingestJob.getDataSourceIngestPipelineForThread(this.id);
pipeline.ingestDataSource(this, this.progress);
DataSourceIngestTask ingestJob = scheduler.next();
DataSourceIngestTask.DataSourceIngestPipeline pipeline = ingestJob.getDataSourceIngestPipelineForThread(this.id);
pipeline.process(this, this.progress);
}
logger.log(Level.INFO, "Data source ingest thread {0} completed", this.id);
@ -549,10 +557,9 @@ public class IngestManager {
protected Object doInBackground() throws Exception {
logger.log(Level.INFO, "File ingest thread {0} started", this.id);
// Set up a progress bar with cancel capability. This is one of two ways
// that the worker can be canceled. The other way is via a call to
// IngestManager.stopAll().
final String displayName = "File Ingest";
// Set up a progress bar that can be used to cancel all of the
// ingest jobs currently being performed.
final String displayName = "File ingest";
progress = ProgressHandleFactory.createHandle(displayName, new Cancellable() {
@Override
public boolean cancel() {
@ -560,7 +567,8 @@ public class IngestManager {
if (progress != null) {
progress.setDisplayName(displayName + " (Cancelling...)");
}
return FileTaskWorker.this.cancel(true);
IngestManager.getDefault().stopAll();
return true;
}
});
progress.start();
@ -580,8 +588,8 @@ public class IngestManager {
IngestScheduler.FileScheduler.FileTask task = fileScheduler.next();
AbstractFile file = task.getFile();
progress.progress(file.getName(), processedFiles);
FileIngestPipeline pipeline = task.getJob().getFileIngestPipelineForThread(this.id);
pipeline.ingestFile(file);
DataSourceIngestTask.FileIngestPipeline pipeline = task.getParent().getFileIngestPipelineForThread(this.id);
pipeline.process(file);
// Update the progress bar.
int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();

View File

@ -365,7 +365,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
}
private void init() {
List<IngestModuleFactory> moduleFactories = IngestModuleLoader.getDefault().getIngestModuleFactories();
List<IngestModuleFactory> moduleFactories = IngestModuleLoader.getInstance().getIngestModuleFactories();
for (IngestModuleFactory factory : moduleFactories) {
groupings.put(factory.getModuleDisplayName(), new HashMap<String, List<IngestMessageGroup>>());
}

View File

@ -23,76 +23,14 @@ package org.sleuthkit.autopsy.ingest;
*/
public interface IngestModule {
public enum ResultCode { // RJCTODO: Do we really want to have this
public enum ResultCode {
OK,
ERROR,
@Deprecated
NOT_SET
};
// RJCTODO: Update
/**
* Invoked to allow an ingest module to set up internal data structures and
* acquire any private resources it will need during an ingest job. There
* will usually be more than one instance of a module working on an ingest
* job, but it is guaranteed that there will be no more than one instance of
* the module per thread. If these instances must share resources, the
* modules are responsible for synchronizing access to the shared resources
* and doing reference counting as required to release the resources
* correctly.
* <p>
* A module that uses the scheduling service to schedule additional
* processing needs to supply the ingest job ID passed to this method to the
* scheduler. For example, a module that extracts files from an archive file
* should schedule ingest of those files using the ingest job ID to ensure
* that the files will be processed as part of the same ingest job.
* <p>
* An ingest module that does not require initialization should extend the
* IngestModuleAdapter class to get a default implementation of this method
* that saves the ingest job id.
*
* @param ingestJobId Identifier for the ingest job.
* @param ingestOptions Module options for the ingest job.
*/
void startUp(IngestModuleContext context);
// RJCTODO: Write header comment, make sure to mention "one module instance per thread"
void startUp(IngestModuleContext context) throws Exception;
/**
* RJCTODO
*/
IngestModuleContext getContext();
// RJCTODO: Update
/**
* Invoked when an ingest job is completed, before the module instance is
* discarded. The module should respond by doing things like releasing
* private resources, submitting final results, and posting a final ingest
* message.
* <p>
* An ingest module that does not need to do anything when the ingest job
* completes should extend the IngestModuleAdapter class to get a default
* implementation of this method that does nothing.
*/
/**
* Invoked when an ingest job is canceled or otherwise terminated early,
* before the module instance is discarded. The module should respond by
* doing things like releasing private resources, discarding partial
* results, and posting a stopped ingest message.
* <p>
* An ingest module that does not need to do anything when the ingest job is
* canceled should extend the IngestModuleAdapter class to get a default
* implementation of this method that does nothing.
*/
/**
* Invoked after complete() or stop() is called to determine if the module
* has finished responding to the termination request. The module instance
* will be discarded when this method returns true.
* <p>
* An ingest module that does not need to do anything when the ingest job is
* completed or canceled should extend the IngestModuleAdapter class to get
* a default implementation of this method that returns true.
*
* @return True if the module is finished, false otherwise.
*/
// RJCTODO: Write header comment, make sure to mention "one module instance per thread"
void shutDown(boolean ingestJobWasCancelled);
}

View File

@ -24,23 +24,11 @@ package org.sleuthkit.autopsy.ingest;
*/
public abstract class IngestModuleAdapter implements IngestModule {
private IngestModuleContext context;
@Override
public void startUp(IngestModuleContext context) {
this.context = context;
}
@Override
public IngestModuleContext getContext() {
return context;
public void startUp(IngestModuleContext context) throws Exception {
}
@Override
public void shutDown(boolean ingestJobCancelled) {
}
protected void setContext(IngestModuleContext context) {
this.context = context;
}
}
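For orientation, here is a minimal sketch of a file ingest module written against this adapter and the revised lifecycle; SampleFileIngestModule is hypothetical and not part of this commit. One instance of a module runs per ingest thread, so anything shared between instances is static, synchronized, and reference counted, as the old interface comment prescribed.

import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleContext;
import org.sleuthkit.datamodel.AbstractFile;

class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule {

    // One instance of a module runs per thread, so shared state is
    // static, synchronized, and reference counted across instances.
    private static int refCount = 0;

    @Override
    public void startUp(IngestModuleContext context) throws Exception {
        super.startUp(context);
        synchronized (SampleFileIngestModule.class) {
            if (++refCount == 1) {
                // First instance: acquire shared resources here.
            }
        }
    }

    @Override
    public ResultCode process(AbstractFile file) {
        // Per-file analysis goes here.
        return ResultCode.OK;
    }

    @Override
    public void shutDown(boolean ingestJobWasCancelled) {
        synchronized (SampleFileIngestModule.class) {
            if (--refCount == 0) {
                // Last instance: release shared resources; discard
                // partial results if the job was cancelled.
            }
        }
    }
}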

View File

@ -34,14 +34,14 @@ import org.sleuthkit.datamodel.SleuthkitCase;
*/
public final class IngestModuleContext {
private final IngestJob ingestJob;
private final DataSourceIngestTask ingestJob;
private final IngestModuleFactory moduleFactory;
private final IngestManager ingestManager;
private final IngestScheduler scheduler;
private final Case autopsyCase;
private final SleuthkitCase sleuthkitCase;
IngestModuleContext(IngestJob ingestJob, IngestModuleFactory moduleFactory) {
IngestModuleContext(DataSourceIngestTask ingestJob, IngestModuleFactory moduleFactory) {
this.ingestJob = ingestJob;
this.moduleFactory = moduleFactory;
ingestManager = IngestManager.getDefault();
@ -133,10 +133,4 @@ public final class IngestModuleContext {
public void logError(Class moduleClass, String message, Throwable ex) {
Logger.getLogger(moduleClass.getName()).log(Level.SEVERE, message, ex);
}
// RJCTODO: Leave public or create blackboard attribute factory methods,
// perhaps as many as eleven. End goal is for this to be package
public String getModuleDisplayName() {
return this.moduleFactory.getModuleDisplayName();
}
}

View File

@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.util.ArrayList;
@ -26,38 +25,36 @@ import java.util.logging.Level;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.coreutils.Logger;
// RJCTODO: Comment
/**
* Looks up loaded ingest module factories using the NetBeans global lookup.
*/
final class IngestModuleLoader {
private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName());
private static IngestModuleLoader instance;
private ArrayList<IngestModuleFactory> moduleFactories = new ArrayList<>();
private final List<IngestModuleFactory> moduleFactories = new ArrayList<>();
private IngestModuleLoader() {
lookUpIngestModuleFactories();
}
synchronized static IngestModuleLoader getDefault() {
synchronized static IngestModuleLoader getInstance() {
if (instance == null) {
Logger.getLogger(IngestModuleLoader.class.getName()).log(Level.INFO, "Creating ingest module loader instance");
instance = new IngestModuleLoader();
instance.init();
}
return instance;
}
private void init() {
// RJCTODO: Add code to listen to changes in the collections, possibly restore listener code...
// RJCTODO: Since we were going to overwrite pipeline config every time and we are going to move the code modules
// into this package, we can simply handle the module ordering here, possibly just directly instantiating the core
// modules.
// RJCTODO: Make sure that sample modules are excluded
Logger logger = Logger.getLogger(IngestModuleLoader.class.getName());
List<IngestModuleFactory> getIngestModuleFactories() {
return new ArrayList<>(moduleFactories);
}
private void lookUpIngestModuleFactories() {
// RJCTODO: Possibly add code to listen to changes in the collection and restore listener code...
Collection<? extends IngestModuleFactory> factories = Lookup.getDefault().lookupAll(IngestModuleFactory.class);
for (IngestModuleFactory factory : factories) {
logger.log(Level.INFO, "Loaded ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()});
logger.log(Level.INFO, "Found ingest module factory: name = {0}, version = {1}", new Object[]{factory.getModuleDisplayName(), factory.getModuleVersionNumber()});
moduleFactories.add(factory);
}
}
List<IngestModuleFactory> getIngestModuleFactories() {
return moduleFactories;
}
}
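The Lookup call in lookUpIngestModuleFactories() finds any factory registered on the NetBeans default lookup, so discovery is one annotation away. A sketch follows; SampleModuleFactory is hypothetical, but the keyword search and Scalpel factories in this commit register the same way:

import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;

// Registering on the default Lookup is what makes this factory visible
// to IngestModuleLoader.lookUpIngestModuleFactories().
@ServiceProvider(service = IngestModuleFactory.class)
public class SampleModuleFactory extends IngestModuleFactoryAdapter {

    @Override
    public String getModuleDisplayName() {
        return "Sample Module";
    }

    @Override
    public String getModuleDescription() {
        return "A do-nothing factory used to illustrate Lookup registration.";
    }

    @Override
    public String getModuleVersionNumber() {
        return Version.getVersion();
    }
}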

View File

@ -27,6 +27,7 @@ import java.io.Serializable;
*/
public interface IngestModuleSettings extends Serializable {
// RJCTODO: Keep this as a shell if that works, otherwise go with Serializable
/**
* Determines whether the per ingest job options are valid.
*

View File

@ -36,8 +36,9 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil;
/**
* Monitor health of the system and stop ingest if necessary
*/
final class IngestMonitor {
public final class IngestMonitor {
public static final int DISK_FREE_SPACE_UNKNOWN = -1; // RJCTODO: This is ugly
private static final int INITIAL_INTERVAL_MS = 60000; //1 min.
private final Logger logger = Logger.getLogger(IngestMonitor.class.getName());
private Timer timer;

View File

@ -0,0 +1,129 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
/**
* Provides data source and file ingest pipeline configurations as ordered lists
* of ingest module class names. The order of the module class names indicates
* the desired sequence of ingest module instances in an ingest module
* pipeline.
*/
final class IngestPipelinesConfiguration {
private static final Logger logger = Logger.getLogger(IngestPipelinesConfiguration.class.getName());
private final static String PIPELINES_CONFIG_FILE = "pipeline_config.xml";
private final static String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd";
private static final String XML_PIPELINE_ELEM = "PIPELINE";
private static final String XML_PIPELINE_TYPE_ATTR = "type";
private final static String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis";
private final static String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis";
private static final String XML_MODULE_ELEM = "MODULE";
private static final String XML_MODULE_CLASS_NAME_ATTR = "location";
private static IngestPipelinesConfiguration instance;
private final List<String> dataSourceIngestPipelineConfig = new ArrayList<>();
private final List<String> fileIngestPipelineConfig = new ArrayList<>();
private IngestPipelinesConfiguration() {
readPipelinesConfigurationFile();
}
synchronized static IngestPipelinesConfiguration getInstance() {
if (instance == null) {
Logger.getLogger(IngestPipelinesConfiguration.class.getName()).log(Level.INFO, "Creating ingest pipelines configuration instance");
instance = new IngestPipelinesConfiguration();
}
return instance;
}
List<String> getDataSourceIngestPipelineConfig() {
return new ArrayList<>(dataSourceIngestPipelineConfig);
}
List<String> getFileIngestPipelineConfig() {
return new ArrayList<>(fileIngestPipelineConfig);
}
private void readPipelinesConfigurationFile() {
String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE;
Document doc = XMLUtil.loadDoc(IngestPipelinesConfiguration.class, configFilePath, PIPELINES_CONFIG_FILE_XSD);
if (doc == null) {
return;
}
Element rootElement = doc.getDocumentElement();
if (rootElement == null) {
logger.log(Level.SEVERE, "Invalid pipelines config file");
return;
}
NodeList pipelineElements = rootElement.getElementsByTagName(XML_PIPELINE_ELEM);
int numPipelines = pipelineElements.getLength();
if (numPipelines < 1 || numPipelines > 2) {
logger.log(Level.SEVERE, "Invalid pipelines config file");
return;
}
List<String> pipelineConfig = null;
for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) {
Element pipelineElement = (Element) pipelineElements.item(pipelineNum);
String pipelineTypeAttr = pipelineElement.getAttribute(XML_PIPELINE_TYPE_ATTR);
if (pipelineTypeAttr != null) {
switch (pipelineTypeAttr) {
case DATA_SOURCE_INGEST_PIPELINE_TYPE:
pipelineConfig = dataSourceIngestPipelineConfig;
break;
case FILE_INGEST_PIPELINE_TYPE:
pipelineConfig = fileIngestPipelineConfig;
break;
default:
logger.log(Level.SEVERE, "Invalid pipelines config file");
return;
}
}
// Create an ordered list of class names. The sequence of class
// names defines the sequence of modules in the pipeline.
if (pipelineConfig != null) {
NodeList modulesElems = pipelineElement.getElementsByTagName(XML_MODULE_ELEM);
int numModules = modulesElems.getLength();
if (numModules == 0) {
break;
}
for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) {
Element moduleElement = (Element) modulesElems.item(moduleNum);
final String moduleClassName = moduleElement.getAttribute(XML_MODULE_CLASS_NAME_ATTR);
if (moduleClassName != null) {
pipelineConfig.add(moduleClassName);
}
}
}
}
}
}
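For reference, a pipeline_config.xml shaped the way this reader expects. The PIPELINE and MODULE element names, the type and location attributes, and the ImageAnalysis/FileAnalysis values come from the constants above; the root element name and the module class names below are illustrative placeholders:

<PIPELINE_CONFIG>
    <PIPELINE type="FileAnalysis">
        <MODULE location="org.sleuthkit.autopsy.examples.SampleFileIngestModule"/>
    </PIPELINE>
    <PIPELINE type="ImageAnalysis">
        <MODULE location="org.sleuthkit.autopsy.examples.SampleDataSourceIngestModule"/>
    </PIPELINE>
</PIPELINE_CONFIG>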

View File

@ -148,7 +148,7 @@ final class IngestScheduler {
return sb.toString();
}
synchronized void scheduleIngestOfFiles(IngestJob dataSourceTask) {
synchronized void scheduleIngestOfFiles(DataSourceIngestTask dataSourceTask) {
// RJCTODO: This should go to the ingest manager as the job manager?
// Save the data source task to manage its pipelines.
//dataSourceTasks.put(dataSourceTask.getId(), dataSourceTask);
@ -212,7 +212,7 @@ final class IngestScheduler {
* @param originalContext original content schedule context that was used
* to schedule the parent origin content, with the modules, settings, etc.
*/
synchronized void scheduleIngestOfDerivedFile(IngestJob ingestJob, AbstractFile file) {
synchronized void scheduleIngestOfDerivedFile(DataSourceIngestTask ingestJob, AbstractFile file) {
FileTask fileTask = new FileTask(file, ingestJob);
if (shouldEnqueueTask(fileTask)) {
fileTasks.addFirst(fileTask);
@ -338,7 +338,7 @@ final class IngestScheduler {
for (Content c : children) {
if (c instanceof AbstractFile) {
AbstractFile childFile = (AbstractFile) c;
FileTask childTask = new FileTask(childFile, parentTask.getJob());
FileTask childTask = new FileTask(childFile, parentTask.getParent());
if (childFile.hasChildren()) {
this.directoryTasks.add(childTask);
@ -373,13 +373,13 @@ final class IngestScheduler {
final Set<Content> contentSet = new HashSet<>();
for (FileTask task : rootDirectoryTasks) {
contentSet.add(task.getJob().getDataSource());
contentSet.add(task.getParent().getDataSource());
}
for (FileTask task : directoryTasks) {
contentSet.add(task.getJob().getDataSource());
contentSet.add(task.getParent().getDataSource());
}
for (FileTask task : fileTasks) {
contentSet.add(task.getJob().getDataSource());
contentSet.add(task.getParent().getDataSource());
}
return new ArrayList<>(contentSet);
@ -402,7 +402,7 @@ final class IngestScheduler {
final AbstractFile aFile = processTask.file;
//if it's unalloc file, skip if so scheduled
if (processTask.getJob().getProcessUnallocatedSpace() == false
if (processTask.getParent().shouldProcessUnallocatedSpace() == false
&& aFile.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS //unalloc files
)) {
return false;
@ -462,15 +462,15 @@ final class IngestScheduler {
*/
static class FileTask {
private final AbstractFile file;
private final IngestJob ingetsJob;
private final DataSourceIngestTask task;
public FileTask(AbstractFile file, IngestJob dataSourceTask) {
public FileTask(AbstractFile file, DataSourceIngestTask task) {
this.file = file;
this.ingetsJob = dataSourceTask;
this.task = task;
}
public IngestJob getJob() {
return ingetsJob;
public DataSourceIngestTask getParent() { // RJCTODO: Provide wrappers to get rid of train-style calls
return task;
}
public AbstractFile getFile() {
@ -509,8 +509,8 @@ final class IngestScheduler {
if (this.file != other.file && (this.file == null || !this.file.equals(other.file))) {
return false;
}
IngestJob thisTask = this.getJob();
IngestJob otherTask = other.getJob();
DataSourceIngestTask thisTask = this.getParent();
DataSourceIngestTask otherTask = other.getParent();
if (thisTask != otherTask
&& (thisTask == null || !thisTask.equals(otherTask))) {
@ -766,15 +766,15 @@ final class IngestScheduler {
/**
* DataSourceScheduler ingest scheduler
*/
static class DataSourceScheduler implements Iterator<IngestJob> {
static class DataSourceScheduler implements Iterator<DataSourceIngestTask> {
private LinkedList<IngestJob> tasks;
private LinkedList<DataSourceIngestTask> tasks;
DataSourceScheduler() {
tasks = new LinkedList<>();
}
synchronized void schedule(IngestJob task) {
synchronized void schedule(DataSourceIngestTask task) {
try {
if (task.getDataSource().getParent() != null) {
//only accepting parent-less content objects (Image, parentless VirtualDirectory)
@ -790,12 +790,12 @@ final class IngestScheduler {
}
@Override
public synchronized IngestJob next() throws IllegalStateException {
public synchronized DataSourceIngestTask next() throws IllegalStateException {
if (!hasNext()) {
throw new IllegalStateException("There are no data source tasks in the queue; check hasNext()");
}
final IngestJob ret = tasks.pollFirst();
final DataSourceIngestTask ret = tasks.pollFirst();
return ret;
}
@ -806,7 +806,7 @@ final class IngestScheduler {
*/
synchronized List<org.sleuthkit.datamodel.Content> getContents() {
List<org.sleuthkit.datamodel.Content> contents = new ArrayList<org.sleuthkit.datamodel.Content>();
for (IngestJob task : tasks) {
for (DataSourceIngestTask task : tasks) {
contents.add(task.getDataSource());
}
return contents;
@ -834,7 +834,7 @@ final class IngestScheduler {
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("DataSourceQueue, size: ").append(getCount());
for (IngestJob task : tasks) {
for (DataSourceIngestTask task : tasks) {
sb.append(task.toString()).append(" ");
}
return sb.toString();
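A sketch of how scheduleIngestOfDerivedFile() is meant to be used: a file ingest module that extracts a file (for example, from an archive) hands it back to the file scheduler under the same DataSourceIngestTask so it is analyzed within the same job. How a module reaches the scheduler is still unsettled in this commit, so the parameters below stand in for that wiring:

// Illustrative only; the caller must already hold references to the
// file scheduler and to the job (DataSourceIngestTask).
void scheduleExtractedFile(IngestScheduler.FileScheduler fileScheduler,
        DataSourceIngestTask job, AbstractFile extractedFile) {
    // The scheduler enqueues the file at the front of the queue so it
    // is processed as part of the same ingest job as its parent.
    fileScheduler.scheduleIngestOfDerivedFile(job, extractedFile);
}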

View File

@ -18,10 +18,14 @@
*/
package org.sleuthkit.autopsy.ingest;
import java.io.File;
import java.util.Collection;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.SleuthkitCase;
/**
@ -30,10 +34,10 @@ import org.sleuthkit.datamodel.SleuthkitCase;
* singleton instance.
*/
public final class IngestServices {
public static final int DISK_FREE_SPACE_UNKNOWN = -1;
public static final int DISK_FREE_SPACE_UNKNOWN = -1; // RJCTODO: Move this back to the monitor or ingest manager? It is used here...
private static final Logger logger = Logger.getLogger(IngestServices.class.getName());
private IngestManager manager;
private Logger logger = Logger.getLogger(IngestServices.class.getName());
private static IngestServices instance;
private IngestServices() {
@ -41,9 +45,9 @@ public final class IngestServices {
}
/**
* Get handle to singletone module services
* Get the ingest services.
*
* @return the services handle
* @return The ingest services singleton.
*/
public static synchronized IngestServices getDefault() {
if (instance == null) {
@ -53,34 +57,38 @@ public final class IngestServices {
}
/**
* Get access to the current Case handle. Note: When storing the Case
* database handle as a member variable in a module, this method needs to be
* called within the module's init() method and the member variable needs to
* be updated at each init(), to ensure the correct Case handle is being
* used if the Case is changed.
* Get the current Autopsy case.
*
* @return current Case
* @return The current case.
*/
public Case getCurrentCase() {
return Case.getCurrentCase();
}
/**
* Get access to the current Case database handle. Like storing the Case
* handle, call this method and update member variables for each call to the
* module's init() method to ensure it is correct.
* Get the current SleuthKit case. The SleuthKit case is the case database.
*
* @return current Case database
* @return The current case database.
*/
public SleuthkitCase getCurrentSleuthkitCaseDb() {
return Case.getCurrentCase().getSleuthkitCase();
}
/**
* Post ingest message to the inbox. This should be done for analysis
* messages.
* Get a logger that incorporates the display name of an ingest module in
* messages written to the Autopsy log files.
*
* @param message ingest message to be posted by ingest module
* @param moduleDisplayName The display name of the ingest module.
* @return The custom logger for the ingest module.
*/
public Logger getLogger(String moduleDisplayName) {
return Logger.getLogger(moduleDisplayName);
}
/**
* Post message to the ingest messages in box.
*
* @param message An ingest message
*/
public void postMessage(final IngestMessage message) {
manager.postIngestMessage(message);
@ -120,7 +128,7 @@ public final class IngestServices {
IngestManager.fireModuleContentEvent(moduleContentEvent);
}
// RJCTODO:
// RJCTODO: This can stay in the context since it is context (pipeline) specific
/**
* Schedule a new file for ingest with the same settings as the file being
* analyzed. This is used, for example, when opening an archive file. File
@ -143,20 +151,4 @@ public final class IngestServices {
public long getFreeDiskSpace() {
return manager.getFreeDiskSpace();
}
// RJCTODO
/**
* Facility for a file ingest module to check a return value from a
* previously run file ingest module that executed for the same file. The
* module return value can be used as a guideline to skip processing the
* file
*
* @param moduleName registered module name of the module to check the
* return value of
* @return the return value of the previously executed module for the
* currently processed file in the file ingest pipeline
*/
// public IngestModule.ResultCode getAbstractFileModuleResult(String moduleName) {
// return manager.getAbstractFileModuleResult(moduleName);
// }
}
}
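Typical use of these services from a module, sketched below; the module name and the fixed message id are placeholders, and MessageType is assumed to be the enum consumed by IngestMessage.createMessage(), as in RAImageIngestModule further down:

import java.util.logging.Level;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestServices;

class ServicesUsageSketch {

    void announceStart(String dataSourceName) {
        IngestServices services = IngestServices.getDefault();
        // Module-specific logger for the Autopsy log files.
        services.getLogger("Sample Module").log(Level.INFO, "Started {0}", dataSourceName);
        // Post a message to the ingest inbox.
        services.postMessage(IngestMessage.createMessage(
                1, MessageType.INFO, "Sample Module", "Started " + dataSourceName));
    }
}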

View File

@ -38,7 +38,6 @@ import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleTempApiShim;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
@ -55,7 +54,7 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
* files. Ingests an image file and, if available, adds its date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact.
*/
public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule, IngestModuleTempApiShim {
public final class ExifParserFileIngestModule extends IngestModuleAdapter implements FileIngestModule {
private IngestServices services;
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
@ -66,7 +65,8 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) {
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception {
super.startUp(context);
services = IngestServices.getDefault();
logger.log(Level.INFO, "init() {0}", this.toString());
filesProcessed = 0;

View File

@ -58,7 +58,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) {
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception {
super.startUp(context);
services = IngestServices.getDefault();
FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault();
SigTypeToExtMap = xmlLoader.load();

View File

@ -44,6 +44,7 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.datamodel.HashInfo;
// RJCTODO: Stories for a) persisting context-sensitive module settings and b) adapting core modules to use module settings (more important)
public class HashDbIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final Logger logger = Logger.getLogger(HashDbIngestModule.class.getName());
private static final int MAX_COMMENT_SIZE = 500;
@ -62,7 +63,8 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) {
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception {
super.startUp(context);
services = IngestServices.getDefault();
skCase = Case.getCurrentCase().getSleuthkitCase();

View File

@ -298,8 +298,8 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme
*
*/
@Override
public void startUp(IngestModuleContext context) {
setContext(context);
public void startUp(IngestModuleContext context) throws Exception {
super.startUp(context);
logger.log(Level.INFO, "init()");
services = IngestServices.getDefault();
initialized = false;

View File

@ -19,8 +19,6 @@
package org.sleuthkit.autopsy.keywordsearch;
import java.io.Serializable;
import javax.swing.JPanel;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
@ -48,7 +46,7 @@ public class KeywordSearchModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleDescription() {
return NbBundle.getMessage(KeywordSearchIngestModule.class, "HashDbInKeywordSearchIngestModulegestModule.moduleDescription");
return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription");
}
@Override

View File

@ -54,26 +54,26 @@ import org.sleuthkit.datamodel.TskData;
*/
class Chrome extends Extract {
private static final Logger logger = Logger.getLogger(Chrome.class.getName());
private static final String historyQuery = "SELECT urls.url, urls.title, urls.visit_count, urls.typed_count, "
+ "last_visit_time, urls.hidden, visits.visit_time, (SELECT urls.url FROM urls WHERE urls.id=visits.url) as from_visit, visits.transition FROM urls, visits WHERE urls.id = visits.url";
private static final String cookieQuery = "select name, value, host_key, expires_utc,last_access_utc, creation_utc from cookies";
private static final String downloadQuery = "select full_path, url, start_time, received_bytes from downloads";
private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id";
private static final String loginQuery = "select origin_url, username_value, signon_realm from logins";
private final Logger logger = Logger.getLogger(this.getClass().getName());
Chrome() {
moduleName = "Chrome";
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) {
dataFound = false;
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
this.getLogin(dataSource, controller);
this.getDownload(dataSource, controller);
this.getHistory(dataSource, statusHelper);
this.getBookmark(dataSource, statusHelper);
this.getCookie(dataSource, statusHelper);
this.getLogin(dataSource, statusHelper);
this.getDownload(dataSource, statusHelper);
}
/**

View File

@ -35,19 +35,26 @@ import org.sleuthkit.datamodel.*;
abstract class Extract {
private static final Logger logger = Logger.getLogger(Extract.class.getName());
protected Case currentCase = Case.getCurrentCase(); // RJCTODO: Fix this
protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); // RJCTODO: Fix this
protected Case currentCase = Case.getCurrentCase();
protected SleuthkitCase tskCase = currentCase.getSleuthkitCase();
public final Logger logger = Logger.getLogger(this.getClass().getName());
private final ArrayList<String> errorMessages = new ArrayList<>();
String moduleName = ""; // RJCTODO: Fix this
boolean dataFound = false; // RJCTODO: Fix this
String moduleName = "";
boolean dataFound = false;
Extract() {
dataFound = false;
}
// RJCTODO: Consider renaming
abstract void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller);
void init() throws Exception {
}
abstract void process(Content dataSource, DataSourceIngestModuleStatusHelper controller);
void complete() {
}
void stop() {
}
/**
* Returns a List of string error messages from the inheriting class
@ -82,7 +89,7 @@ abstract class Extract {
BlackboardArtifact bbart = content.newArtifact(type);
bbart.addAttributes(bbattributes);
} catch (TskException ex) {
logger.log(Level.SEVERE, "Error while trying to add an artifact: {0}", ex); // RJCTODO: Add extracter name
logger.log(Level.SEVERE, "Error while trying to add an artifact: {0}", ex);
}
}
@ -138,26 +145,15 @@ abstract class Extract {
return list;
}
void complete() {
}
void stop() {
}
/**
* Returns the name of the inheriting class
*
* @return Gets the moduleName set in the moduleName data member
*/
protected String getName() { // RJCTODO: Fix this
protected String getName() {
return moduleName;
}
// RJCTODO: Wire this in.
protected void setDataFound(boolean foundData) {
dataFound = foundData;
}
public boolean foundData() {
return dataFound;
}
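With the lifecycle above, a new extractor needs only a constructor that sets moduleName plus a process() override; init(), complete(), and stop() default to no-ops. A hypothetical minimal subclass, which would live in the same package as Extract:

import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper;
import org.sleuthkit.datamodel.Content;

// Hypothetical extractor showing the Extract lifecycle.
class ExampleExtractor extends Extract {

    ExampleExtractor() {
        moduleName = "Example";
    }

    @Override
    void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
        // Query the data source and post artifacts here, then record
        // whether anything was found so foundData() answers correctly.
        setDataFound(false);
    }
}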

View File

@ -65,7 +65,7 @@ import org.sleuthkit.datamodel.*;
*/
class ExtractIE extends Extract {
private static final Logger logger = Logger.getLogger(ExtractIE.class.getName());
private IngestServices services;
private IngestServices services = IngestServices.getDefault();
private String moduleTempResultsDir;
private String PASCO_LIB_PATH;
private String JAVA_PATH;
@ -79,7 +79,7 @@ class ExtractIE extends Extract {
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
dataFound = false;
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
@ -93,7 +93,7 @@ class ExtractIE extends Extract {
*/
private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> favoritesFiles = null;
List<AbstractFile> favoritesFiles;
try {
favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites");
} catch (TskCoreException ex) {
@ -125,7 +125,7 @@ class ExtractIE extends Extract {
datetime = Long.valueOf(Tempdate);
String domain = Util.extractDomain(url);
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TITLE.getTypeID(), "RecentActivity", name));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), "RecentActivity", datetime));
@ -172,7 +172,7 @@ class ExtractIE extends Extract {
*/
private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> cookiesFiles = null;
List<AbstractFile> cookiesFiles;
try {
cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies");
} catch (TskCoreException ex) {
@ -213,7 +213,7 @@ class ExtractIE extends Extract {
datetime = Long.valueOf(tempDate);
String domain = Util.extractDomain(url);
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", datetime));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", (name != null) ? name : ""));
@ -462,19 +462,11 @@ class ExtractIE extends Extract {
fileScanner.close();
}
// @Override
// public void init() {
// services = IngestServices.getDefault();
// }
@Override
public void stop() {
if (execPasco != null) {
execPasco.stop();
execPasco = null;
}
//call regular cleanup from complete() method
complete();
}
}

View File

@ -102,12 +102,6 @@ class ExtractRegistry extends Extract {
}
}
// @Override
// public String getVersion() {
// return MODULE_VERSION;
// }
/**
* Search for the registry hives on the system.
* @param dataSource Data source to search for hives in.
@ -532,7 +526,7 @@ class ExtractRegistry extends Extract {
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
analyzeRegistryFiles(dataSource, controller);
}

View File

@ -2,7 +2,7 @@
*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Copyright 2012-2014 Basis Technology Corp.
*
* Copyright 2012 42six Solutions.
* Contact: aebadirad <at> 42six <dot> com
@ -89,7 +89,7 @@ class ExtractUSB {
* @throws IOException
*/
private void loadDeviceMap() throws FileNotFoundException, IOException {
devices = new HashMap<String, USBInfo>();
devices = new HashMap<>();
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile);
try (Scanner dat = new Scanner(new FileInputStream(new java.io.File(PlatformUtil.getUserConfigDirectory() + File.separator + "USB_DATA.txt")))) {
String line = dat.nextLine();
@ -152,6 +152,7 @@ class ExtractUSB {
return product;
}
@Override
public String toString() {
return vendor + product;
}

View File

@ -50,7 +50,6 @@ import org.sleuthkit.datamodel.TskCoreException;
*/
class Firefox extends Extract {
private static final Logger logger = Logger.getLogger(Firefox.class.getName());
private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0";
private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies";
private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies";
@ -63,7 +62,7 @@ class Firefox extends Extract {
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
dataFound = false;
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);

View File

@ -43,18 +43,22 @@ import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
public final class RAImageIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName());
private IngestServices services;
private static int messageId = 0;
private final List<Extract> extracters = new ArrayList<>();
private final List<Extract> browserExtracters = new ArrayList<>();
private IngestServices services;
private StringBuilder subCompleted = new StringBuilder();
private ArrayList<Extract> extracters;
private List<Extract> browserExtracters;
RAImageIngestModule() {
}
synchronized int getNextMessageId() {
return ++messageId;
}
@Override
public ResultCode process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName()));
services.postMessage(IngestMessage.createMessage(getNextMessageId(), MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName()));
controller.switchToDeterminate(extracters.size());
controller.progress(0);
@ -68,7 +72,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
}
try {
extracter.extractRecentActivity(dataSource, controller);
extracter.process(dataSource, controller);
} catch (Exception ex) {
logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex);
subCompleted.append(extracter.getName()).append(" failed - see log for details <br>");
@ -99,7 +103,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
errorMessage.append("<p>No errors encountered.</p>");
errorMsgSubject = "No errors reported";
}
final IngestMessage msg = IngestMessage.createMessage(++messageId, msgLevel, RecentActivityExtracterModuleFactory.getModuleName(), "Finished " + dataSource.getName() + " - " + errorMsgSubject, errorMessage.toString());
final IngestMessage msg = IngestMessage.createMessage(getNextMessageId(), msgLevel, RecentActivityExtracterModuleFactory.getModuleName(), "Finished " + dataSource.getName() + " - " + errorMsgSubject, errorMessage.toString());
services.postMessage(msg);
StringBuilder historyMsg = new StringBuilder();
@ -110,7 +114,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
historyMsg.append("</li>");
}
historyMsg.append("</ul>");
final IngestMessage inboxMsg = IngestMessage.createMessage(++messageId, MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), dataSource.getName() + " - Browser Results", historyMsg.toString());
final IngestMessage inboxMsg = IngestMessage.createMessage(getNextMessageId(), MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), dataSource.getName() + " - Browser Results", historyMsg.toString());
services.postMessage(inboxMsg);
return ResultCode.OK;
@ -135,27 +139,23 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
}
@Override
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) {
extracters = new ArrayList<>();
browserExtracters = new ArrayList<>();
public void startUp(org.sleuthkit.autopsy.ingest.IngestModuleContext context) throws Exception {
super.startUp(context);
services = IngestServices.getDefault();
final Extract registry = new ExtractRegistry();
final Extract iexplore = new ExtractIE();
final Extract recentDocuments = new RecentDocumentsByLnk();
final Extract chrome = new Chrome();
final Extract firefox = new Firefox();
final Extract SEUQA = new SearchEngineURLQueryAnalyzer();
Extract registry = new ExtractRegistry();
Extract iexplore = new ExtractIE();
Extract recentDocuments = new RecentDocumentsByLnk();
Extract chrome = new Chrome();
Extract firefox = new Firefox();
Extract SEUQA = new SearchEngineURLQueryAnalyzer();
extracters.add(chrome);
extracters.add(firefox);
extracters.add(iexplore);
extracters.add(recentDocuments);
// this needs to run after the web browser modules
extracters.add(SEUQA);
// this runs last because it is slowest
extracters.add(registry);
extracters.add(SEUQA); // this needs to run after the web browser modules
extracters.add(registry); // this runs last because it is slowest // RJCTODO: Why?
browserExtracters.add(chrome);
browserExtracters.add(firefox);
@ -163,9 +163,10 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da
for (Extract extracter : extracters) {
try {
// extracter.init(); // RJCTODO
extracter.init();
} catch (Exception ex) {
logger.log(Level.SEVERE, "Exception during init() of " + extracter.getName(), ex);
throw new IngestModuleException(ex.getMessage());
}
}
}

View File

@ -47,7 +47,6 @@ import org.sleuthkit.datamodel.*;
class RecentDocumentsByLnk extends Extract {
private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName());
private IngestServices services;
final private static String MODULE_VERSION = "1.0";
/**
* Find the documents that Windows stores about recent documents and make artifacts.
@ -57,11 +56,11 @@ class RecentDocumentsByLnk extends Extract {
private void getRecentDocuments(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<AbstractFile> recentFiles = null;
List<AbstractFile> recentFiles;
try {
recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error searching for .lnk files.");
logger.log(Level.WARNING, "Error searching for .lnk files.", ex);
this.addErrorMessage(this.getName() + ": Error getting lnk Files.");
return;
}
@ -106,7 +105,7 @@ class RecentDocumentsByLnk extends Extract {
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
dataFound = false;
this.getRecentDocuments(dataSource, controller);
}

View File

@ -60,7 +60,6 @@ import org.xml.sax.SAXException;
*/
class SearchEngineURLQueryAnalyzer extends Extract {
private static final Logger logger = Logger.getLogger(SearchEngineURLQueryAnalyzer.class.getName());
private static final String XMLFILE = "SEUQAMappings.xml";
private static final String XSDFILE = "SearchEngineSchema.xsd";
private static String[] searchEngineNames;
@ -123,7 +122,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
String EngineName = nnm.getNamedItem("engine").getNodeValue();
String EnginedomainSubstring = nnm.getNamedItem("domainSubstring").getNodeValue();
Map<String, String> splits = new HashMap<String, String>();
Map<String, String> splits = new HashMap<>();
NodeList listSplits = xmlinput.getElementsByTagName("splitToken");
for (int k = 0; k < listSplits.getLength(); k++) {
@ -307,23 +306,21 @@ class SearchEngineURLQueryAnalyzer extends Extract {
}
@Override
public void extractRecentActivity(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) {
this.getURLs(dataSource, controller);
logger.info("Search Engine stats: \n" + getTotals());
}
// RJCTODO: Move to ctor or something
// @Override
// public void init(IngestModuleInit initContext) {
// try {
// services = IngestServices.getDefault();
// PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE);
// init2();
// } catch (IOException e) {
// logger.log(Level.SEVERE, "Unable to find " + XMLFILE, e);
// }
// }
//
@Override
void init() throws Exception {
try {
PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE);
init2();
} catch (IOException e) {
logger.log(Level.SEVERE, "Unable to find " + XMLFILE, e);
}
}
private void init2() {
try {
String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE;
@ -347,4 +344,14 @@ class SearchEngineURLQueryAnalyzer extends Extract {
logger.log(Level.SEVERE, "Unable to parse XML file", sxe);
}
}
@Override
public void complete() {
logger.info("Search Engine URL Query Analyzer has completed.");
}
@Override
public void stop() {
logger.info("Attempted to stop Search Engine URL Query Analyzer, but operation is not supported; skipping...");
}
}

View File

@ -6,6 +6,14 @@
<code-name-base>org.sleuthkit.autopsy.scalpel</code-name-base>
<suite-component/>
<module-dependencies>
<dependency>
<code-name-base>org.openide.util.lookup</code-name-base>
<build-prerequisite/>
<compile-dependency/>
<run-dependency>
<specification-version>8.19.1</specification-version>
</run-dependency>
</dependency>
<dependency>
<code-name-base>org.sleuthkit.autopsy.core</code-name-base>
<build-prerequisite/>

View File

@ -18,268 +18,226 @@
*/
package org.sleuthkit.autopsy.scalpel;
//import java.io.File;
//import java.io.IOException;
//import java.util.ArrayList;
//import java.util.List;
//import java.util.logging.Level;
//import org.sleuthkit.autopsy.casemodule.Case;
//import org.sleuthkit.autopsy.coreutils.Logger;
//import org.sleuthkit.autopsy.coreutils.PlatformUtil;
//import org.sleuthkit.autopsy.coreutils.Version;
//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult;
//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
//import org.sleuthkit.autopsy.ingest.IngestServices;
//import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
//import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta;
//import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver;
//import org.sleuthkit.autopsy.scalpel.jni.ScalpelException;
//import org.sleuthkit.datamodel.AbstractFile;
//import org.sleuthkit.datamodel.Content;
//import org.sleuthkit.datamodel.FileSystem;
//import org.sleuthkit.datamodel.Image;
//import org.sleuthkit.datamodel.LayoutFile;
//import org.sleuthkit.datamodel.SleuthkitCase;
//import org.sleuthkit.datamodel.TskCoreException;
//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
//import org.sleuthkit.datamodel.TskFileRange;
//import org.sleuthkit.datamodel.Volume;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile.ProcessResult;
import org.sleuthkit.autopsy.ingest.IngestModuleAdapter;
import org.sleuthkit.autopsy.ingest.IngestModuleContext;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelOutputParser.CarvedFileMeta;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelCarver;
import org.sleuthkit.autopsy.scalpel.jni.ScalpelException;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskFileRange;
import org.sleuthkit.datamodel.Volume;
/**
* Scalpel carving ingest module
*/
//class ScalpelCarverIngestModule { // extends IngestModuleAbstractFile { // disable autodiscovery for now {
//
// private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName());
//
// private static ScalpelCarverIngestModule instance;
// private final String MODULE_NAME = "Scalpel Carver";
// private final String MODULE_DESCRIPTION = "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree.";
// private final String MODULE_VERSION = Version.getVersion();
// private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver";
// private String moduleOutputDirPath;
// private String configFileName = "scalpel.conf";
// private String configFilePath;
// private boolean initialized = false;
// private ScalpelCarver carver;
//
// private ScalpelCarverIngestModule() {
// ScalpelCarver.init();
// }
//
// // @Override
// public ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile abstractFile) {
//
// if (!initialized) {
// return ProcessResult.OK;
// }
//
// // only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
// TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType();
// if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
// return ProcessResult.OK;
// }
//
// // create the output directory for this run
// String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId();
// File scalpelOutputDir = new File(scalpelOutputDirPath);
// if (!scalpelOutputDir.exists()) {
// if (!scalpelOutputDir.mkdir()) {
// logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath);
// return ProcessResult.OK;
// }
// }
//
// // find the ID of the parent FileSystem, Volume or Image
// long id = -1;
// Content parent = null;
// try {
// parent = abstractFile.getParent();
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex);
// }
// while (parent != null) {
// if (parent instanceof FileSystem ||
// parent instanceof Volume ||
// parent instanceof Image) {
// id = parent.getId();
// break;
// }
// try {
// parent = parent.getParent();
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex);
// }
// }
//
// // make sure we have a valid systemID
// if (id == -1) {
// logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile.");
// return ProcessResult.OK;
// }
//
// // carve the AbstractFile
// List<CarvedFileMeta> output = null;
// try {
// output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath);
// } catch (ScalpelException ex) {
// logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId());
// return ProcessResult.OK;
// }
//
//
// // get the image's size
// long imageSize = Long.MAX_VALUE;
// try {
//
// imageSize = abstractFile.getImage().getSize();
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "Could not obtain the image's size.");
// }
//
// // add a carved file to the DB for each file that scalpel carved
// SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase();
// List<LayoutFile> carvedFiles = new ArrayList<LayoutFile>(output.size());
// for (CarvedFileMeta carvedFileMeta : output) {
//
// // calculate the byte offset of this carved file
// long byteOffset;
// try {
// byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart());
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")");
// break;
// }
//
// // get the size of the carved file
// long size = carvedFileMeta.getByteLength();
//
// // create the list of TskFileRange objects
// List<TskFileRange> data = new ArrayList<TskFileRange>();
// data.add(new TskFileRange(byteOffset, size, 0));
//
// // add the carved file
// try {
// carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data));
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex);
// }
// }
//
// // get the IngestServices object
// IngestServices is = IngestServices.getDefault();
//
// // get the parent directory of the carved files
// Content carvedFileDir = null;
// if (!carvedFiles.isEmpty()) {
// try {
// carvedFileDir = carvedFiles.get(0).getParent();
// } catch (TskCoreException ex) {
// logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex);
// }
// }
//
// // send a notification about the carved files directory
// if (carvedFileDir != null) {
// is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir));
// } else {
// logger.log(Level.SEVERE, "Could not obtain the carved files directory.");
// }
//
// // reschedule carved files
// for (LayoutFile carvedFile : carvedFiles) {
// is.scheduleFile(carvedFile, pipelineContext);
// }
//
// return ProcessResult.OK;
// }
//
//
// public static ScalpelCarverIngestModule getDefault() {
// if (instance == null) {
// synchronized (ScalpelCarverIngestModule.class) {
// if (instance == null) {
// instance = new ScalpelCarverIngestModule();
// }
// }
// }
// return instance;
// }
//
// // @Override
// public void init(IngestModuleInit initContext) {
//
// // make sure this is Windows
// String os = System.getProperty("os.name");
// if (!os.startsWith("Windows")) {
// logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time.");
// return;
// }
//
//
// carver = new ScalpelCarver();
// if (! carver.isInitialized()) {
// logger.log(Level.SEVERE, "Error initializing scalpel carver. ");
// return;
// }
//
// // make sure module output directory exists; create it if it doesn't
// moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() +
// File.separator + MODULE_OUTPUT_DIR_NAME;
// File moduleOutputDir = new File(moduleOutputDirPath);
// if (!moduleOutputDir.exists()) {
// if (!moduleOutputDir.mkdir()) {
// logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module.");
// return;
// }
// }
//
// // create path to scalpel config file in user's home directory
// configFilePath = PlatformUtil.getUserConfigDirectory()
// + File.separator + configFileName;
//
// // copy the default config file to the user's home directory if one
// // is not already there
// try {
// PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
// } catch (IOException ex) {
// logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex);
// return;
// }
//
// initialized = true;
// }
//
// // @Override
// public void complete() { }
//
// // @Override
// public void stop() { }
//
// // @Override
// public String getName() {
// return MODULE_NAME;
// }
//
// // @Override
// public String getVersion() {
// return MODULE_VERSION;
// }
//
// // @Override
// public String getDescription() {
// return MODULE_DESCRIPTION;
// }
//
// // @Override
// public boolean hasBackgroundJobsRunning() {
// return false;
// }
//
//
//
//
//
//
//}
class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileIngestModule {
private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName());
private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver";
private String moduleOutputDirPath;
private String configFileName = "scalpel.conf";
private String configFilePath;
private boolean initialized = false;
private ScalpelCarver carver;
private IngestModuleContext context;
ScalpelCarverIngestModule() {
}
@Override
public ResultCode process(AbstractFile abstractFile) {
ScalpelCarver.init();
if (!initialized) {
return ResultCode.OK;
}
// only process files whose type is TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS
TSK_DB_FILES_TYPE_ENUM type = abstractFile.getType();
if (type != TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS) {
return ResultCode.OK;
}
// create the output directory for this run
String scalpelOutputDirPath = moduleOutputDirPath + File.separator + abstractFile.getId();
File scalpelOutputDir = new File(scalpelOutputDirPath);
if (!scalpelOutputDir.exists()) {
if (!scalpelOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create Scalpel output directory: " + scalpelOutputDirPath);
return ResultCode.OK;
}
}
// find the ID of the parent FileSystem, Volume or Image
long id = -1;
Content parent = null;
try {
parent = abstractFile.getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception while trying to get parent of AbstractFile.", ex);
}
while (parent != null) {
if (parent instanceof FileSystem ||
parent instanceof Volume ||
parent instanceof Image) {
id = parent.getId();
break;
}
try {
parent = parent.getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Exception while trying to get parent of Content object.", ex);
}
}
// make sure we have a valid systemID
if (id == -1) {
logger.log(Level.SEVERE, "Could not get an ID for a FileSystem, Volume or Image for the given AbstractFile.");
return ResultCode.OK;
}
// carve the AbstractFile
List<CarvedFileMeta> output = null;
try {
output = carver.carve(abstractFile, configFilePath, scalpelOutputDirPath);
} catch (ScalpelException ex) {
logger.log(Level.SEVERE, "Error when attempting to carved data from AbstractFile with ID " + abstractFile.getId());
return ResultCode.OK;
}
// get the image's size
long imageSize = Long.MAX_VALUE;
try {
imageSize = abstractFile.getImage().getSize();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not obtain the image's size.");
}
// add a carved file to the DB for each file that scalpel carved
SleuthkitCase db = Case.getCurrentCase().getSleuthkitCase();
List<LayoutFile> carvedFiles = new ArrayList<LayoutFile>(output.size());
for (CarvedFileMeta carvedFileMeta : output) {
// calculate the byte offset of this carved file
long byteOffset;
try {
byteOffset = abstractFile.convertToImgOffset(carvedFileMeta.getByteStart());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Could not calculate the image byte offset of AbstractFile (" + abstractFile.getName() + ")");
break;
}
// get the size of the carved file
long size = carvedFileMeta.getByteLength();
// create the list of TskFileRange objects
List<TskFileRange> data = new ArrayList<TskFileRange>();
data.add(new TskFileRange(byteOffset, size, 0));
// add the carved file
try {
carvedFiles.add(db.addCarvedFile(carvedFileMeta.getFileName(), size, id, data));
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "There was a problem while trying to add a carved file to the database.", ex);
}
}
// get the IngestServices object
IngestServices is = IngestServices.getDefault();
// get the parent directory of the carved files
Content carvedFileDir = null;
if (!carvedFiles.isEmpty()) {
try {
carvedFileDir = carvedFiles.get(0).getParent();
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "There was a problem while trying to obtain the carved files directory.", ex);
}
}
// send a notification about the carved files directory
if (carvedFileDir != null) {
is.fireModuleContentEvent(new ModuleContentEvent(carvedFileDir));
} else {
logger.log(Level.SEVERE, "Could not obtain the carved files directory.");
}
// reschedule carved files
for (LayoutFile carvedFile : carvedFiles) {
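// NOTE: pipelineContext is a holdover from the old process() signature
// and is undefined in this method; rescheduling carved files will need
// to go through the new context/scheduling API once that settles.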
is.scheduleFile(carvedFile, pipelineContext);
}
return ResultCode.OK;
}
@Override
public void startUp(IngestModuleContext context) throws IngestModuleException {
this.context = context;
// make sure this is Windows
String os = System.getProperty("os.name");
if (!os.startsWith("Windows")) {
logger.log(Level.WARNING, "Scalpel carving module is not compatible with non-Windows OS's at this time.");
return;
}
carver = new ScalpelCarver();
if (! carver.isInitialized()) {
logger.log(Level.SEVERE, "Error initializing scalpel carver. ");
return;
}
// make sure module output directory exists; create it if it doesn't
moduleOutputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() +
File.separator + MODULE_OUTPUT_DIR_NAME;
File moduleOutputDir = new File(moduleOutputDirPath);
if (!moduleOutputDir.exists()) {
if (!moduleOutputDir.mkdir()) {
logger.log(Level.SEVERE, "Could not create the output directory for the Scalpel module.");
return;
}
}
// create the path to the Scalpel config file in the user's config directory
configFilePath = PlatformUtil.getUserConfigDirectory()
+ File.separator + configFileName;
// copy the default config file to the user's config directory if one
// is not already there
try {
PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName);
} catch (IOException ex) {
logger.log(Level.SEVERE, "Could not obtain the path to the Scalpel configuration file.", ex);
return;
}
initialized = true;
}
}
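The carving loop above represents each carved file as a single contiguous TskFileRange with sequence number 0. Because addCarvedFile() accepts a list of ranges, a file recovered from non-contiguous runs could presumably be expressed with one range per run. A minimal sketch, reusing the db and id variables from process() above; the run offsets, lengths, and file name are hypothetical:

// One TskFileRange per contiguous run, ordered by its sequence number.
List<TskFileRange> runs = new ArrayList<TskFileRange>();
runs.add(new TskFileRange(firstRunImgOffset, firstRunLength, 0));   // first run in the image
runs.add(new TskFileRange(secondRunImgOffset, secondRunLength, 1)); // second, non-adjacent run
long totalSize = firstRunLength + secondRunLength;
db.addCarvedFile("fragmented_file.bin", totalSize, id, runs);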

View File

@ -0,0 +1,65 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.scalpel;
// TODO: Uncomment the following line to allow the ingest framework to use this module
//import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.IngestModuleFactoryAdapter;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.autopsy.ingest.IngestModuleSettings;
/**
* A factory that creates file ingest modules that use Scalpel to carve
* unallocated space.
*/
// TODO: Uncomment the following line to allow the ingest framework to use this module
//@ServiceProvider(service = IngestModuleFactory.class)
public class ScalpelCarverModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return "Scalpel Carver";
}
@Override
public String getModuleDescription() {
return "Carves files from unallocated space at ingest time.\nCarved files are reanalyzed and displayed in the directory tree.";
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public boolean isFileIngestModuleFactory() {
return true;
}
@Override
public FileIngestModule createFileIngestModule(IngestModuleSettings ingestOptions) {
return new ScalpelCarverIngestModule();
}
}
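For reference, a minimal sketch of how this factory could be exercised once the @ServiceProvider registration above is uncommented. Passing null for the settings is an assumption for illustration only, since settings handling is not shown in this commit:

IngestModuleFactory factory = new ScalpelCarverModuleFactory();
if (factory.isFileIngestModuleFactory()) {
    // null is a placeholder for whatever IngestModuleSettings the framework supplies
    FileIngestModule module = factory.createFileIngestModule(null);
}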

View File

@ -58,8 +58,6 @@ import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.ingest.IngestModule.ResultCode;
import org.sleuthkit.autopsy.ingest.IngestModuleContext;
// RJCTODO: Possibly use getContext().getModuleDisplayName() more
/**
* 7Zip ingest module. Extracts supported archives, adds the extracted files
* as DerivedFiles, and reschedules those DerivedFiles for ingest.
@ -92,8 +90,8 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F
}
@Override
public void startUp(IngestModuleContext context) {
setContext(context);
public void startUp(IngestModuleContext context) throws IngestModuleException {
super.startUp(context);
unpackDir = getContext().getOutputDirectoryRelativePath();
unpackDirPath = getContext().getOutputDirectoryAbsolutePath();
fileManager = getContext().getCase().getServices().getFileManager();
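The change above swaps a direct setContext() call for super.startUp(context), letting the adapter own the context that getContext() later returns. A minimal sketch of the adapter contract this assumes, reconstructed only from the calls visible in these diffs rather than from the actual IngestModuleAdapter source:

public abstract class IngestModuleAdapter {
    private IngestModuleContext context;

    // Subclasses call super.startUp(context) so getContext() is usable afterwards.
    public void startUp(IngestModuleContext context) throws IngestModuleException {
        this.context = context;
    }

    protected IngestModuleContext getContext() {
        return context;
    }
}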

View File

@ -35,21 +35,21 @@ import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleContext;
import org.sleuthkit.autopsy.ingest.IngestModuleTempApiShim;
/**
* Data source ingest module that verifies the integrity of an Expert Witness
* Format (EWF) E01 image file by generating a hash of the file and comparing it
* to the value stored in the image.
*/
public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule, IngestModuleTempApiShim {
public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSourceIngestModule {
private static final Logger logger = Logger.getLogger(EwfVerifyIngestModule.class.getName());
private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
private static final IngestServices services = IngestServices.getDefault();
private IngestModuleContext context;
private Image img;
private String imgName;
private MessageDigest messageDigest;
private static int messageId = 0;
private static int messageId = 0; // RJCTODO: Copy-paste synchronized implementation, put in sample also (a sketch follows this diff)
private boolean verified = false;
private boolean skipped = false;
private String calculatedHash = "";
@ -59,13 +59,8 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
}
@Override
public String getDisplayName() {
return EwfVerifierModuleFactory.getModuleName();
}
@Override
public void startUp(IngestModuleContext context) {
setContext(context);
public void startUp(IngestModuleContext context) throws IngestModuleException {
this.context = context;
verified = false;
skipped = false;
img = null;
@ -93,8 +88,8 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
} catch (TskCoreException ex) {
img = null;
String message = "Failed to get image from Content";
getContext().logError(EwfVerifyIngestModule.class, message, ex);
getContext().postIngestMessage(++messageId, MessageType.ERROR, message);
context.logError(EwfVerifyIngestModule.class, message, ex);
context.postIngestMessage(++messageId, MessageType.ERROR, message);
return ResultCode.ERROR;
}
@ -102,7 +97,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
img = null;
logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(),
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(),
"Skipping non-ewf image " + imgName));
skipped = true;
return ResultCode.OK;
@ -114,19 +109,19 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
}
else {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(),
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(),
"Image " + imgName + " does not have stored hash."));
return ResultCode.ERROR;
}
logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(),
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(),
"Starting " + imgName));
long size = img.getSize();
if (size == 0) {
logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(),
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(),
"Error getting size of " + imgName + ". Image will not be processed."));
}
@ -152,7 +147,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
read = img.read(data, i * chunkSize, chunkSize);
} catch (TskCoreException ex) {
String msg = "Error reading " + imgName + " at chunk " + i;
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, getDisplayName(), msg));
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, EwfVerifierModuleFactory.getModuleName(), msg));
logger.log(Level.SEVERE, msg, ex);
return ResultCode.ERROR;
}
@ -169,14 +164,14 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo
@Override
public void shutDown(boolean ingestJobCancelled) {
logger.log(Level.INFO, "complete() {0}", getDisplayName());
logger.log(Level.INFO, "complete() {0}", EwfVerifierModuleFactory.getModuleName());
if (skipped == false) {
String msg = verified ? " verified" : " not verified";
String extra = "<p>EWF Verification Results for " + imgName + "</p>";
extra += "<li>Result:" + msg + "</li>";
extra += "<li>Calculated hash: " + calculatedHash + "</li>";
extra += "<li>Stored hash: " + storedHash + "</li>";
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, getDisplayName(), imgName + msg, extra));
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), imgName + msg, extra));
logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
}
}
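The RJCTODO above calls for a thread-safe replacement for the static ++messageId counter, which can skip or duplicate IDs when ingest threads post messages concurrently. A minimal sketch using java.util.concurrent.atomic.AtomicInteger; this is an assumption about the intended fix, not code from this commit:

import java.util.concurrent.atomic.AtomicInteger;

// Shared across module instances; incrementAndGet() is atomic, so concurrent
// threads can never observe the same ID twice.
private static final AtomicInteger messageId = new AtomicInteger(0);

// Usage, replacing each ++messageId call site:
services.postMessage(IngestMessage.createMessage(messageId.incrementAndGet(),
        MessageType.INFO, EwfVerifierModuleFactory.getModuleName(), "Starting " + imgName));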

View File

@ -42,7 +42,7 @@ public class EmailParserModuleFactory extends IngestModuleFactoryAdapter {
@Override
public String getModuleDisplayName() {
return "Archive Extractor";
return getModuleName();
}
@Override