Changed image-level ingest to data-source Content-level ingest.

Related changes include FileManager API adjustments and fixes in client code.
Simplified RecentActivity logic to get files for the current data source (no need to use a deprecated method).
adam-m 2013-06-07 18:49:59 -04:00
parent 10d2897a23
commit fbe2dd76eb
23 changed files with 375 additions and 434 deletions

View File

@ -27,7 +27,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import org.openide.util.Exceptions;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
import org.sleuthkit.autopsy.ingest.IngestServices;
@ -35,10 +34,7 @@ import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.LocalFile;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.SleuthkitCase;
@ -81,57 +77,57 @@ public class FileManager implements Closeable {
}
/**
* @param image image where to find files
* @param dataSource data source Content (Image, parent-less VirtualDirectory) where to find files
* @param fileName the name of the file or directory to match
* @return a list of FsContent for files/directories whose name matches the
* @return a list of AbstractFile for files/directories whose name matches the
* given fileName
*/
public synchronized List<FsContent> findFiles(Image image, String fileName) throws TskCoreException {
public synchronized List<AbstractFile> findFiles(Content dataSource, String fileName) throws TskCoreException {
if (tskCase == null) {
throw new TskCoreException("Attempted to use FileManager after it was closed.");
}
return tskCase.findFiles(image, fileName);
return tskCase.findFiles(dataSource, fileName);
}
/**
* @param image image where to find files
* @param dataSource data source Content (Image, parent-less VirtualDirectory) where to find files
* @param fileName the name of the file or directory to match
* @param dirName the name of a parent directory of fileName
* @return a list of FsContent for files/directories whose name matches
* @return a list of AbstractFile for files/directories whose name matches
* fileName and whose parent directory contains dirName.
*/
public synchronized List<FsContent> findFiles(Image image, String fileName, String dirName) throws TskCoreException {
public synchronized List<AbstractFile> findFiles(Content dataSource, String fileName, String dirName) throws TskCoreException {
if (tskCase == null) {
throw new TskCoreException("Attempted to use FileManager after it was closed.");
}
return tskCase.findFiles(image, fileName, dirName);
return tskCase.findFiles(dataSource, fileName, dirName);
}
/**
* @param image image where to find files
* @param dataSource data source Content (Image, parent-less VirtualDirectory) where to find files
* @param fileName the name of the file or directory to match
* @param parentFsContent
* @return a list of FsContent for files/directories whose name matches
* @param parentFile parent file/dir of the file to find
* @return a list of AbstractFile for files/directories whose name matches
* fileName and that were inside a directory described by parentFile.
*/
public synchronized List<FsContent> findFiles(Image image, String fileName, FsContent parentFsContent) throws TskCoreException {
public synchronized List<AbstractFile> findFiles(Content dataSource, String fileName, AbstractFile parentFile) throws TskCoreException {
if (tskCase == null) {
throw new TskCoreException("Attempted to use FileManager after it was closed.");
}
return findFiles(image, fileName, parentFsContent.getName());
return findFiles(dataSource, fileName, parentFile.getName());
}
/**
* @param image image where to find files
* @param dataSource data source Content (Image, parent-less VirtualDirectory) where to find files
* @param filePath The full path to the file(s) of interest. This can
* optionally include the image and volume names.
* @return a list of FsContent that have the given file path.
* @return a list of AbstractFile that have the given file path.
*/
public synchronized List<FsContent> openFiles(Image image, String filePath) throws TskCoreException {
public synchronized List<AbstractFile> openFiles(Content dataSource, String filePath) throws TskCoreException {
if (tskCase == null) {
throw new TskCoreException("Attempted to use FileManager after it was closed.");
}
return tskCase.openFiles(image, filePath);
return tskCase.openFiles(dataSource, filePath);
}
/**
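For illustration (not part of this commit): the signature change above ripples into every client call site. A minimal sketch of a caller under the new API; the helper class and method names here are hypothetical, while findFiles and the "History"/"Chrome" arguments appear later in this commit's Chrome.java changes.

    import java.util.List;
    import org.sleuthkit.autopsy.casemodule.services.FileManager;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.Content;
    import org.sleuthkit.datamodel.TskCoreException;

    class ChromeHistoryLocator {
        // Same call shape as before the commit, but the scope argument is now
        // any parent-less Content (Image or VirtualDirectory), not just Image.
        static List<AbstractFile> findHistory(FileManager fileManager, Content dataSource)
                throws TskCoreException {
            return fileManager.findFiles(dataSource, "History", "Chrome");
        }
    }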

View File

@ -18,7 +18,7 @@
*/
package org.sleuthkit.autopsy.ingest;
//ingester worker for image queue
//ingester worker for DataSource queue
import java.awt.EventQueue;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
@ -32,37 +32,36 @@ import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
/**
* Worker thread that runs an image-level ingest module.
* Used to process only a single image and single module.
* Worker thread that runs a data source-level ingest module (image, file set virt dir, etc).
* Used to process only a single data source and a single module.
*/
public class IngestImageThread extends SwingWorker<Void, Void> {
public class IngestDataSourceThread extends SwingWorker<Void, Void> {
private final Logger logger = Logger.getLogger(IngestImageThread.class.getName());
private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
private ProgressHandle progress;
private final PipelineContext<IngestModuleImage>pipelineContext;
private final Image image;
private final IngestModuleImage module;
private IngestImageWorkerController controller;
private final PipelineContext<IngestModuleDataSource>pipelineContext;
private final Content dataSource;
private final IngestModuleDataSource module;
private IngestDataSourceWorkerController controller;
private final IngestManager manager;
private final IngestModuleInit init;
//current method of enqueuing image ingest modules with locks and internal lock queue
//allows to init, run and complete a single image ingest module at time
//current method of enqueuing data source ingest modules with locks and internal lock queue
//ensures that we init, run and complete a single data source ingest module at a time
//uses fairness policy to run them in order enqueued
//TODO use a real queue and manager to allow multiple different modules to run in parallel
private static final Lock imageIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
IngestImageThread(IngestManager manager, PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestModuleImage module, IngestModuleInit init) {
IngestDataSourceThread(IngestManager manager, PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestModuleDataSource module, IngestModuleInit init) {
this.manager = manager;
this.pipelineContext = pipelineContext;
this.image = image;
this.dataSource = dataSource;
this.module = module;
this.init = init;
}
PipelineContext<IngestModuleImage>getContext() {
PipelineContext<IngestModuleDataSource>getContext() {
return pipelineContext;
}
@ -70,7 +69,7 @@ public class IngestImageThread extends SwingWorker<Void, Void> {
return pipelineContext.getScheduledTask().getContent();
}
IngestModuleImage getModule() {
IngestModuleDataSource getModule() {
return module;
}
@ -79,21 +78,21 @@ public class IngestImageThread extends SwingWorker<Void, Void> {
logger.log(Level.INFO, "Pending module: " + module.getName());
final String displayName = module.getName() + " image id:" + image.getId();
final String displayName = module.getName() + " dataSource id:" + dataSource.getId();
progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
@Override
public boolean cancel() {
logger.log(Level.INFO, "Image ingest module " + module.getName() + " cancelled by user.");
logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user.");
if (progress != null) {
progress.setDisplayName(displayName + " (Cancelling...)");
}
return IngestImageThread.this.cancel(true);
return IngestDataSourceThread.this.cancel(true);
}
});
progress.start();
progress.switchToIndeterminate();
imageIngestModuleLock.lock();
dataSourceIngestModuleLock.lock();
try {
if (this.isCancelled()) {
logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName());
@ -114,26 +113,26 @@ public class IngestImageThread extends SwingWorker<Void, Void> {
logger.log(Level.INFO, "Starting processing of module: " + module.getName());
controller = new IngestImageWorkerController(this, progress);
controller = new IngestDataSourceWorkerController(this, progress);
if (isCancelled()) {
logger.log(Level.INFO, "Terminating image ingest module " + module.getName() + " due to cancellation.");
logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation.");
return Void.TYPE.newInstance();
}
final StopWatch timer = new StopWatch();
timer.start();
try {
module.process(pipelineContext, image, controller);
module.process(pipelineContext, dataSource, controller);
} catch (Exception e) {
logger.log(Level.WARNING, "Exception in module: " + module.getName() + " image: " + image.getName(), e);
logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e);
} finally {
timer.stop();
logger.log(Level.INFO, "Done processing of module: " + module.getName()
+ " took " + timer.getElapsedTimeSecs() + " secs. to process()");
//cleanup queues (worker and image/module)
manager.removeImageIngestWorker(this);
//cleanup queues (worker and DataSource/module)
manager.removeDataSourceIngestWorker(this);
if (!this.isCancelled()) {
logger.log(Level.INFO, "Module " + module.getName() + " completed");
@ -157,7 +156,7 @@ public class IngestImageThread extends SwingWorker<Void, Void> {
return Void.TYPE.newInstance();
} finally {
//release the lock so next module can run
imageIngestModuleLock.unlock();
dataSourceIngestModuleLock.unlock();
EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
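The fairness-based serialization described in the comments above reduces to the following standalone sketch (SerializedWorker is hypothetical; the fair write lock mirrors dataSourceIngestModuleLock from this file):

    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class SerializedWorker implements Runnable {
        // Fair write lock shared by all workers: modules init, run and
        // complete one at a time, in the order their threads arrive.
        private static final Lock moduleLock = new ReentrantReadWriteLock(true).writeLock();
        private final String moduleName;

        SerializedWorker(String moduleName) {
            this.moduleName = moduleName;
        }

        @Override
        public void run() {
            moduleLock.lock(); // blocks until the previous module finishes
            try {
                System.out.println("processing with " + moduleName);
            } finally {
                moduleLock.unlock(); // lets the next queued worker proceed
            }
        }
    }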

View File

@ -21,20 +21,20 @@ package org.sleuthkit.autopsy.ingest;
import org.netbeans.api.progress.ProgressHandle;
/**
* Controller for image level ingest modules
* Controller for DataSource level ingest modules
* Used by modules to check task status and to post progress to
*/
public class IngestImageWorkerController {
public class IngestDataSourceWorkerController {
private IngestImageThread worker;
private IngestDataSourceThread worker;
private ProgressHandle progress;
/**
* Instantiate the controller for the worker
* @param worker underlying image ingest thread
* @param worker underlying DataSource ingest thread
* @param progress the progress handle
*/
IngestImageWorkerController(IngestImageThread worker, ProgressHandle progress) {
IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
this.worker = worker;
this.progress = progress;
}
@ -51,7 +51,7 @@ public class IngestImageWorkerController {
/**
* Update the progress bar and switch to determinate mode once number of total work units is known
* @param workUnits total number of work units for the image ingest task
* @param workUnits total number of work units for the DataSource ingest task
*/
public void switchToDeterminate(int workUnits) {
if (progress != null) {
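A module-side sketch of driving this controller; only switchToDeterminate(int) appears in this hunk, and the progress(int) companion call is an assumption about the surrounding API:

    int totalUnits = 100; // e.g. number of files the module will examine
    controller.switchToDeterminate(totalUnits); // shown in this diff
    for (int i = 0; i < totalUnits; i++) {
        // ... analyze one unit of work on the data source ...
        controller.progress(i + 1); // assumed companion call to advance the bar
    }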

View File

@ -79,8 +79,8 @@ public class IngestDialogPanel extends javax.swing.JPanel implements IngestConfi
private void loadModules() {
this.modules.clear();
//this.moduleStates.clear(); maintain the state
Collection<IngestModuleImage> imageModules = manager.enumerateImageModules();
for (final IngestModuleImage module : imageModules) {
Collection<IngestModuleDataSource> imageModules = manager.enumerateDataSourceModules();
for (final IngestModuleDataSource module : imageModules) {
addModule(module);
}
Collection<IngestModuleAbstractFile> fsModules = manager.enumerateAbstractFileModules();

View File

@ -43,7 +43,6 @@ import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestScheduler.FileScheduler.ProcessTask;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
/**
* IngestManager sets up and manages ingest modules, runs them in a background
@ -62,10 +61,10 @@ public class IngestManager {
private final IngestScheduler scheduler;
//workers
private IngestAbstractFileProcessor abstractFileIngester;
private List<IngestImageThread> imageIngesters;
private List<IngestDataSourceThread> dataSourceIngesters;
private SwingWorker<Object, Void> queueWorker;
//modules
private List<IngestModuleImage> imageModules;
private List<IngestModuleDataSource> dataSourceModules;
private List<IngestModuleAbstractFile> abstractFileModules;
// module return values
private final Map<String, IngestModuleAbstractFile.ProcessResult> abstractFileModulesRetValues = new HashMap<String, IngestModuleAbstractFile.ProcessResult>();
@ -127,7 +126,7 @@ public class IngestManager {
private static volatile IngestManager instance;
private IngestManager() {
imageIngesters = new ArrayList<IngestImageThread>();
dataSourceIngesters = new ArrayList<IngestDataSourceThread>();
scheduler = IngestScheduler.getInstance();
@ -147,11 +146,11 @@ public class IngestManager {
if (evt.getPropertyName().equals(IngestModuleLoader.Event.ModulesReloaded.toString())) {
//TODO might need to not allow to remove modules if they are running
abstractFileModules = moduleLoader.getAbstractFileIngestModules();
imageModules = moduleLoader.getImageIngestModules();
dataSourceModules = moduleLoader.getDataSourceIngestModules();
}
}
});
imageModules = moduleLoader.getImageIngestModules();
dataSourceModules = moduleLoader.getDataSourceIngestModules();
} catch (IngestModuleLoaderException ex) {
logger.log(Level.SEVERE, "Error getting module loader");
}
@ -224,11 +223,11 @@ public class IngestManager {
}
/**
* Multiple image version of execute() method. Enqueues multiple data inputs (Content objects)
* Multiple data-source version of execute() method. Enqueues multiple data source inputs (Content objects)
* and associated modules at once
*
* @param modules modules to execute on every image
* @param inputs inputs to enqueue and execute the ingest modules on
* @param modules modules to execute on every data source
* @param inputs input data sources to enqueue and execute the ingest modules on
*/
public void execute(final List<IngestModuleAbstract> modules, final List<Content> inputs) {
logger.log(Level.INFO, "Will enqueue number of inputs: " + inputs.size()
@ -244,7 +243,7 @@ public class IngestManager {
if (ui != null) {
ui.restoreMessages();
}
//logger.log(Level.INFO, "Queues: " + imageQueue.toString() + " " + AbstractFileQueue.toString());
}
/**
@ -258,8 +257,8 @@ public class IngestManager {
* not block and can be called multiple times to enqueue more work to
* already running background ingest process.
*
* @param modules modules to execute on the image
* @param input input Content objects to execute the ingest modules on
* @param modules modules to execute on the data source input
* @param input input data source Content objects to execute the ingest modules on
*/
public void execute(final List<IngestModuleAbstract> modules, final Content input) {
List<Content> inputs = new ArrayList<Content>();
@ -269,7 +268,7 @@ public class IngestManager {
}
/**
* Schedule a file for ingest and add it to ongoing file ingest process on the same image.
* Schedule a file for ingest and add it to ongoing file ingest process on the same data source.
* Scheduler updates the current progress.
*
* The file to be added is usually a product of a currently running ingest.
@ -289,14 +288,14 @@ public class IngestManager {
* if AbstractFile module is still running, do nothing and allow it to
* consume queue otherwise start /restart AbstractFile worker
*
* image workers run per (module,image). Check if one for the (module,image)
* data source ingest workers run per (module, content). Check if one for the same (module, content)
* is already running; otherwise start/restart the worker
*/
private synchronized void startAll() {
final IngestScheduler.ImageScheduler imageScheduler = scheduler.getImageScheduler();
final IngestScheduler.DataSourceScheduler dataSourceScheduler = scheduler.getDataSourceScheduler();
final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();
logger.log(Level.INFO, "Image queue: " + imageScheduler.toString());
logger.log(Level.INFO, "DataSource queue: " + dataSourceScheduler.toString());
logger.log(Level.INFO, "File queue: " + fileScheduler.toString());
if (!ingestMonitor.isRunning()) {
@ -304,45 +303,45 @@ public class IngestManager {
}
//image ingesters
// cycle through each image in the queue
while (imageScheduler.hasNext()) {
// cycle through each data source content in the queue
while (dataSourceScheduler.hasNext()) {
//dequeue
// get next image and set of modules
final ScheduledTask<IngestModuleImage> imageTask = imageScheduler.next();
// get next data source content and set of modules
final ScheduledTask<IngestModuleDataSource> dataSourceTask = dataSourceScheduler.next();
// check if each module for this image is already running
for (IngestModuleImage taskModule : imageTask.getModules()) {
// check if each module for this data source content is already running
for (IngestModuleDataSource taskModule : dataSourceTask.getModules()) {
boolean alreadyRunning = false;
for (IngestImageThread worker : imageIngesters) {
// ignore threads that are on different images
if (!worker.getContent().equals(imageTask.getContent())) {
for (IngestDataSourceThread worker : dataSourceIngesters) {
// ignore threads that are on different data sources
if (!worker.getContent().equals(dataSourceTask.getContent())) {
continue; //check next worker
}
//same image, check module (by name, not id, since different instances)
//same data source, check module (by name, not id, since different instances)
if (worker.getModule().getName().equals(taskModule.getName())) {
alreadyRunning = true;
logger.log(Level.INFO, "Image Ingester <" + imageTask.getContent()
logger.log(Level.INFO, "Data Source Ingester <" + dataSourceTask.getContent()
+ ", " + taskModule.getName() + "> is already running");
break;
}
}
//checked all workers
if (alreadyRunning == false) {
logger.log(Level.INFO, "Starting new image Ingester <" + imageTask.getContent()
logger.log(Level.INFO, "Starting new data source Ingester <" + dataSourceTask.getContent()
+ ", " + taskModule.getName() + ">");
//image modules are now initialized per instance
//data source modules are now initialized per instance
IngestModuleInit moduleInit = new IngestModuleInit();
moduleInit.setModuleArgs(taskModule.getArguments());
PipelineContext<IngestModuleImage> imagepipelineContext =
new PipelineContext<IngestModuleImage>(imageTask, getProcessUnallocSpace());
final IngestImageThread newImageWorker = new IngestImageThread(this,
imagepipelineContext, (Image)imageTask.getContent(), taskModule, moduleInit);
PipelineContext<IngestModuleDataSource> dataSourcepipelineContext =
new PipelineContext<IngestModuleDataSource>(dataSourceTask, getProcessUnallocSpace());
final IngestDataSourceThread newDataSourceWorker = new IngestDataSourceThread(this,
dataSourcepipelineContext, dataSourceTask.getContent(), taskModule, moduleInit);
imageIngesters.add(newImageWorker);
dataSourceIngesters.add(newDataSourceWorker);
//wrap the module in a worker, that will run init, process and complete on the module
newImageWorker.execute();
newDataSourceWorker.execute();
IngestManager.fireModuleEvent(IngestModuleEvent.STARTED.toString(), taskModule.getName());
}
}
@ -395,7 +394,7 @@ public class IngestManager {
//empty queues
scheduler.getFileScheduler().empty();
scheduler.getImageScheduler().empty();
scheduler.getDataSourceScheduler().empty();
//stop module workers
if (abstractFileIngester != null) {
@ -420,24 +419,24 @@ public class IngestManager {
}
List<IngestImageThread> toStop = new ArrayList<IngestImageThread>();
toStop.addAll(imageIngesters);
List<IngestDataSourceThread> toStop = new ArrayList<IngestDataSourceThread>();
toStop.addAll(dataSourceIngesters);
for (IngestImageThread imageWorker : toStop) {
IngestModuleImage s = imageWorker.getModule();
for (IngestDataSourceThread dataSourceWorker : toStop) {
IngestModuleDataSource s = dataSourceWorker.getModule();
//stop the worker thread if thread is running
boolean cancelled = imageWorker.cancel(true);
boolean cancelled = dataSourceWorker.cancel(true);
if (!cancelled) {
logger.log(Level.INFO, "Unable to cancel image ingest worker for module: "
+ imageWorker.getModule().getName() + " img: " + imageWorker.getContent().getName());
logger.log(Level.INFO, "Unable to cancel data source ingest worker for module: "
+ dataSourceWorker.getModule().getName() + " data source: " + dataSourceWorker.getContent().getName());
}
//stop notification to module to cleanup resources
if (isModuleRunning(s)) {
try {
imageWorker.getModule().stop();
dataSourceWorker.getModule().stop();
} catch (Exception e) {
logger.log(Level.WARNING, "Exception while stopping module: " + s.getName(), e);
}
@ -458,7 +457,7 @@ public class IngestManager {
return true;
} else if (isFileIngestRunning()) {
return true;
} else if (isImageIngestRunning()) {
} else if (isDataSourceIngestRunning()) {
return true;
} else {
return false;
@ -477,7 +476,7 @@ public class IngestManager {
return true;
}
}
for (IngestImageThread thread : imageIngesters) {
for (IngestDataSourceThread thread : dataSourceIngesters) {
if (isModuleRunning(thread.getModule())) {
return false;
}
@ -506,16 +505,16 @@ public class IngestManager {
}
/**
* check the status of the image-level ingest pipeline
* check the status of the data-source-level ingest pipeline
*/
public synchronized boolean isImageIngestRunning() {
if (imageIngesters.isEmpty()) {
public synchronized boolean isDataSourceIngestRunning() {
if (dataSourceIngesters.isEmpty()) {
return false;
}
//in case there are still image ingesters in the queue but already done
//in case there are still data source ingesters in the queue but already done
boolean allDone = true;
for (IngestImageThread ii : imageIngesters) {
for (IngestDataSourceThread ii : dataSourceIngesters) {
if (ii.isDone() == false) {
allDone = false;
break;
@ -549,13 +548,13 @@ public class IngestManager {
} else {
//image module
//data source module
synchronized (this) {
if (imageIngesters.isEmpty()) {
if (dataSourceIngesters.isEmpty()) {
return false;
}
IngestImageThread imt = null;
for (IngestImageThread ii : imageIngesters) {
IngestDataSourceThread imt = null;
for (IngestDataSourceThread ii : dataSourceIngesters) {
if (ii.getModule().equals(module)) {
imt = ii;
break;
@ -639,11 +638,11 @@ public class IngestManager {
}
/**
* helper to return all loaded image modules managed sorted in order as
* helper to return all loaded data-source ingest modules, sorted in the order
* specified in pipeline_config XML
*/
public List<IngestModuleImage> enumerateImageModules() {
return moduleLoader.getImageIngestModules();
public List<IngestModuleDataSource> enumerateDataSourceModules() {
return moduleLoader.getDataSourceIngestModules();
}
/**
@ -654,11 +653,11 @@ public class IngestManager {
return moduleLoader.getAbstractFileIngestModules();
}
//image worker to remove itself when complete or interrupted
void removeImageIngestWorker(IngestImageThread worker) {
//data source worker to remove itself when complete or interrupted
void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
//remove worker
synchronized (this) {
imageIngesters.remove(worker);
dataSourceIngesters.remove(worker);
}
}
@ -674,7 +673,7 @@ public class IngestManager {
private final DateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
private final StopWatch timer = new StopWatch();
private IngestModuleAbstract currentModuleForTimer;
//file module timing stats, image module timers are logged in IngestImageThread class
//file module timing stats, datasource module timers are logged in IngestDataSourceThread class
private final Map<String, Long> fileModuleTimers = new HashMap<String, Long>();
IngestManagerStats() {
@ -924,7 +923,7 @@ public class IngestManager {
progress.switchToIndeterminate();
progress.switchToDeterminate(totalEnqueuedFiles);
}
if (processedFiles < totalEnqueuedFiles) { //fix for now to handle the same image enqueued twice
if (processedFiles < totalEnqueuedFiles) { //fix for now to handle the same datasource Content enqueued twice
++processedFiles;
}
//--totalEnqueuedFiles;
@ -999,7 +998,7 @@ public class IngestManager {
}
}
/* Thread that adds image/file and module pairs to queues */
/* Thread that adds content/file and module pairs to queues */
private class EnqueueWorker extends SwingWorker<Object, Void> {
private List<IngestModuleAbstract> modules;
@ -1065,14 +1064,14 @@ public class IngestManager {
private void queueAll(List<IngestModuleAbstract> modules, final List<Content> inputs) {
final IngestScheduler.ImageScheduler imageScheduler = scheduler.getImageScheduler();
final IngestScheduler.DataSourceScheduler dataSourceScheduler = scheduler.getDataSourceScheduler();
final IngestScheduler.FileScheduler fileScheduler = scheduler.getFileScheduler();
int processed = 0;
for (Content input : inputs) {
final String inputName = input.getName();
final List<IngestModuleImage> imageMods = new ArrayList<IngestModuleImage>();
final List<IngestModuleDataSource> dataSourceMods = new ArrayList<IngestModuleDataSource>();
final List<IngestModuleAbstractFile> fileMods = new ArrayList<IngestModuleAbstractFile>();
for (IngestModuleAbstract module : modules) {
@ -1085,11 +1084,11 @@ public class IngestManager {
progress.progress(moduleName + " " + inputName, processed);
switch (module.getType()) {
case Image:
final IngestModuleImage newModuleInstance =
(IngestModuleImage) moduleLoader.getNewIngestModuleInstance(module);
case DataSource:
final IngestModuleDataSource newModuleInstance =
(IngestModuleDataSource) moduleLoader.getNewIngestModuleInstance(module);
if (newModuleInstance != null) {
imageMods.add(newModuleInstance);
dataSourceMods.add(newModuleInstance);
} else {
logger.log(Level.INFO, "Error loading module and adding input " + inputName
+ " with module " + module.getName());
@ -1112,16 +1111,16 @@ public class IngestManager {
//queue to schedulers
//queue to image-level ingest pipeline(s)
//queue to datasource-level ingest pipeline(s)
final boolean processUnalloc = getProcessUnallocSpace();
final ScheduledTask<IngestModuleImage> imageTask =
new ScheduledTask<IngestModuleImage>(input, imageMods);
final PipelineContext<IngestModuleImage> imagepipelineContext =
new PipelineContext<IngestModuleImage>(imageTask, processUnalloc);
logger.log(Level.INFO, "Queing image ingest task: " + imageTask);
progress.progress("Image Ingest" + " " + inputName, processed);
imageScheduler.schedule(imagepipelineContext);
progress.progress("Image Ingest" + " " + inputName, ++processed);
final ScheduledTask<IngestModuleDataSource> dataSourceTask =
new ScheduledTask<IngestModuleDataSource>(input, dataSourceMods);
final PipelineContext<IngestModuleDataSource> dataSourcePipelineContext =
new PipelineContext<IngestModuleDataSource>(dataSourceTask, processUnalloc);
logger.log(Level.INFO, "Queing data source ingest task: " + dataSourceTask);
progress.progress("DataSource Ingest" + " " + inputName, processed);
dataSourceScheduler.schedule(dataSourcePipelineContext);
progress.progress("DataSource Ingest" + " " + inputName, ++processed);
//queue to file-level ingest pipeline
final ScheduledTask<IngestModuleAbstractFile> fTask =
@ -1133,7 +1132,7 @@ public class IngestManager {
fileScheduler.schedule(filepipelineContext);
progress.progress("File Ingest" + " " + inputName, ++processed);
} //for images
} //for data sources
//logger.log(Level.INFO, AbstractFileQueue.printQueue());
@ -1143,7 +1142,7 @@ public class IngestManager {
logger.log(Level.SEVERE, "Error while enqueing files. ", ex);
//empty queues
scheduler.getFileScheduler().empty();
scheduler.getImageScheduler().empty();
scheduler.getDataSourceScheduler().empty();
}
}
}
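For reference, a hypothetical caller of IngestManager's batch execute(List, List) shown in this file might look as follows; the getDefault() accessor and the two Content variables are assumptions, not part of this diff:

    // Enqueue two data sources against the same set of modules; execute()
    // does not block, and workers are started by startAll() internally.
    List<Content> inputs = new ArrayList<Content>();
    inputs.add(imageDataSource);       // an Image
    inputs.add(logicalFilesDirectory); // a parent-less VirtualDirectory
    IngestManager.getDefault().execute(modules, inputs);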

View File

@ -350,7 +350,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener {
for (IngestModuleAbstract module : manager.enumerateAbstractFileModules()) {
groupings.put(module, new HashMap<String, List<IngestMessageGroup>>());
}
for (IngestModuleAbstract module : manager.enumerateImageModules()) {
for (IngestModuleAbstract module : manager.enumerateDataSourceModules()) {
groupings.put(module, new HashMap<String, List<IngestMessageGroup>>());
}
}

View File

@ -39,7 +39,7 @@ import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
/**
* Top component which displays something.
@ -327,7 +327,7 @@ public final class IngestMessageTopComponent extends TopComponent implements Ing
}
@Override
public void displayIngestDialog(final Image image) {
public void displayIngestDialog(final Content ingestDataSource) {
/*
final IngestDialog ingestDialog = new IngestDialog();
ingestDialog.setImage(image);

View File

@ -32,9 +32,9 @@ public abstract class IngestModuleAbstract {
*/
public enum ModuleType {
/**
* Image type module
* DataSource type module
*/
Image,
DataSource,
/**
* AbstractFile type module
@ -94,7 +94,7 @@ public abstract class IngestModuleAbstract {
abstract public String getDescription();
/**
* Returns type of the module (Image-level or file-level)
* Returns type of the module (data source-level or file-level)
* @return module type
*/
abstract public ModuleType getType();
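A sketch of dispatching on the renamed enum, mirroring the EnqueueWorker switch earlier in this commit (module stands for any IngestModuleAbstract):

    switch (module.getType()) {
        case DataSource:
            // data source modules get a fresh instance per data source
            break;
        case AbstractFile:
            // file modules are shared across the file ingest pipeline
            break;
    }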

View File

@ -21,7 +21,7 @@ package org.sleuthkit.autopsy.ingest;
import org.sleuthkit.datamodel.AbstractFile;
/**
* Ingest module interface that will be called for every file in the image
* Ingest module interface that will be called for every file in the data source Content
*/
public abstract class IngestModuleAbstractFile extends IngestModuleAbstract {

View File

@ -18,7 +18,7 @@
*/
package org.sleuthkit.autopsy.ingest;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
/**
* Ingest module that acts on an entire data source.
@ -26,15 +26,15 @@ import org.sleuthkit.datamodel.Image;
* A new instance of this module will be created for each data source.
* Therefore, data source-level modules can assume that the process() method will be called at most once after init() is called.
*/
public abstract class IngestModuleImage extends IngestModuleAbstract {
public abstract class IngestModuleDataSource extends IngestModuleAbstract {
@Override
public ModuleType getType() {
return ModuleType.Image;
return ModuleType.DataSource;
}
/**
* Called with the image to analyze.
* Called with the data source Content object to analyze.
*
* Modules typically use FileManager to get specific files to analyze.
*
@ -42,11 +42,11 @@ public abstract class IngestModuleImage extends IngestModuleAbstract {
* The module should also send messages to the ingest inbox with interesting events (data, errors, warnings, infos).
* The module notifies data viewers by firing events using IngestManagerProxy.fireModuleDataEvent
*
* The module will have its own progress bar while it is running and it should update it with the IngestImageWorkerController object.
* The module will have its own progress bar while it is running and it should update it with the IngestDataSourceWorkerController object.
*
* @param pipelineContext Context in which the ingest pipeline is running (Settings, modules, etc)
* @param image Image to process
* @param dataSource data source to process (such as Image, file-set VirtualDirectory, etc.)
* @param controller Used to update progress bar and to check if the task has been canceled.
*/
abstract public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller);
abstract public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller);
}
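Putting the new contract together, a data-source-level module now has roughly this shape. SampleDSModule is hypothetical, and the remaining IngestModuleAbstract overrides (getName(), getDescription(), init(), complete(), stop(), ...) are elided for brevity:

    public class SampleDSModule extends IngestModuleDataSource {
        @Override
        public void process(PipelineContext<IngestModuleDataSource> pipelineContext,
                Content dataSource, IngestDataSourceWorkerController controller) {
            // report a known amount of work, then analyze the data source,
            // e.g. via FileManager.findFiles(dataSource, ...)
            controller.switchToDeterminate(1);
        }
        // ... plus the remaining IngestModuleAbstract overrides
    }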

View File

@ -100,7 +100,7 @@ public final class IngestModuleLoader {
private final List<IngestModuleLoader.XmlPipelineRaw> pipelinesXML;
//validated pipelines with instantiated modules
private final List<IngestModuleAbstractFile> filePipeline;
private final List<IngestModuleImage> imagePipeline;
private final List<IngestModuleDataSource> dataSourcePipeline;
private static final Logger logger = Logger.getLogger(IngestModuleLoader.class.getName());
private ClassLoader classLoader;
private PropertyChangeSupport pcs;
@ -121,7 +121,7 @@ public final class IngestModuleLoader {
private IngestModuleLoader() {
pipelinesXML = new ArrayList<IngestModuleLoader.XmlPipelineRaw>();
filePipeline = new ArrayList<IngestModuleAbstractFile>();
imagePipeline = new ArrayList<IngestModuleImage>();
dataSourcePipeline = new ArrayList<IngestModuleDataSource>();
dateFormatter = new SimpleDateFormat(DATE_FORMAT);
String numModDiscoveredStr = ModuleSettings.getConfigSetting(IngestManager.MODULE_PROPERTIES, CUR_MODULES_DISCOVERED_SETTING);
@ -263,8 +263,8 @@ public final class IngestModuleLoader {
} catch (SecurityException ex) {
Exceptions.printStackTrace(ex);
}
} //if image module: check if has public constructor with no args
else if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS) {
} //if data source module: check if has public constructor with no args
else if (pType == IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS) {
try {
Constructor<?> constr = moduleClass.getConstructor();
int modifiers = constr.getModifiers();
@ -487,10 +487,10 @@ public final class IngestModuleLoader {
logger.log(Level.INFO, "Found file ingest module in: " + basePackageName + ": " + it.next().toString());
}
Set<?> imageModules = reflections.getSubTypesOf(IngestModuleImage.class);
it = imageModules.iterator();
Set<?> dataSourceModules = reflections.getSubTypesOf(IngestModuleDataSource.class);
it = dataSourceModules.iterator();
while (it.hasNext()) {
logger.log(Level.INFO, "Found image ingest module in: " + basePackageName + ": " + it.next().toString());
logger.log(Level.INFO, "Found DataSource ingest module in: " + basePackageName + ": " + it.next().toString());
}
//find out which modules to add
@ -528,13 +528,13 @@ public final class IngestModuleLoader {
}
it = imageModules.iterator();
it = dataSourceModules.iterator();
while (it.hasNext()) {
boolean exists = false;
Class<IngestModuleImage> foundClass = (Class<IngestModuleImage>) it.next();
Class<IngestModuleDataSource> foundClass = (Class<IngestModuleDataSource>) it.next();
for (IngestModuleLoader.XmlPipelineRaw rawP : pipelinesXML) {
if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS.toString())) {
if (!rawP.type.equals(IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS.toString())) {
continue; //skip
}
@ -552,9 +552,9 @@ public final class IngestModuleLoader {
}
if (exists == false) {
logger.log(Level.INFO, "Discovered a new image module to load: " + foundClass.getName());
logger.log(Level.INFO, "Discovered a new DataSource module to load: " + foundClass.getName());
//ADD MODULE
addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.IMAGE_ANALYSIS);
addModuleToRawPipeline(foundClass, IngestModuleLoader.XmlPipelineRaw.PIPELINE_TYPE.DATA_SOURCE_ANALYSIS);
modulesChanged = true;
}
@ -703,7 +703,7 @@ public final class IngestModuleLoader {
//clear current
filePipeline.clear();
imagePipeline.clear();
dataSourcePipeline.clear();
//add autodiscovered modules to pipelinesXML
autodiscover();
@ -768,18 +768,18 @@ public final class IngestModuleLoader {
}
filePipeline.add(fileModuleInstance);
break;
case IMAGE_ANALYSIS:
final Class<IngestModuleImage> imageModuleClass =
(Class<IngestModuleImage>) Class.forName(pMod.location, true, classLoader);
case DATA_SOURCE_ANALYSIS:
final Class<IngestModuleDataSource> dataSourceModuleClass =
(Class<IngestModuleDataSource>) Class.forName(pMod.location, true, classLoader);
try {
Constructor<IngestModuleImage> constr = imageModuleClass.getConstructor();
IngestModuleImage imageModuleInstance = constr.newInstance();
Constructor<IngestModuleDataSource> constr = dataSourceModuleClass.getConstructor();
IngestModuleDataSource dataSourceModuleInstance = constr.newInstance();
if (imageModuleInstance != null) {
if (dataSourceModuleInstance != null) {
//set arguments
imageModuleInstance.setArguments(pMod.arguments);
imagePipeline.add(imageModuleInstance);
dataSourceModuleInstance.setArguments(pMod.arguments);
dataSourcePipeline.add(dataSourceModuleInstance);
}
} catch (NoSuchMethodException | SecurityException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
@ -940,12 +940,12 @@ public final class IngestModuleLoader {
}
/**
* Get loaded image modules
* Get loaded data source modules
*
* @return image modules loaded
* @return data source modules loaded
*/
public List<IngestModuleImage> getImageIngestModules() {
return imagePipeline;
public List<IngestModuleDataSource> getDataSourceIngestModules() {
return dataSourcePipeline;
}
//pipeline XML representation
@ -964,7 +964,7 @@ public final class IngestModuleLoader {
return IngestModuleAbstractFile.class;
}
},
IMAGE_ANALYSIS {
DATA_SOURCE_ANALYSIS {
@Override
public String toString() {
return "ImageAnalysis";
@ -972,7 +972,7 @@ public final class IngestModuleLoader {
@Override
public Class getIngestModuleInterface() {
return IngestModuleImage.class;
return IngestModuleDataSource.class;
}
},;
}
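The autodiscovery above relies on the Reflections library; in isolation the pattern is roughly the following (the package prefix is illustrative):

    import java.util.Set;
    import org.reflections.Reflections;

    Reflections reflections = new Reflections("org.sleuthkit.autopsy");
    Set<Class<? extends IngestModuleDataSource>> found =
            reflections.getSubTypesOf(IngestModuleDataSource.class);
    for (Class<? extends IngestModuleDataSource> c : found) {
        System.out.println("Found DataSource ingest module: " + c.getName());
    }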

View File

@ -40,7 +40,6 @@ import org.sleuthkit.datamodel.DerivedFile;
import org.sleuthkit.datamodel.Directory;
import org.sleuthkit.datamodel.File;
import org.sleuthkit.datamodel.FileSystem;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.VirtualDirectory;
import org.sleuthkit.datamodel.LayoutFile;
import org.sleuthkit.datamodel.LocalFile;
@ -51,12 +50,12 @@ import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
/**
* Schedules images and files with their associated modules for ingest, and
* Schedules data sources (images, file sets, etc.) and files with their associated modules for ingest, and
* manages queues of the scheduled tasks.
*
* Currently a singleton object only.
* Currently a singleton object only (as there is one pipeline at a time)
*
* Contains internal schedulers for content objects into image and file ingest
* Contains internal schedulers for content objects into data source and file ingest
* pipelines.
*
*/
@ -64,7 +63,7 @@ class IngestScheduler {
private static IngestScheduler instance;
private static Logger logger = Logger.getLogger(IngestScheduler.class.getName());
private final ImageScheduler imageScheduler = new ImageScheduler();
private final DataSourceScheduler dataSourceScheduler = new DataSourceScheduler();
private final FileScheduler fileScheduler = new FileScheduler();
private IngestScheduler() {
@ -83,8 +82,8 @@ class IngestScheduler {
return instance;
}
ImageScheduler getImageScheduler() {
return imageScheduler;
DataSourceScheduler getDataSourceScheduler() {
return dataSourceScheduler;
}
FileScheduler getFileScheduler() {
@ -94,10 +93,10 @@ class IngestScheduler {
/**
* FileScheduler ingest scheduler
*
* Supports addition ScheduledTasks - tuples of (image, modules)
* Supports adding ScheduledTasks - tuples of (data source, modules)
*
* Enqueues files and modules, and sorts the files by priority. Maintains
* only top level directories in memory, not all files in image.
* only top-level directories in memory (not all child files of the scheduled container content objects)
*
* getNext() will return next ProcessTask - tuple of (file, modules)
*
@ -110,7 +109,7 @@ class IngestScheduler {
private List<ProcessTask> curDirProcessTasks;
//list of files being processed in the currently processed directory
private LinkedList<ProcessTask> curFileProcessTasks; //need to add to start and end quickly
//estimated files to be enqueued for current images
//estimated total files to be enqueued for currently scheduled content objects
private int filesEnqueuedEst;
private int filesDequeued;
private final static int FAT_NTFS_FLAGS =
@ -188,7 +187,7 @@ class IngestScheduler {
/**
* Get number of files dequeued so far. This is reset after the same
* image is enqueued that is already in a queue
* content is enqueued that is already in a queue
*
* @return number of files dequeued so far
*/
@ -197,8 +196,8 @@ class IngestScheduler {
}
/**
* Task for a specific file to process.
* More specific than the higher-level ScheduledTask.
* Task for a specific file to process. More specific than the
* higher-level ScheduledTask.
*/
static class ProcessTask {
@ -260,7 +259,7 @@ class IngestScheduler {
}
/**
* Create 1 or more ProcessTasks for each root dir in the image from
* Create 1 or more ProcessTasks for each root dir in the Content from
* the context supplied
*
* @param context the original ingest context
@ -358,8 +357,8 @@ class IngestScheduler {
* as the parent origin file.
*
* @param file file to be scheduled
* @param originalContext original image schedule context that was used
* to schedule the parent origin file, with the modules, settings, etc.
* @param originalContext original content schedule context that was used
* to schedule the parent origin content, with the modules, settings, etc.
*/
synchronized void schedule(AbstractFile file, PipelineContext originalContext) {
ScheduledTask originalTask = originalContext.getScheduledTask();
@ -382,7 +381,7 @@ class IngestScheduler {
* Schedule new Content object for a file ingest with associated
* modules.
*
* @param task image schedule task with image and associated modules
* @param context context to schedule, with scheduled task containing content to process and modules
*/
synchronized void schedule(PipelineContext<IngestModuleAbstractFile> context) {
@ -396,13 +395,13 @@ class IngestScheduler {
final Content contentToSchedule = task.getContent();
if (getSourceContent().contains(contentToSchedule)) {
//reset counters if the same image enqueued twice
//reset counters if the same content enqueued twice
//Note, not very accurate, because we may have processed some files from
//another image
//another content
this.filesDequeued = 0;
}
//remove duplicate scheduled tasks for this image if enqueued previously
//remove duplicate scheduled tasks still in queues for this content if enqueued previously
removeDupTasks(task);
List<ProcessTask> rootTasks = ProcessTask.createFromScheduledTask(context);
@ -752,9 +751,9 @@ class IngestScheduler {
}
/**
* Get counts of ingestable files/dirs for the image input source.
* Get counts of ingestable files/dirs for the content input source.
*
* Includes counts of all unalloc files (for the fs, image, volume) even
* Note: also includes counts of all unalloc child files (for the fs, image, volume) even
* if ingest didn't ask for them
*/
static class GetFilesCountVisitor extends ContentVisitor.Default<Long> {
@ -884,32 +883,38 @@ class IngestScheduler {
}
/**
* ImageScheduler ingest scheduler
* DataSourceScheduler ingest scheduler
*/
static class ImageScheduler implements Iterator<ScheduledTask<IngestModuleImage>> {
static class DataSourceScheduler implements Iterator<ScheduledTask<IngestModuleDataSource>> {
private LinkedList<ScheduledTask<IngestModuleImage>> tasks;
private LinkedList<ScheduledTask<IngestModuleDataSource>> tasks;
ImageScheduler() {
tasks = new LinkedList<ScheduledTask<IngestModuleImage>>();
DataSourceScheduler() {
tasks = new LinkedList<ScheduledTask<IngestModuleDataSource>>();
}
synchronized void schedule(PipelineContext<IngestModuleImage> context) {
synchronized void schedule(PipelineContext<IngestModuleDataSource> context) {
ScheduledTask<IngestModuleImage> task = context.getScheduledTask();
ScheduledTask<IngestModuleDataSource> task = context.getScheduledTask();
//skip if task contains no modules
if (task.getModules().isEmpty()) {
return;
}
if (!(task.getContent() instanceof Image)) {
//only accepting Image content objects
try {
if (task.getContent().getParent() != null) {
//only accepting parent-less content objects (Image, parentless VirtualDirectory)
logger.log(Level.SEVERE, "Only parent-less Content (data sources) can be scheduled for DataSource ingest, skipping: " + task.getContent());
return;
}
} catch (TskCoreException e) {
logger.log(Level.SEVERE, "Error validating data source to be scheduled for DataSource ingest" + task.getContent(), e);
return;
}
ScheduledTask<IngestModuleImage> existTask = null;
for (ScheduledTask<IngestModuleImage> curTask : tasks) {
ScheduledTask<IngestModuleDataSource> existTask = null;
for (ScheduledTask<IngestModuleDataSource> curTask : tasks) {
if (curTask.getContent().equals(task.getContent())) {
existTask = curTask;
break;
@ -917,7 +922,7 @@ class IngestScheduler {
}
if (existTask != null) {
//merge modules for the image task
//merge modules for the data source task
existTask.addModules(task.getModules());
} else {
//enqueue a new task
@ -926,23 +931,23 @@ class IngestScheduler {
}
@Override
public synchronized ScheduledTask<IngestModuleImage> next() throws IllegalStateException {
public synchronized ScheduledTask<IngestModuleDataSource> next() throws IllegalStateException {
if (!hasNext()) {
throw new IllegalStateException("There is image tasks in the queue, check hasNext()");
throw new IllegalStateException("There is no data source tasks in the queue, check hasNext()");
}
final ScheduledTask<IngestModuleImage> ret = tasks.pollFirst();
final ScheduledTask<IngestModuleDataSource> ret = tasks.pollFirst();
return ret;
}
/**
* get all images that are scheduled to process
* get all data sources that are scheduled to process
*
* @return list of images in the queue scheduled to process
* @return list of data sources in the queue scheduled to process
*/
synchronized List<org.sleuthkit.datamodel.Content> getContents() {
List<org.sleuthkit.datamodel.Content> contents = new ArrayList<org.sleuthkit.datamodel.Content>();
for (ScheduledTask<IngestModuleImage> task : tasks) {
for (ScheduledTask<IngestModuleDataSource> task : tasks) {
contents.add(task.getContent());
}
return contents;
@ -955,7 +960,7 @@ class IngestScheduler {
@Override
public void remove() {
throw new UnsupportedOperationException("Removing of scheduled image ingest tasks is not supported. ");
throw new UnsupportedOperationException("Removing of scheduled data source ingest tasks is not supported. ");
}
synchronized void empty() {
@ -969,8 +974,8 @@ class IngestScheduler {
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("ImageQueue, size: ").append(getCount());
for (ScheduledTask<IngestModuleImage> task : tasks) {
sb.append("DataSourceQueue, size: ").append(getCount());
for (ScheduledTask<IngestModuleDataSource> task : tasks) {
sb.append(task.toString()).append(" ");
}
return sb.toString();
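The scheduling guard above encodes this commit's working definition of a data source; distilled into a sketch (isDataSource is a hypothetical helper, the parent check comes from the schedule() code in this file):

    // A "data source" is any Content with no parent: an Image, or a
    // parent-less VirtualDirectory holding logical files.
    static boolean isDataSource(Content content) throws TskCoreException {
        return content.getParent() == null;
    }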

View File

@ -19,7 +19,7 @@
package org.sleuthkit.autopsy.ingest;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
/**
* UI support for ingest
@ -29,7 +29,7 @@ public interface IngestUI {
int getMessagesCount();
void clearMessages();
void restoreMessages();
void displayIngestDialog(final Image image);
void displayIngestDialog(final Content ingestDataSource);
void displayReport(final String report);
}

View File

@ -25,7 +25,7 @@ import java.util.Objects;
* Stores information about a given pipeline, which is a series of modules.
* This is passed into modules for their reference.
*
* @param T type of the ingest associated with the context (file or image)
* @param T type of the ingest associated with the context (file or data source Content)
*
*/
public class PipelineContext <T extends IngestModuleAbstract> {

View File

@ -26,7 +26,7 @@ import org.sleuthkit.datamodel.Content;
* A task that will be scheduled. Contains the top-level data to analyze and the pipeline.
* Children of the data will also be scheduled.
*
* @param T type of Ingest Module / Pipeline (file or image) associated with this task
* @param T type of Ingest Module / Pipeline (file or data source content) associated with this task
*/
class ScheduledTask<T extends IngestModuleAbstract> {
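Concretely, the two pipeline flavors differ only in this type parameter; a sketch using the constructor shape seen in EnqueueWorker above (inputContent and the module lists are placeholders):

    ScheduledTask<IngestModuleDataSource> dsTask =
            new ScheduledTask<IngestModuleDataSource>(inputContent, dataSourceMods);
    ScheduledTask<IngestModuleAbstractFile> fileTask =
            new ScheduledTask<IngestModuleAbstractFile>(inputContent, fileMods);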

View File

@ -27,7 +27,6 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
@ -36,20 +35,19 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
@ -81,27 +79,27 @@ public class Chrome extends Extract {
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
this.getHistory(image, controller);
this.getBookmark(image, controller);
this.getCookie(image, controller);
this.getLogin(image, controller);
this.getDownload(image, controller);
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
this.getLogin(dataSource, controller);
this.getDownload(dataSource, controller);
}
private void getHistory(Image image, IngestImageWorkerController controller) {
private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> historyFiles = null;
List<AbstractFile> historyFiles = null;
try {
historyFiles = fileManager.findFiles(image, "History", "Chrome");
historyFiles = fileManager.findFiles(dataSource, "History", "Chrome");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error when trying to get Chrome history files.", ex);
}
// get only the allocated ones, for now
List<FsContent> allocatedHistoryFiles = new ArrayList<>();
for (FsContent historyFile : historyFiles) {
List<AbstractFile> allocatedHistoryFiles = new ArrayList<>();
for (AbstractFile historyFile : historyFiles) {
if (historyFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC)) {
allocatedHistoryFiles.add(historyFile);
}
@ -117,7 +115,7 @@ public class Chrome extends Extract {
while (j < historyFiles.size()) {
String temps = currentCase.getTempDirectory() + File.separator + historyFiles.get(j).getName().toString() + j + ".db";
int errors = 0;
final FsContent historyFile = historyFiles.get(j++);
final AbstractFile historyFile = historyFiles.get(j++);
if (historyFile.getSize() == 0) {
continue;
}
@ -160,12 +158,12 @@ public class Chrome extends Extract {
services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY));
}
private void getBookmark(Image image, IngestImageWorkerController controller) {
private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> bookmarkFiles = null;
List<AbstractFile> bookmarkFiles = null;
try {
bookmarkFiles = fileManager.findFiles(image, "Bookmarks", "Chrome");
bookmarkFiles = fileManager.findFiles(dataSource, "Bookmarks", "Chrome");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error when trying to get Chrome history files.", ex);
}
@ -173,7 +171,7 @@ public class Chrome extends Extract {
int j = 0;
if (bookmarkFiles != null && !bookmarkFiles.isEmpty()) {
while (j < bookmarkFiles.size()) {
FsContent bookmarkFile = bookmarkFiles.get(j++);
AbstractFile bookmarkFile = bookmarkFiles.get(j++);
String temps = currentCase.getTempDirectory() + File.separator + bookmarkFile.getName().toString() + j + ".db";
int errors = 0;
try {
@ -258,12 +256,12 @@ public class Chrome extends Extract {
//COOKIES section
// This gets the cookie info
private void getCookie(Image image, IngestImageWorkerController controller) {
private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> cookiesFiles = null;
List<AbstractFile> cookiesFiles = null;
try {
cookiesFiles = fileManager.findFiles(image, "Cookies", "Chrome");
cookiesFiles = fileManager.findFiles(dataSource, "Cookies", "Chrome");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error when trying to get Chrome history files.", ex);
}
@ -271,7 +269,7 @@ public class Chrome extends Extract {
int j = 0;
if (cookiesFiles != null && !cookiesFiles.isEmpty()) {
while (j < cookiesFiles.size()) {
FsContent cookiesFile = cookiesFiles.get(j++);
AbstractFile cookiesFile = cookiesFiles.get(j++);
String temps = currentCase.getTempDirectory() + File.separator + cookiesFile.getName().toString() + j + ".db";
int errors = 0;
try {
@ -318,12 +316,12 @@ public class Chrome extends Extract {
//Downloads section
// This gets the downloads info
private void getDownload(Image image, IngestImageWorkerController controller) {
private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> historyFiles = null;
List<AbstractFile> historyFiles = null;
try {
historyFiles = fileManager.findFiles(image, "History", "Chrome");
historyFiles = fileManager.findFiles(dataSource, "History", "Chrome");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error when trying to get Chrome history files.", ex);
}
@ -331,7 +329,7 @@ public class Chrome extends Extract {
int j = 0;
if (historyFiles != null && !historyFiles.isEmpty()) {
while (j < historyFiles.size()) {
FsContent historyFile = historyFiles.get(j++);
AbstractFile historyFile = historyFiles.get(j++);
if (historyFile.getSize() == 0) {
continue;
}
@ -354,7 +352,7 @@ public class Chrome extends Extract {
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "Recent Activity", (result.get("full_path").toString())));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "Recent Activity", Util.findID(image, (result.get("full_path").toString()))));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "Recent Activity", Util.findID(dataSource, (result.get("full_path").toString()))));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? result.get("url").toString() : "")));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "Recent Activity", ((result.get("url").toString() != null) ? EscapeUtil.decodeURL(result.get("url").toString()) : "")));
Long time = (Long.valueOf(result.get("start_time").toString()));
@@ -382,12 +380,12 @@ public class Chrome extends Extract {
//Login/Password section
// This gets the user info
private void getLogin(Image image, IngestImageWorkerController controller) {
private void getLogin(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> signonFiles = null;
List<AbstractFile> signonFiles = null;
try {
signonFiles = fileManager.findFiles(image, "signons.sqlite", "Chrome");
signonFiles = fileManager.findFiles(dataSource, "signons.sqlite", "Chrome");
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "Error when trying to get Chrome signon files.", ex);
}
@@ -395,7 +393,7 @@ public class Chrome extends Extract {
int j = 0;
if (signonFiles != null && !signonFiles.isEmpty()) {
while (j < signonFiles.size()) {
FsContent signonFile = signonFiles.get(j++);
AbstractFile signonFile = signonFiles.get(j++);
String temps = currentCase.getTempDirectory() + File.separator + signonFile.getName().toString() + j + ".db";
int errors = 0;
try {
@@ -450,7 +448,7 @@ public class Chrome extends Extract {
@Override
public void stop() {
logger.info("Attmped to stop chrome extract, but operation is not supported; skipping...");
logger.info("Attempted to stop chrome extract, but operation is not supported; skipping...");
}
@Override

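Every Chrome lookup above now follows the same shape: ask the case's FileManager for files scoped to one data source, then iterate the results. A minimal sketch of that shape, assuming an open case and using only the findFiles(Content, String, String) API introduced by this commit; the class and method names in the sketch itself are hypothetical:

import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

class ChromeLookupSketch {
    private static final Logger logger = Logger.getLogger(ChromeLookupSketch.class.getName());

    // Find a named Chrome database inside one data source only; dataSource may
    // be any data source Content (Image, parent-less VirtualDirectory).
    static List<AbstractFile> findChromeDb(Content dataSource, String dbName) {
        FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
        try {
            return fileManager.findFiles(dataSource, dbName, "Chrome");
        } catch (TskCoreException ex) {
            logger.log(Level.SEVERE, "Error when trying to get Chrome " + dbName + " files.", ex);
            return null;
        }
    }
}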
View File

@@ -30,11 +30,11 @@ import java.util.*;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.report.SQLiteDBConnect;
import org.sleuthkit.datamodel.*;
abstract public class Extract extends IngestModuleImage{
abstract public class Extract extends IngestModuleDataSource{
protected Case currentCase = Case.getCurrentCase(); // get the most updated case
protected SleuthkitCase tskCase = currentCase.getSleuthkitCase();
@@ -51,62 +51,18 @@ abstract public class Extract extends IngestModuleImage{
return errorMessages;
}
/**
* Returns a List of FsContent objects from TSK based on sql query.
* Generic method for adding a blackboard artifact to the blackboard
*
* @param image is a Image object that denotes which image to get the files from
* @param query is a sql string query that is to be run
* @return FFSqlitedb is a List of FsContent objects
* @param type is a blackboard.artifact_type enum to determine which type
* the artifact should be
* @param content is the AbstractFile object that needs to have the artifact
* added for it
* @param bbattributes is the collection of blackboard attributes that need
* to be added to the artifact after the artifact has been created
*/
@SuppressWarnings("deprecation")
public List<FsContent> extractFiles(Image image, String query) {
Collection<FileSystem> imageFS = tskCase.getFileSystems(image);
List<String> fsIds = new LinkedList<String>();
for (FileSystem img : imageFS) {
Long tempID = img.getId();
fsIds.add(tempID.toString());
}
String allFS = new String();
for (int i = 0; i < fsIds.size(); i++) {
if (i == 0) {
allFS += " AND (0";
}
allFS += " OR fs_obj_id = '" + fsIds.get(i) + "'";
if (i == fsIds.size() - 1) {
allFS += ")";
}
}
List<FsContent> FFSqlitedb = null;
ResultSet rs = null;
try {
rs = tskCase.runQuery(query + allFS);
FFSqlitedb = tskCase.resultSetToFsContents(rs);
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error while trying to extract files for:" + this.getClass().getName(), ex);
this.addErrorMessage(this.getName() + ": Error while trying to extract files to analyze.");
}
finally {
if (rs != null) {
try {
tskCase.closeRunQuery(rs);
} catch (SQLException ex) {
logger.log(Level.SEVERE, "Error while trying to close result set after extract files for:" + this.getClass().getName(), ex);
}
}
}
return FFSqlitedb;
}
/**
* Generic method for adding a blackboard artifact to the blackboard
*
* @param type is a blackboard.artifact_type enum to determine which type the artifact should be
* @param content is the FsContent object that needs to have the artifact added for it
* @param bbattributes is the collection of blackboard attributes that need to be added to the artifact after the artifact has been created
*/
public void addArtifact(BlackboardArtifact.ARTIFACT_TYPE type, FsContent content, Collection<BlackboardAttribute> bbattributes) {
public void addArtifact(BlackboardArtifact.ARTIFACT_TYPE type, AbstractFile content, Collection<BlackboardAttribute> bbattributes) {
try {
BlackboardArtifact bbart = content.newArtifact(type);
@@ -116,8 +72,9 @@ abstract public class Extract extends IngestModuleImage{
}
}
/**
* Returns a List from a result set based on sql query.
* This is used to query sqlite databases storing user recent activity data, such as in firefox sqlite db
*
* @param path is the string path to the sqlite db file
* @param query is a sql string query that is to be run
@@ -140,12 +97,12 @@ abstract public class Extract extends IngestModuleImage{
}
/**
* Returns a List of FsContent objects from TSK based on sql query.
* Converts a ResultSet into a List of HashMaps, one row per map, keyed by column name.
*
* @param rs is the resultset that needs to be converted to an arraylist
* @return list returns the arraylist built from the converted resultset
*/
public List<HashMap<String,Object>> resultSetToArrayList(ResultSet rs) throws SQLException {
private List<HashMap<String,Object>> resultSetToArrayList(ResultSet rs) throws SQLException {
ResultSetMetaData md = rs.getMetaData();
int columns = md.getColumnCount();
List<HashMap<String,Object>> list = new ArrayList<HashMap<String,Object>>(50);

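resultSetToArrayList() above is the standard ResultSet-to-row-map conversion. The same pattern in a standalone, plain-JDBC form (no Autopsy types; written here as an illustration, not the module's code):

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

class ResultSetSketch {
    // Convert each row of a ResultSet into a HashMap keyed by column name.
    static List<HashMap<String, Object>> toRows(ResultSet rs) throws SQLException {
        ResultSetMetaData md = rs.getMetaData();
        int columns = md.getColumnCount();
        List<HashMap<String, Object>> rows = new ArrayList<HashMap<String, Object>>(50);
        while (rs.next()) {
            HashMap<String, Object> row = new HashMap<String, Object>(columns);
            for (int i = 1; i <= columns; i++) {   // JDBC column indexes are 1-based
                row.put(md.getColumnName(i), rs.getObject(i));
            }
            rows.add(row);
        }
        return rows;
    }
}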
View File

@@ -41,7 +41,6 @@ import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -54,7 +53,7 @@ import org.sleuthkit.autopsy.coreutils.JLNK;
import org.sleuthkit.autopsy.coreutils.JLnkParser;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.datamodel.KeyValue;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -62,11 +61,10 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.datamodel.*;
@@ -76,14 +74,14 @@ public class ExtractIE extends Extract {
private IngestServices services;
private String recentQuery = "select * from `tsk_files` where parent_path LIKE '%/Recent%' and name LIKE '%.lnk'";
//sleuthkit db handle
SleuthkitCase tempDb;
SleuthkitCase tskCase;
//paths set in init()
private String PASCO_RESULTS_PATH;
private String PASCO_LIB_PATH;
private String JAVA_PATH;
//Results List to be referenced/used outside the class
public ArrayList<HashMap<String, Object>> PASCO_RESULTS_LIST = new ArrayList<HashMap<String, Object>>();
// List of Pasco result files for this image
// List of Pasco result files for this data source
private List<String> pascoResults;
//Look Up Table that holds Pasco2 results
private HashMap<String, Object> PASCO_RESULTS_LUT;
@@ -107,29 +105,29 @@ public class ExtractIE extends Extract {
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
this.getHistory(image, controller);
this.getBookmark(image, controller);
this.getCookie(image, controller);
this.getRecentDocuments(image, controller);
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);
this.getCookie(dataSource, controller);
this.getRecentDocuments(dataSource, controller);
this.parsePascoResults(pascoResults);
}
//Favorites section
// This gets the favorite info
private void getBookmark(Image image, IngestImageWorkerController controller) {
private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) {
int errors = 0;
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> favoritesFiles = null;
List<AbstractFile> favoritesFiles = null;
try {
favoritesFiles = fileManager.findFiles(image, "%.url", "Favorites");
favoritesFiles = fileManager.findFiles(dataSource, "%.url", "Favorites");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching '.url' files for Internet Explorer bookmarks.");
}
for (FsContent favoritesFile : favoritesFiles) {
for (AbstractFile favoritesFile : favoritesFiles) {
if (controller.isCancelled()) {
break;
}
@@ -175,18 +173,18 @@ public class ExtractIE extends Extract {
//Cookies section
// This gets the cookies info
private void getCookie(Image image, IngestImageWorkerController controller) {
private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> cookiesFiles = null;
List<AbstractFile> cookiesFiles = null;
try {
cookiesFiles = fileManager.findFiles(image, "%.txt", "Cookies");
cookiesFiles = fileManager.findFiles(dataSource, "%.txt", "Cookies");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching cookies files for Internet Explorer.");
}
int errors = 0;
for (FsContent cookiesFile : cookiesFiles) {
for (AbstractFile cookiesFile : cookiesFiles) {
if (controller.isCancelled()) {
break;
}
@@ -230,17 +228,17 @@ public class ExtractIE extends Extract {
//Recent Documents section
// This gets the recent object info
private void getRecentDocuments(Image image, IngestImageWorkerController controller) {
private void getRecentDocuments(Content dataSource, IngestDataSourceWorkerController controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> recentFiles = null;
List<AbstractFile> recentFiles = null;
try {
recentFiles = fileManager.findFiles(image, "%.lnk", "Recent");
recentFiles = fileManager.findFiles(dataSource, "%.lnk", "Recent");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching 'Recent' .lnk files for Internet Explorer.");
}
for (FsContent recentFile : recentFiles) {
for (AbstractFile recentFile : recentFiles) {
if (controller.isCancelled()) {
break;
}
@@ -264,7 +262,7 @@ public class ExtractIE extends Extract {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", path));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), "RecentActivity", Util.getFileName(path)));
long id = Util.findID(image, path);
long id = Util.findID(dataSource, path);
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", id));
//TODO Revisit usage of deprecated constructor as per TSK-583
//bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME.getTypeID(), "RecentActivity", "Date Created", datetime));
@@ -280,9 +278,10 @@ public class ExtractIE extends Extract {
return IE_PASCO_LUT;
}
private void getHistory(Image image, IngestImageWorkerController controller) {
final Case currentCase = Case.getCurrentCase();
final String caseDir = Case.getCurrentCase().getCaseDirectory();
private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) {
currentCase = Case.getCurrentCase();
tskCase = currentCase.getSleuthkitCase();
PASCO_RESULTS_PATH = Case.getCurrentCase().getTempDirectory() + File.separator + "results";
JAVA_PATH = PlatformUtil.getJavaPath();
pascoResults = new ArrayList<String>();
@@ -307,37 +306,19 @@ public class ExtractIE extends Extract {
File resultsDir = new File(PASCO_RESULTS_PATH);
resultsDir.mkdirs();
tempDb = currentCase.getSleuthkitCase();
Collection<FileSystem> imageFS = tempDb.getFileSystems(image);
List<String> fsIds = new LinkedList<String>();
for (FileSystem img : imageFS) {
Long tempID = img.getId();
fsIds.add(tempID.toString());
}
String allFS = new String();
for (int i = 0; i < fsIds.size(); i++) {
if (i == 0) {
allFS += " AND (0";
}
allFS += " OR fs_obj_id = '" + fsIds.get(i) + "'";
if (i == fsIds.size() - 1) {
allFS += ")";
}
}
// get index.dat files
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> indexFiles = null;
List<AbstractFile> indexFiles = null;
try {
indexFiles = fileManager.findFiles(image, "index.dat");
indexFiles = fileManager.findFiles(dataSource, "index.dat");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching 'index.dat' files for Internet Explorer history.");
}
String temps;
String indexFileName;
for (FsContent indexFile : indexFiles) {
for (AbstractFile indexFile : indexFiles) {
// Since each result represent an index.dat file,
// just create these files with the following notation:
// index<Number>.dat (i.e. index0.dat, index1.dat,..., indexN.dat)
@@ -497,7 +478,7 @@ public class ExtractIE extends Extract {
// TODO: Need to fix this so we have the right obj_id
try {
BlackboardArtifact bbart = tempDb.getContentById(artObjId).newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY);
BlackboardArtifact bbart = tskCase.getContentById(artObjId).newArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY);
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", realurl));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED.getTypeID(), "RecentActivity", EscapeUtil.decodeURL(realurl)));

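getHistory() above dumps every index.dat it finds to a uniquely numbered temp file before running Pasco over the copies. That step in isolation, as a sketch; it assumes ContentUtils.writeToFile(Content, File) as used elsewhere in this commit, and the naming follows the index<Number>.dat comment in the hunk:

import java.io.File;
import java.io.IOException;
import java.util.List;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.datamodel.AbstractFile;

class IndexDatDumpSketch {
    // Write each index.dat to <tempDir>/index<N>.dat (index0.dat, index1.dat, ...)
    // so the per-file Pasco runs can tell the extracted copies apart.
    static void dumpAll(List<AbstractFile> indexFiles, String tempDir) throws IOException {
        int n = 0;
        for (AbstractFile indexFile : indexFiles) {
            File out = new File(tempDir + File.separator + "index" + n++ + ".dat");
            ContentUtils.writeToFile(indexFile, out);
        }
    }
}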
View File

@@ -37,8 +37,8 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.PipelineContext;
@@ -103,12 +103,12 @@ public class ExtractRegistry extends Extract {
}
private void getRegistryFiles(Image image, IngestImageWorkerController controller) {
private void getRegistryFiles(Content dataSource, IngestDataSourceWorkerController controller) {
org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> allRegistryFiles = new ArrayList<FsContent>();
List<AbstractFile> allRegistryFiles = new ArrayList<AbstractFile>();
try {
allRegistryFiles.addAll(fileManager.findFiles(image, "ntuser.dat"));
allRegistryFiles.addAll(fileManager.findFiles(dataSource, "ntuser.dat"));
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching 'ntuser.dat' file.");
}
@@ -118,14 +118,14 @@ public class ExtractRegistry extends Extract {
String[] regFileNames = new String[] {"system", "software", "security", "sam", "default"};
for (String regFileName : regFileNames) {
try {
allRegistryFiles.addAll(fileManager.findFiles(image, regFileName, "%/system32/config%"));
allRegistryFiles.addAll(fileManager.findFiles(dataSource, regFileName, "%/system32/config%"));
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching registry file: " + regFileName);
}
}
int j = 0;
for (FsContent regFile : allRegistryFiles) {
for (AbstractFile regFile : allRegistryFiles) {
String regFileName = regFile.getName();
String temps = currentCase.getTempDirectory() + "\\" + regFileName;
try {
@@ -396,8 +396,8 @@ public class ExtractRegistry extends Extract {
}
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
this.getRegistryFiles(image, controller);
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
this.getRegistryFiles(dataSource, controller);
}
@Override

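getRegistryFiles() above merges two searches: ntuser.dat anywhere in the data source, plus the five system hives restricted by parent path. A compact sketch of that aggregation against the findFiles signatures shown in this commit:

import java.util.ArrayList;
import java.util.List;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;

class RegistryHiveSketch {
    static List<AbstractFile> gather(FileManager fileManager, Content dataSource) throws TskCoreException {
        List<AbstractFile> hives = new ArrayList<AbstractFile>();
        hives.addAll(fileManager.findFiles(dataSource, "ntuser.dat"));  // per-user hives
        for (String name : new String[]{"system", "software", "security", "sam", "default"}) {
            // the dirName argument is matched LIKE-style, so % wildcards work
            hives.addAll(fileManager.findFiles(dataSource, name, "%/system32/config%"));
        }
        return hives;
    }
}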
View File

@@ -35,17 +35,17 @@ import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.autopsy.coreutils.EscapeUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
@@ -73,22 +73,22 @@ public class Firefox extends Extract {
}
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
this.getHistory(image, controller);
this.getBookmark(image, controller);
this.getDownload(image, controller);
this.getCookie(image, controller);
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
this.getHistory(dataSource, controller);
this.getBookmark(dataSource, controller);
this.getDownload(dataSource, controller);
this.getCookie(dataSource, controller);
}
private void getHistory(Image image, IngestImageWorkerController controller) {
private void getHistory(Content dataSource, IngestDataSourceWorkerController controller) {
//Make these separate, this is for history
//List<FsContent> FFSqlitedb = this.extractFiles(image, "select * from tsk_files where name LIKE '%places.sqlite%' and name NOT LIKE '%journal%' and parent_path LIKE '%Firefox%'");
//List<FsContent> FFSqlitedb = this.extractFiles(dataSource, "select * from tsk_files where name LIKE '%places.sqlite%' and name NOT LIKE '%journal%' and parent_path LIKE '%Firefox%'");
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> historyFiles = null;
List<AbstractFile> historyFiles = null;
try {
historyFiles = fileManager.findFiles(image, "%places.sqlite%", "Firefox");
historyFiles = fileManager.findFiles(dataSource, "%places.sqlite%", "Firefox");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching internet history files for Firefox.");
}
@@ -98,7 +98,7 @@ public class Firefox extends Extract {
}
int j = 0;
for (FsContent historyFile : historyFiles) {
for (AbstractFile historyFile : historyFiles) {
String fileName = historyFile.getName();
String temps = currentCase.getTempDirectory() + File.separator + fileName + j + ".db";
int errors = 0;
@@ -139,12 +139,12 @@ public class Firefox extends Extract {
services.fireModuleDataEvent(new ModuleDataEvent("Recent Activity", BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY));
}
private void getBookmark(Image image, IngestImageWorkerController controller) {
private void getBookmark(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> bookmarkFiles = null;
List<AbstractFile> bookmarkFiles = null;
try {
bookmarkFiles = fileManager.findFiles(image, "places.sqlite", "Firefox");
bookmarkFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching bookmark files for Firefox.");
}
@@ -154,7 +154,7 @@ public class Firefox extends Extract {
}
int j = 0;
for (FsContent bookmarkFile : bookmarkFiles) {
for (AbstractFile bookmarkFile : bookmarkFiles) {
String fileName = bookmarkFile.getName();
String temps = currentCase.getTempDirectory() + File.separator + fileName + j + ".db";
int errors = 0;
@@ -194,12 +194,12 @@ public class Firefox extends Extract {
//COOKIES section
// This gets the cookie info
private void getCookie(Image image, IngestImageWorkerController controller) {
private void getCookie(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> cookiesFiles = null;
List<AbstractFile> cookiesFiles = null;
try {
cookiesFiles = fileManager.findFiles(image, "cookies.sqlite", "Firefox");
cookiesFiles = fileManager.findFiles(dataSource, "cookies.sqlite", "Firefox");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching cookies files for Firefox.");
}
@@ -209,7 +209,7 @@ public class Firefox extends Extract {
}
int j = 0;
for (FsContent cookiesFile : cookiesFiles) {
for (AbstractFile cookiesFile : cookiesFiles) {
String fileName = cookiesFile.getName();
String temps = currentCase.getTempDirectory() + File.separator + fileName + j + ".db";
int errors = 0;
@@ -270,12 +270,12 @@ public class Firefox extends Extract {
//Downloads section
// This gets the downloads info
private void getDownload(Image image, IngestImageWorkerController controller) {
private void getDownload(Content dataSource, IngestDataSourceWorkerController controller) {
FileManager fileManager = currentCase.getServices().getFileManager();
List<FsContent> downloadsFiles = null;
List<AbstractFile> downloadsFiles = null;
try {
downloadsFiles = fileManager.findFiles(image, "downloads.sqlite", "Firefox");
downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox");
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error fetching 'downloads' files for Firefox.");
}
@@ -285,7 +285,7 @@ public class Firefox extends Extract {
}
int j = 0;
for (FsContent downloadsFile : downloadsFiles) {
for (AbstractFile downloadsFile : downloadsFiles) {
String fileName = downloadsFile.getName();
String temps = currentCase.getTempDirectory() + File.separator + fileName + j + ".db";
int errors = 0;
@@ -312,7 +312,7 @@ public class Firefox extends Extract {
//TODO Revisit usage of deprecated constructor as per TSK-583
//bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "RecentActivity", "Last Visited", (Long.valueOf(result.get("startTime").toString()))));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", (Long.valueOf(result.get("startTime").toString()))));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(image, urldecodedtarget)));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH_ID.getTypeID(), "RecentActivity", Util.findID(dataSource, urldecodedtarget)));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), "RecentActivity", ((result.get("target").toString() != null) ? result.get("target").toString() : "")));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), "RecentActivity", "FireFox"));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(), "RecentActivity", (Util.extractDomain((result.get("source").toString() != null) ? result.get("source").toString() : ""))));

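The download section above assembles a BlackboardAttribute collection and posts it via Extract.addArtifact(), which wraps newArtifact()/addAttributes(). A reduced sketch of that flow; the attribute values here are placeholders:

import java.util.ArrayList;
import java.util.Collection;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.TskCoreException;

class DownloadArtifactSketch {
    static void post(AbstractFile downloadsDb, String url, long accessTime) throws TskCoreException {
        Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
        bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), "RecentActivity", url));
        bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), "RecentActivity", accessTime));
        // what Extract.addArtifact(type, content, bbattributes) does internally
        BlackboardArtifact bbart = downloadsDb.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD);
        bbart.addAttributes(bbattributes);
    }
}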
View File

@@ -26,19 +26,19 @@ import java.util.ArrayList;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.Content;
/**
* Recent activity data source ingest module
*
*/
public final class RAImageIngestModule extends IngestModuleImage {
public final class RAImageIngestModule extends IngestModuleDataSource {
private static final Logger logger = Logger.getLogger(RAImageIngestModule.class.getName());
private static RAImageIngestModule defaultInstance = null;
@@ -55,8 +55,8 @@ public final class RAImageIngestModule extends IngestModuleImage {
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Started " + image.getName()));
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Started " + dataSource.getName()));
controller.switchToDeterminate(modules.size());
controller.progress(0);
@@ -69,7 +69,7 @@ public final class RAImageIngestModule extends IngestModuleImage {
break;
}
try {
module.process(pipelineContext, image, controller);
module.process(pipelineContext, dataSource, controller);
} catch (Exception ex) {
logger.log(Level.SEVERE, "Exception occurred in " + module.getName(), ex);
subCompleted.append(module.getName()).append(" failed - see log for details <br>");
@@ -97,7 +97,7 @@ public final class RAImageIngestModule extends IngestModuleImage {
errorMessage.append("No errors encountered.");
errorMsgSubject = "No errors reported";
}
final IngestMessage msg = IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Finished " + image.getName()+ " - " + errorMsgSubject, errorMessage.toString());
final IngestMessage msg = IngestMessage.createMessage(++messageId, MessageType.INFO, this, "Finished " + dataSource.getName()+ " - " + errorMsgSubject, errorMessage.toString());
services.postMessage(msg);
}

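process() above walks the sub-module list with determinate progress and a per-module cancellation check. The control-flow skeleton as a sketch; the Extract list and the controller calls mirror the code shown here, everything else is illustrative:

import java.util.List;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.datamodel.Content;

class PipelineLoopSketch {
    private static final Logger logger = Logger.getLogger(PipelineLoopSketch.class.getName());

    void run(List<Extract> modules, PipelineContext<IngestModuleDataSource> pipelineContext,
            Content dataSource, IngestDataSourceWorkerController controller) {
        controller.switchToDeterminate(modules.size());  // one tick per sub-module
        controller.progress(0);
        int done = 0;
        for (Extract module : modules) {
            if (controller.isCancelled()) {
                break;  // honor cancellation between sub-modules, never mid-module
            }
            try {
                module.process(pipelineContext, dataSource, controller);
            } catch (Exception ex) {
                logger.log(Level.SEVERE, "Exception occurred in " + module.getName(), ex);
            }
            controller.progress(++done);
        }
    }
}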
View File

@@ -31,25 +31,24 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javax.swing.JPanel;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.XMLUtil;
import org.sleuthkit.autopsy.ingest.PipelineContext;
import org.sleuthkit.autopsy.ingest.IngestImageWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleAbstract;
import org.sleuthkit.autopsy.ingest.IngestModuleImage;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.TskException;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
@@ -241,7 +240,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract {
return basereturn;
}
private void getURLs(Image image, IngestImageWorkerController controller) {
private void getURLs(Content dataSource, IngestDataSourceWorkerController controller) {
int totalQueries = 0;
try {
//from blackboard_artifacts
@@ -255,12 +254,19 @@ public class SearchEngineURLQueryAnalyzer extends Extract {
String searchEngineDomain = "";
String browser = "";
long last_accessed = -1;
//from tsk_files
List<FsContent> fslst = this.extractFiles(image, "select * from tsk_files where `obj_id` = '" + artifact.getObjectID() + "'");
if (fslst.isEmpty() || fslst == null) {
continue; //File was from a different image, and does not exist in current examination. Skipping to a new list of artifacts.
long fileId = artifact.getObjectID();
boolean isFromSource = tskCase.isFileFromSource(dataSource, fileId);
if (!isFromSource) {
//File was from a different dataSource. Skipping.
continue;
}
FsContent fs = fslst.get(0); //associated file
AbstractFile file = tskCase.getAbstractFileById(fileId);
if (file == null) {
continue;
}
SearchEngineURLQueryAnalyzer.SearchEngine se = NullEngine;
//from blackboard_attributes
Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID());
@@ -295,7 +301,7 @@ public class SearchEngineURLQueryAnalyzer extends Extract {
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_TEXT.getTypeID(), MODULE_NAME, query));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID(), MODULE_NAME, browser));
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED.getTypeID(), MODULE_NAME, last_accessed));
this.addArtifact(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, fs, bbattributes);
this.addArtifact(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, file, bbattributes);
se.increment();
++totalQueries;
}
@@ -323,8 +329,8 @@ public class SearchEngineURLQueryAnalyzer extends Extract {
}
@Override
public void process(PipelineContext<IngestModuleImage>pipelineContext, Image image, IngestImageWorkerController controller) {
this.getURLs(image, controller);
public void process(PipelineContext<IngestModuleDataSource>pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
this.getURLs(dataSource, controller);
logger.info("Search Engine stats: \n" + getTotals());
}

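The hunk above replaces a per-artifact SQL query with two SleuthkitCase calls. That filter in isolation, as a sketch; it assumes isFileFromSource() and getAbstractFileById() behave as the new code uses them:

import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

class SourceFilterSketch {
    // Resolve an artifact's backing file only when it belongs to this data
    // source; return null so the caller can skip files from other sources.
    static AbstractFile resolve(SleuthkitCase tskCase, Content dataSource, long fileId)
            throws TskCoreException {
        if (!tskCase.isFileFromSource(dataSource, fileId)) {
            return null;
        }
        return tskCase.getAbstractFileById(fileId);
    }
}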
View File

@@ -41,9 +41,9 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.services.FileManager;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.autopsy.report.SQLiteDBConnect;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
/**
@@ -187,7 +187,7 @@ public class Util {
return path;
}
public static long findID(Image image, String path) {
public static long findID(Content dataSource, String path) {
String parent_path = path.replace('\\', '/'); // fix Chrome paths
if (parent_path.length() > 2 && parent_path.charAt(1) == ':') {
parent_path = parent_path.substring(2); // remove drive letter (e.g., 'C:')
@@ -198,9 +198,9 @@ public class Util {
//String query = "select * from tsk_files where parent_path like \"" + parent_path + "\" AND name like \"" + name + "\"";
FileManager fileManager = Case.getCurrentCase().getServices().getFileManager();
List<FsContent> files = null;
List<AbstractFile> files = null;
try {
files = fileManager.findFiles(image, name, parent_path);
files = fileManager.findFiles(dataSource, name, parent_path);
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Error finding files for the given name and parent path.");
}
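findID() above normalizes Chrome-style paths before the lookup: forward slashes and no drive letter, then a name/parent-path split. The normalization as a self-contained sketch; the split at the end is an assumption about lines this diff elides:

class PathNormalizeSketch {
    // Returns {parentPath, name} normalized the way findID() expects.
    static String[] normalize(String path) {
        String p = path.replace('\\', '/');     // fix Chrome paths
        if (p.length() > 2 && p.charAt(1) == ':') {
            p = p.substring(2);                 // remove drive letter, e.g. 'C:'
        }
        int cut = p.lastIndexOf('/');           // split into parent path + name;
        String name = p.substring(cut + 1);     // assumed, not shown in the diff
        String parentPath = p.substring(0, cut + 1);
        return new String[]{parentPath, name};
    }
}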