Work towards converting core ingest modules to new ingest API

Richard Cordovano 2014-03-03 22:45:48 -05:00
parent bb2f26d8af
commit 2b95138f70
15 changed files with 1066 additions and 2006 deletions

View File

@ -32,6 +32,5 @@ public interface DataSourceIngestModule extends IngestModule {
* @param statusHelper A status helper to be used to report progress and
* detect task cancellation.
*/
// void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
void process(Content dataSource);
void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
}
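For orientation, here is a minimal module-side sketch of the two-argument process() signature introduced above. It is not part of this commit: the class name is hypothetical, and the remaining IngestModule lifecycle methods (init(long), complete(), stop(), getDisplayName()) are assumed to mirror the FileIngestModule methods exercised later in this diff.

```java
// Hypothetical sketch, not part of this commit.
// Lifecycle methods below are assumed from the FileIngestModule usage elsewhere in this diff.
package org.sleuthkit.autopsy.ingest;

import org.sleuthkit.datamodel.Content;

public class SampleDataSourceIngestModule implements DataSourceIngestModule {

    @Override
    public String getDisplayName() {
        return "Sample Data Source Module"; // illustrative name
    }

    @Override
    public void init(long taskId) {
        // acquire per-task resources here
    }

    @Override
    public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
        // Check for cancellation between units of work; see the
        // IngestDataSourceWorkerController usage sketch later in this diff for progress reporting.
        if (statusHelper.isCancelled()) {
            return;
        }
        // ... analyze dataSource here ...
    }

    @Override
    public void complete() {
        // post results and release resources
    }

    @Override
    public void stop() {
        // clean up after cancellation
    }
}
```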

View File

@ -18,157 +18,154 @@
*/
package org.sleuthkit.autopsy.ingest;
//import java.awt.EventQueue;
//import java.util.concurrent.locks.Lock;
//import java.util.concurrent.locks.ReentrantReadWriteLock;
//import java.util.logging.Level;
//import org.sleuthkit.autopsy.coreutils.Logger;
//import javax.swing.SwingWorker;
//import org.netbeans.api.progress.ProgressHandle;
//import org.netbeans.api.progress.ProgressHandleFactory;
//import org.openide.util.Cancellable;
//import org.sleuthkit.autopsy.coreutils.PlatformUtil;
//import org.sleuthkit.autopsy.coreutils.StopWatch;
//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
//import org.sleuthkit.datamodel.Content;
import java.awt.EventQueue;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.Logger;
import javax.swing.SwingWorker;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.util.Cancellable;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
import org.sleuthkit.datamodel.Content;
/**
* Worker thread that runs a data source-level ingest module (image, file set virtual directory, etc.).
* Used to process only a single data source and a single module.
*/
// class IngestDataSourceThread extends SwingWorker<Void, Void> {
//
// private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
// private ProgressHandle progress;
// private final Content dataSource;
// private final DataSourceIngestModule module;
// private IngestDataSourceWorkerController controller;
// private final IngestManager manager;
// private final IngestModuleInit init;
// private boolean inited;
// //current method of enqueuing data source ingest modules with locks and internal lock queue
// //ensures that we init, run and complete a single data source ingest module at a time
// //uses fairness policy to run them in order enqueued
// private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
//
// IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module, IngestModuleInit init) {
// this.manager = manager;
// this.dataSource = dataSource;
// this.module = module;
// this.init = init;
// this.inited = false;
// }
//
// Content getContent() {
// return dataSource;
// }
//
// DataSourceIngestModule getModule() {
// return module;
// }
//
// public void init() {
//
// logger.log(Level.INFO, "Initializing module: " + module.getName());
// try {
// module.init(dataSource.getId());
// inited = true;
// } catch (Exception e) {
// logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run.");
// //will not run
// inited = false;
// throw e;
// }
// }
//
// @Override
// protected Void doInBackground() throws Exception {
//
// logger.log(Level.INFO, "Pending module: " + module.getName());
//
// final String displayName = module.getName() + " dataSource id:" + dataSource.getId();
// progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
// @Override
// public boolean cancel() {
// logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user.");
// if (progress != null) {
// progress.setDisplayName(displayName + " (Cancelling...)");
// }
// return IngestDataSourceThread.this.cancel(true);
// }
// });
// progress.start();
// progress.switchToIndeterminate();
//
// dataSourceIngestModuleLock.lock();
// try {
// if (this.isCancelled()) {
// logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName());
// return Void.TYPE.newInstance();
// }
// logger.log(Level.INFO, "Starting module: " + module.getName());
// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
// progress.setDisplayName(displayName);
//
// if (inited == false) {
// logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName());
// return Void.TYPE.newInstance();
// }
// logger.log(Level.INFO, "Starting processing of module: " + module.getName());
//
// controller = new IngestDataSourceWorkerController(this, progress);
//
// if (isCancelled()) {
// logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation.");
// return Void.TYPE.newInstance();
// }
// final StopWatch timer = new StopWatch();
// timer.start();
// try {
// // RJCTODO
//// module.process(pipelineContext, dataSource, controller);
// } catch (Exception e) {
// logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e);
// } finally {
// timer.stop();
// logger.log(Level.INFO, "Done processing of module: " + module.getName()
// + " took " + timer.getElapsedTimeSecs() + " secs. to process()");
//
//
// //cleanup queues (worker and DataSource/module)
// manager.removeDataSourceIngestWorker(this);
//
// if (!this.isCancelled()) {
// logger.log(Level.INFO, "Module " + module.getName() + " completed");
// try {
// module.complete();
// } catch (Exception e) {
// logger.log(Level.INFO, "Error completing the module " + module.getName(), e);
// }
// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName());
// } else {
// logger.log(Level.INFO, "Module " + module.getName() + " stopped");
// try {
// module.stop();
// } catch (Exception e) {
// logger.log(Level.INFO, "Error stopping the module" + module.getName(), e);
// }
// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName());
// }
//
// }
// return Void.TYPE.newInstance();
// } finally {
// //release the lock so next module can run
// dataSourceIngestModuleLock.unlock();
// EventQueue.invokeLater(new Runnable() {
// @Override
// public void run() {
// progress.finish();
// }
// });
// logger.log(Level.INFO, "Done running module: " + module.getName());
// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
// }
// }
//}
class IngestDataSourceThread extends SwingWorker<Void, Void> {
private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
private ProgressHandle progress;
private final Content dataSource;
private final DataSourceIngestModule module;
private IngestDataSourceWorkerController controller;
private final IngestManager manager;
private boolean inited;
//current method of enqueuing data source ingest modules with locks and internal lock queue
//ensures that we init, run and complete a single data source ingest module at a time
//uses fairness policy to run them in order enqueued
private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module) {
this.manager = manager;
this.dataSource = dataSource;
this.module = module;
this.inited = false;
}
Content getContent() {
return dataSource;
}
DataSourceIngestModule getModule() {
return module;
}
public void init() {
logger.log(Level.INFO, "Initializing module: {0}", module.getDisplayName());
try {
module.init(dataSource.getId());
inited = true;
} catch (Exception e) {
logger.log(Level.INFO, "Failed initializing module: {0}, will not run.", module.getDisplayName());
//will not run
inited = false;
throw e;
}
}
@Override
protected Void doInBackground() throws Exception {
logger.log(Level.INFO, "Pending module: {0}", module.getDisplayName());
final String displayName = module.getDisplayName() + " dataSource id:" + dataSource.getId();
progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
@Override
public boolean cancel() {
logger.log(Level.INFO, "DataSource ingest module {0} cancelled by user.", module.getDisplayName());
if (progress != null) {
progress.setDisplayName(displayName + " (Cancelling...)");
}
return IngestDataSourceThread.this.cancel(true);
}
});
progress.start();
progress.switchToIndeterminate();
dataSourceIngestModuleLock.lock();
try {
if (this.isCancelled()) {
logger.log(Level.INFO, "Cancelled while pending, module: {0}", module.getDisplayName());
return Void.TYPE.newInstance();
}
logger.log(Level.INFO, "Starting module: {0}", module.getDisplayName());
logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
progress.setDisplayName(displayName);
if (inited == false) {
logger.log(Level.INFO, "Module wasn''t initialized, will not run: {0}", module.getDisplayName());
return Void.TYPE.newInstance();
}
logger.log(Level.INFO, "Starting processing of module: {0}", module.getDisplayName());
controller = new IngestDataSourceWorkerController(this, progress);
if (isCancelled()) {
logger.log(Level.INFO, "Terminating DataSource ingest module {0} due to cancellation.", module.getDisplayName());
return Void.TYPE.newInstance();
}
final StopWatch timer = new StopWatch();
timer.start();
try {
// RJCTODO
// module.process(pipelineContext, dataSource, controller);
} catch (Exception e) {
logger.log(Level.WARNING, "Exception in module: " + module.getDisplayName() + " DataSource: " + dataSource.getName(), e);
} finally {
timer.stop();
logger.log(Level.INFO, "Done processing of module: {0} took {1} secs. to process()", new Object[]{module.getDisplayName(), timer.getElapsedTimeSecs()});
//cleanup queues (worker and DataSource/module)
manager.removeDataSourceIngestWorker(this);
if (!this.isCancelled()) {
logger.log(Level.INFO, "Module {0} completed", module.getDisplayName());
try {
module.complete();
} catch (Exception e) {
logger.log(Level.INFO, "Error completing the module " + module.getDisplayName(), e);
}
IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
} else {
logger.log(Level.INFO, "Module {0} stopped", module.getDisplayName());
try {
module.stop();
} catch (Exception e) {
logger.log(Level.INFO, "Error stopping the module " + module.getDisplayName(), e);
}
IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getDisplayName());
}
}
return Void.TYPE.newInstance();
} finally {
//release the lock so next module can run
dataSourceIngestModuleLock.unlock();
EventQueue.invokeLater(new Runnable() {
@Override
public void run() {
progress.finish();
}
});
logger.log(Level.INFO, "Done running module: {0}", module.getDisplayName());
logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
}
}
}
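As a rough illustration of the manager side, the following sketch shows how IngestManager might schedule this worker. The enclosing method name and logger are assumptions; the constructor, init(), the dataSourceIngesters list, and SwingWorker.execute() are the pieces visible in this diff.

```java
// Hypothetical sketch of IngestManager-side code, not part of this commit.
// The enclosing method name and logger are illustrative only.
void startDataSourceIngest(Content dataSource, DataSourceIngestModule module) {
    IngestDataSourceThread worker = new IngestDataSourceThread(this, dataSource, module);
    try {
        worker.init();                   // throws if the module fails to initialize
    } catch (Exception ex) {
        logger.log(Level.WARNING, "Module failed to initialize, not scheduling: {0}", module.getDisplayName());
        return;
    }
    synchronized (this) {
        dataSourceIngesters.add(worker); // worker removes itself when done or interrupted
    }
    worker.execute();                    // SwingWorker: doInBackground() runs off the EDT
}
```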

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -18,64 +18,64 @@
*/
package org.sleuthkit.autopsy.ingest;
//import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandle;
// RJCTODO: Rework or replace this code
// RJCTODO: This could use a renaming, really don't want it long-term, but maybe need to keep it for 3.1 DISCUSS
/**
* Controller for data source-level ingest modules.
* Used by modules to check task status and to post progress.
*/
//public class IngestDataSourceWorkerController {
//
// private IngestDataSourceThread worker;
// private ProgressHandle progress;
//
// /**
// * Instantiate the controller for the worker
// * @param worker underlying DataSource ingest thread
// * @param progress the progress handle
// */
// IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
// this.worker = worker;
// this.progress = progress;
// }
//
// /**
// * Check if the task has been cancelled. This should be polled by the module periodically
// * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup
// *
// * @return true if the task has been cancelled, false otherwise
// */
// public boolean isCancelled() {
// return worker.isCancelled();
// }
//
// /**
// * Update the progress bar and switch to determinate mode once number of total work units is known
// * @param workUnits total number of work units for the DataSource ingest task
// */
// public void switchToDeterminate(int workUnits) {
// if (progress != null) {
// progress.switchToDeterminate(workUnits);
// }
// }
//
// /**
// * Update the progress bar and switch to non determinate mode if number of work units is not known
// */
// public void switchToInDeterminate() {
// if (progress != null) {
// progress.switchToIndeterminate();
// }
// }
//
// /**
// * Update the progress bar with the number of work units performed, if in the determinate mode
// * @param workUnits number of work units performed so far by the module
// */
// public void progress(int workUnits) {
// if (progress != null) {
// progress.progress(worker.getContent().getName(), workUnits);
// }
// }
//}
public class IngestDataSourceWorkerController {
private IngestDataSourceThread worker;
private ProgressHandle progress;
/**
* Instantiate the controller for the worker
* @param worker underlying DataSource ingest thread
* @param progress the progress handle
*/
IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
this.worker = worker;
this.progress = progress;
}
/**
* Check if the task has been canceled. The module should poll this periodically
* and, if it returns true, break out of its processing loop and call its stop() method to clean up.
*
* @return true if the task has been canceled, false otherwise
*/
public boolean isCancelled() {
return worker.isCancelled();
}
/**
* Update the progress bar and switch to determinate mode once the total number of work units is known
* @param workUnits total number of work units for the DataSource ingest task
*/
public void switchToDeterminate(int workUnits) {
if (progress != null) {
progress.switchToDeterminate(workUnits);
}
}
/**
* Update the progress bar and switch to indeterminate mode when the number of work units is not known
*/
public void switchToInDeterminate() {
if (progress != null) {
progress.switchToIndeterminate();
}
}
/**
* Update the progress bar with the number of work units performed so far, if in determinate mode
* @param workUnits number of work units performed so far by the module
*/
public void progress(int workUnits) {
if (progress != null) {
progress.progress(worker.getContent().getName(), workUnits);
}
}
}
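A short usage sketch (hypothetical module code, not part of this commit) of the polling and progress contract documented above: switch to determinate mode when the work can be estimated, report progress per item, and bail out cleanly on cancellation. The estimateWorkItems() and cleanup() helpers are illustrative assumptions; only the controller calls correspond to the class defined above.

```java
// Hypothetical module-side sketch, not part of this commit.
// estimateWorkItems() and cleanup() are illustrative helpers.
void processDataSource(Content dataSource, IngestDataSourceWorkerController controller) {
    List<Long> itemsToScan = estimateWorkItems(dataSource);   // assumed helper
    // If the amount of work could not be estimated, a module would call
    // controller.switchToInDeterminate() instead.
    controller.switchToDeterminate(itemsToScan.size());
    int done = 0;
    for (Long itemId : itemsToScan) {
        if (controller.isCancelled()) {
            cleanup();                                         // module's own stop/cleanup path (assumed)
            return;
        }
        // ... scan the item identified by itemId ...
        controller.progress(++done);
    }
}
```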

View File

@ -58,7 +58,7 @@ public class IngestManager {
// private IngestManagerStats stats; // RJCTODO: Decide whether to reimplement
private final IngestScheduler scheduler;
private IngestAbstractFileProcessor abstractFileIngester;
// private List<IngestDataSourceThread> dataSourceIngesters; // RJCTODO: Adapt to new paradigm
private List<IngestDataSourceThread> dataSourceIngesters;
private SwingWorker<Object, Void> queueWorker;
// private final Map<String, IngestModuleAbstractFile.ProcessResult> abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete
private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class);
@ -672,14 +672,13 @@ public class IngestManager {
}
}
// RJCTODO: Data source ingest is temporarily disabled
//data source worker to remove itself when complete or interrupted
// void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
// //remove worker
// synchronized (this) {
// dataSourceIngesters.remove(worker);
// }
// }
void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
//remove worker
synchronized (this) {
dataSourceIngesters.remove(worker);
}
}
// RJCTODO: Decide whether or not to reimplement this class
/**

View File

@ -1,49 +0,0 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
/**
*
* Context passed to a module at initialization time.
* It may contain module configuration required to initialize some modules.
*/
public class IngestModuleInit {
// private String moduleArgs;
/**
* Get module arguments
* @return module args string, used by some modules
*/
// public String getModuleArgs() {
// return moduleArgs;
// }
/**
* Sets module args. string (only used by module pipeline)
* @param moduleArgs arguments to set for the module
*/
// void setModuleArgs(String moduleArgs) {
// this.moduleArgs = moduleArgs;
// }
//
}

File diff suppressed because it is too large.

View File

@ -6,6 +6,14 @@
<code-name-base>org.sleuthkit.autopsy.exifparser</code-name-base>
<suite-component/>
<module-dependencies>
<dependency>
<code-name-base>org.openide.util.lookup</code-name-base>
<build-prerequisite/>
<compile-dependency/>
<run-dependency>
<specification-version>8.19.1</specification-version>
</run-dependency>
</dependency>
<dependency>
<code-name-base>org.sleuthkit.autopsy.core</code-name-base>
<build-prerequisite/>

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2013 Basis Technology Corp.
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -16,229 +16,193 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package org.sleuthkit.autopsy.exifparser;
//
//import com.drew.imaging.ImageMetadataReader;
//import com.drew.imaging.ImageProcessingException;
//import com.drew.lang.GeoLocation;
//import com.drew.lang.Rational;
//import com.drew.metadata.Metadata;
//import com.drew.metadata.exif.ExifIFD0Directory;
//import com.drew.metadata.exif.ExifSubIFDDirectory;
//import com.drew.metadata.exif.GpsDirectory;
//import java.io.BufferedInputStream;
//import java.io.IOException;
//import java.io.InputStream;
//import java.util.ArrayList;
//import java.util.Collection;
//import java.util.Date;
//import java.util.logging.Level;
//import org.sleuthkit.autopsy.coreutils.ImageUtils;
//import org.sleuthkit.autopsy.coreutils.Logger;
//import org.sleuthkit.autopsy.coreutils.Version;
//import org.sleuthkit.autopsy.ingest.IngestServices;
//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
//import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
//import org.sleuthkit.datamodel.AbstractFile;
//import org.sleuthkit.datamodel.BlackboardArtifact;
//import org.sleuthkit.datamodel.BlackboardAttribute;
//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
//import org.sleuthkit.datamodel.ReadContentInputStream;
//import org.sleuthkit.datamodel.TskCoreException;
//import org.sleuthkit.datamodel.TskData;
//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
package org.sleuthkit.autopsy.exifparser;
import com.drew.imaging.ImageMetadataReader;
import com.drew.imaging.ImageProcessingException;
import com.drew.lang.GeoLocation;
import com.drew.lang.Rational;
import com.drew.metadata.Metadata;
import com.drew.metadata.exif.ExifIFD0Directory;
import com.drew.metadata.exif.ExifSubIFDDirectory;
import com.drew.metadata.exif.GpsDirectory;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.logging.Level;
import org.sleuthkit.autopsy.coreutils.ImageUtils;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.ReadContentInputStream;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
/**
* Ingest module to parse image Exif metadata. Currently only supports JPEG
* files. Ingests an image file and, if available, adds its date, latitude,
* longitude, altitude, device model, and device make to a blackboard artifact.
*/
//public final class ExifParserFileIngestModule extends IngestModuleAbstractFile {
//
// private IngestServices services;
// final public static String MODULE_NAME = "Exif Parser";
// final public static String MODULE_VERSION = Version.getVersion();
// private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
// private static ExifParserFileIngestModule defaultInstance = null;
// private int filesProcessed = 0;
// private boolean filesToFire = false;
//
// //file ingest modules require a private constructor
// //to ensure singleton instances
// private ExifParserFileIngestModule() {
// }
//
// //default instance used for module registration
// public static synchronized ExifParserFileIngestModule getDefault() {
// if (defaultInstance == null) {
// defaultInstance = new ExifParserFileIngestModule();
// }
// return defaultInstance;
// }
//
// @Override
// public IngestModuleAbstractFile.ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile content) {
//
// //skip unalloc
// if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
// return IngestModuleAbstractFile.ProcessResult.OK;
// }
//
// // skip known
// if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
// return IngestModuleAbstractFile.ProcessResult.OK;
// }
//
// // update the tree every 1000 files if we have EXIF data that is not being being displayed
// filesProcessed++;
// if ((filesToFire) && (filesProcessed % 1000 == 0)) {
// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
// filesToFire = false;
// }
//
// //skip unsupported
// if (!parsableFormat(content)) {
// return IngestModuleAbstractFile.ProcessResult.OK;
// }
//
// return processFile(content);
// }
//
// public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) {
// InputStream in = null;
// BufferedInputStream bin = null;
//
// try {
// in = new ReadContentInputStream(f);
// bin = new BufferedInputStream(in);
//
// Collection<BlackboardAttribute> attributes = new ArrayList<BlackboardAttribute>();
// Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
//
// // Date
// ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
// if (exifDir != null) {
// Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
// if (date != null) {
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000));
// }
// }
//
// // GPS Stuff
// GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
// if (gpsDir != null) {
// GeoLocation loc = gpsDir.getGeoLocation();
// if (loc != null) {
// double latitude = loc.getLatitude();
// double longitude = loc.getLongitude();
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude));
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude));
// }
//
// Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
// if (altitude != null) {
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue()));
// }
// }
//
// // Device info
// ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
// if (devDir != null) {
// String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
// if (model != null && !model.isEmpty()) {
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model));
// }
//
// String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
// if (make != null && !make.isEmpty()) {
// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make));
// }
// }
//
// // Add the attributes, if there are any, to a new artifact
// if (!attributes.isEmpty()) {
// BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
// bba.addAttributes(attributes);
// filesToFire = true;
// }
//
// return IngestModuleAbstractFile.ProcessResult.OK;
//
// } catch (TskCoreException ex) {
// logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ").");
// } catch (ImageProcessingException ex) {
// logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")");
// } catch (IOException ex) {
// logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
// } finally {
// try {
// if (in != null) {
// in.close();
// }
// if (bin != null) {
// bin.close();
// }
// } catch (IOException ex) {
// logger.log(Level.WARNING, "Failed to close InputStream.", ex);
// }
// }
//
// // If we got here, there was an error
// return IngestModuleAbstractFile.ProcessResult.ERROR;
// }
//
// /**
// * Checks if should try to attempt to extract exif. Currently checks if JPEG
// * image (by signature)
// *
// * @param f file to be checked
// *
// * @return true if to be processed
// */
// private boolean parsableFormat(AbstractFile f) {
// return ImageUtils.isJpegFileHeader(f);
// }
//
// @Override
// public void complete() {
// logger.log(Level.INFO, "completed exif parsing " + this.toString());
// if (filesToFire) {
// //send the final new data event
// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
// }
// }
//
// @Override
// public String getVersion() {
// return MODULE_VERSION;
// }
//
// @Override
// public String getName() {
// return "Exif Image Parser";
// }
//
// @Override
// public String getDescription() {
// return "Ingests JPEG files and retrieves their EXIF metadata.";
// }
//
// @Override
// public void init(IngestModuleInit initContext) {
// services = IngestServices.getDefault();
// logger.log(Level.INFO, "init() " + this.toString());
//
// filesProcessed = 0;
// filesToFire = false;
// }
//
// @Override
// public void stop() {
// }
//
// @Override
// public boolean hasBackgroundJobsRunning() {
// return false;
// }
//}
public final class ExifParserFileIngestModule implements FileIngestModule {
private IngestServices services;
private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
private int filesProcessed = 0;
private boolean filesToFire = false;
ExifParserFileIngestModule() {
}
@Override
public String getDisplayName() {
return ExifParserModuleFactory.getModuleName();
}
@Override
public void init(long taskId) {
services = IngestServices.getDefault();
logger.log(Level.INFO, "init() {0}", this.toString());
filesProcessed = 0;
filesToFire = false;
}
@Override
public void process(AbstractFile content) {
//skip unalloc
if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
return;
}
// skip known
if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
return;
}
// update the tree every 1000 files if we have EXIF data that is not being displayed
filesProcessed++;
if ((filesToFire) && (filesProcessed % 1000 == 0)) {
services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
filesToFire = false;
}
//skip unsupported
if (!parsableFormat(content)) {
return;
}
processFile(content);
}
public void processFile(AbstractFile f) {
InputStream in = null;
BufferedInputStream bin = null;
try {
in = new ReadContentInputStream(f);
bin = new BufferedInputStream(in);
Collection<BlackboardAttribute> attributes = new ArrayList<BlackboardAttribute>();
Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
// Date
ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
if (exifDir != null) {
Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
if (date != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), getDisplayName(), date.getTime() / 1000));
}
}
// GPS Stuff
GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
if (gpsDir != null) {
GeoLocation loc = gpsDir.getGeoLocation();
if (loc != null) {
double latitude = loc.getLatitude();
double longitude = loc.getLongitude();
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), getDisplayName(), latitude));
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), getDisplayName(), longitude));
}
Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
if (altitude != null) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), getDisplayName(), altitude.doubleValue()));
}
}
// Device info
ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
if (devDir != null) {
String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
if (model != null && !model.isEmpty()) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), getDisplayName(), model));
}
String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
if (make != null && !make.isEmpty()) {
attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), getDisplayName(), make));
}
}
// Add the attributes, if there are any, to a new artifact
if (!attributes.isEmpty()) {
BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
bba.addAttributes(attributes);
filesToFire = true;
}
} catch (TskCoreException ex) {
logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage());
} catch (ImageProcessingException ex) {
logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()});
} catch (IOException ex) {
logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
} finally {
try {
if (in != null) {
in.close();
}
if (bin != null) {
bin.close();
}
} catch (IOException ex) {
logger.log(Level.WARNING, "Failed to close InputStream.", ex);
}
}
}
/**
* Checks whether EXIF extraction should be attempted for this file. Currently
* checks whether the file is a JPEG image (by signature).
*
* @param f file to be checked
*
* @return true if the file should be processed
*/
private boolean parsableFormat(AbstractFile f) {
return ImageUtils.isJpegFileHeader(f);
}
@Override
public void complete() {
logger.log(Level.INFO, "completed exif parsing {0}", this.toString());
if (filesToFire) {
//send the final new data event
services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
}
}
@Override
public void stop() {
}
}

View File

@ -0,0 +1,62 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.exifparser;
import java.io.Serializable;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
/**
* A factory that creates file ingest modules that parse EXIF metadata from image files.
*/
@ServiceProvider(service=IngestModuleFactory.class)
public class ExifParserModuleFactory extends AbstractIngestModuleFactory {
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return "Exif Image Parser";
}
@Override
public String getModuleDescription() {
return "Ingests JPEG files and retrieves their EXIF metadata.";
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public boolean isFileIngestModuleFactory() {
return true;
}
@Override
public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
return new ExifParserFileIngestModule();
}
}
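To show what the @ServiceProvider registration buys, here is a hedged sketch of a driver that discovers factories through the NetBeans Lookup and runs a file module through the new per-task lifecycle. The driver class, the assumption that isFileIngestModuleFactory() is declared on IngestModuleFactory, and the null default-options argument are illustrative; only the factory and module methods appear in this commit.

```java
// Hypothetical driver sketch, not part of this commit. Lookup.getDefault().lookupAll()
// is standard NetBeans Platform API; the rest of the driver is illustrative.
package org.sleuthkit.autopsy.ingest;

import java.util.List;
import org.openide.util.Lookup;
import org.sleuthkit.datamodel.AbstractFile;

class FileIngestDriverSketch {

    void runFileModules(long dataSourceTaskId, List<AbstractFile> files) throws Exception {
        for (IngestModuleFactory factory : Lookup.getDefault().lookupAll(IngestModuleFactory.class)) {
            if (!factory.isFileIngestModuleFactory()) {
                continue; // skip factories that do not provide file-level modules
            }
            // null stands in for default ingest options; ExifParserModuleFactory ignores them
            FileIngestModule module = factory.createFileIngestModule(null);
            module.init(dataSourceTaskId);
            for (AbstractFile file : files) {
                module.process(file);
            }
            module.complete();
        }
    }
}
```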

View File

@ -66,7 +66,7 @@ public class HashDbIngestModule implements FileIngestModule {
}
@Override
public void init(long dataSourceTaskId) {
public void init(long taskId) {
services = IngestServices.getDefault();
skCase = Case.getCurrentCase().getSleuthkitCase();
@ -76,22 +76,20 @@ public class HashDbIngestModule implements FileIngestModule {
calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes();
if (knownHashSets.isEmpty()) {
// RJCTODO
// services.postMessage(IngestMessage.createWarningMessage(++messageId,
// this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.noKnownHashDbSetMsg"),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
services.postMessage(IngestMessage.createWarningMessage(++messageId,
this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.noKnownHashDbSetMsg"),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
}
if (knownBadHashSets.isEmpty()) {
// RJCTODO
// services.postMessage(IngestMessage.createWarningMessage(++messageId,
// this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.noKnownBadHashDbSetMsg"),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
services.postMessage(IngestMessage.createWarningMessage(++messageId,
this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.noKnownBadHashDbSetMsg"),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
}
}
@ -122,7 +120,6 @@ public class HashDbIngestModule implements FileIngestModule {
// bail out if we have no hashes set
if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) {
// return ProcessResult.OK;
return;
}
@ -136,14 +133,14 @@ public class HashDbIngestModule implements FileIngestModule {
calctime += (System.currentTimeMillis() - calcstart);
} catch (IOException ex) {
logger.log(Level.WARNING, "Error calculating hash of file " + name, ex);
// services.postMessage(IngestMessage.createErrorMessage(++messageId,
// HashDbIngestModule.this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.fileReadErrorMsg",
// name),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.calcHashValueErr",
// name)));
services.postMessage(IngestMessage.createErrorMessage(++messageId,
HashDbIngestModule.this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.fileReadErrorMsg",
name),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.calcHashValueErr",
name)));
// return ProcessResult.ERROR;
return;
}
@ -163,14 +160,14 @@ public class HashDbIngestModule implements FileIngestModule {
skCase.setKnown(file, TskData.FileKnown.BAD);
} catch (TskException ex) {
logger.log(Level.WARNING, "Couldn't set known bad state for file " + name + " - see sleuthkit log for details", ex);
// services.postMessage(IngestMessage.createErrorMessage(++messageId,
// HashDbIngestModule.this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.hashLookupErrorMsg",
// name),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.settingKnownBadStateErr",
// name)));
services.postMessage(IngestMessage.createErrorMessage(++messageId,
HashDbIngestModule.this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.hashLookupErrorMsg",
name),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.settingKnownBadStateErr",
name)));
// ret = ProcessResult.ERROR;
}
String hashSetName = db.getHashSetName();
@ -194,14 +191,15 @@ public class HashDbIngestModule implements FileIngestModule {
lookuptime += (System.currentTimeMillis() - lookupstart);
} catch (TskException ex) {
logger.log(Level.WARNING, "Couldn't lookup known bad hash for file " + name + " - see sleuthkit log for details", ex);
// services.postMessage(IngestMessage.createErrorMessage(++messageId,
// HashDbIngestModule.this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.hashLookupErrorMsg",
// name),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.lookingUpKnownBadHashValueErr",
// name)));
services.postMessage(IngestMessage.createErrorMessage(++messageId,
HashDbIngestModule.this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.hashLookupErrorMsg",
name),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.lookingUpKnownBadHashValueErr",
name)));
// RJCTODO
// ret = ProcessResult.ERROR;
}
}
@ -219,28 +217,22 @@ public class HashDbIngestModule implements FileIngestModule {
break;
} catch (TskException ex) {
logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex);
// services.postMessage(IngestMessage.createErrorMessage(++messageId,
// HashDbIngestModule.this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.hashLookupErrorMsg",
// name),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.settingsKnownStateErr",
// name)));
// RJCTODO
// ret = ProcessResult.ERROR;
}
}
lookuptime += (System.currentTimeMillis() - lookupstart);
} catch (TskException ex) {
logger.log(Level.WARNING, "Couldn't lookup known hash for file " + name + " - see sleuthkit log for details", ex);
// services.postMessage(IngestMessage.createErrorMessage(++messageId,
// HashDbIngestModule.this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.hashLookupErrorMsg",
// name),
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.lookingUpKnownHashValueErr",
// name)));
services.postMessage(IngestMessage.createErrorMessage(++messageId,
HashDbIngestModule.this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.hashLookupErrorMsg",
name),
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.lookingUpKnownHashValueErr",
name)));
// RJCTODO
// ret = ProcessResult.ERROR;
}
}
@ -294,13 +286,13 @@ public class HashDbIngestModule implements FileIngestModule {
detailsSb.append("</table>");
// services.postMessage(IngestMessage.createDataMessage(++messageId, this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.postToBB.knownBadMsg",
// abstractFile.getName()),
// detailsSb.toString(),
// abstractFile.getName() + md5Hash,
// badFile));
services.postMessage(IngestMessage.createDataMessage(++messageId, this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.postToBB.knownBadMsg",
abstractFile.getName()),
detailsSb.toString(),
abstractFile.getName() + md5Hash,
badFile));
}
services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
} catch (TskException ex) {
@ -337,12 +329,12 @@ public class HashDbIngestModule implements FileIngestModule {
}
detailsSb.append("</ul>");
// services.postMessage(IngestMessage.createMessage(++messageId,
// IngestMessage.MessageType.INFO,
// this,
// NbBundle.getMessage(this.getClass(),
// "HashDbIngestModule.complete.hashLookupResults"),
// detailsSb.toString()));
services.postMessage(IngestMessage.createMessage(++messageId,
IngestMessage.MessageType.INFO,
this,
NbBundle.getMessage(this.getClass(),
"HashDbIngestModule.complete.hashLookupResults"),
detailsSb.toString()));
}
}

View File

@ -50,10 +50,10 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
import org.sleuthkit.autopsy.coreutils.StopWatch;
import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestModuleInit;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
import org.sleuthkit.datamodel.BlackboardArtifact;
@ -77,7 +77,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
*
* Registered as a module in layer.xml
*/
public final class KeywordSearchIngestModule {
public final class KeywordSearchIngestModule implements FileIngestModule {
enum UpdateFrequency {
@ -102,7 +102,6 @@ public final class KeywordSearchIngestModule {
public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class,
"KeywordSearchIngestModule.moduleDescription");
final public static String MODULE_VERSION = Version.getVersion();
private static KeywordSearchIngestModule instance = null;
private IngestServices services;
private Ingester ingester = null;
private volatile boolean commitIndex = false; //whether to commit index next time
@ -146,28 +145,110 @@ public final class KeywordSearchIngestModule {
};
private Map<Long, IngestStatus> ingestStatus;
//private constructor to ensure singleton instance
private KeywordSearchIngestModule() {
KeywordSearchIngestModule() {
}
/**
* Returns singleton instance of the module, creates one if needed
* Initializes the module for a new ingest run. Sets up threads and timers,
* retrieves settings, and loads the keyword lists to run on.
*
* @return instance of the module
*/
public static synchronized KeywordSearchIngestModule getDefault() {
if (instance == null) {
instance = new KeywordSearchIngestModule();
@Override
public void init(long taskId) {
logger.log(Level.INFO, "init()");
services = IngestServices.getDefault();
initialized = false;
caseHandle = Case.getCurrentCase().getSleuthkitCase();
tikaFormatDetector = new Tika();
ingester = Server.getIngester();
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
logger.log(Level.SEVERE, msg);
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details));
return;
}
return instance;
} catch (KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
//this means Solr is not properly initialized
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
services.postMessage(IngestMessage.createErrorMessage(++messageID, this, msg, details));
return;
}
//initialize extractors
stringExtractor = new AbstractFileStringExtract();
stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
//log the scripts used for debugging
final StringBuilder sbScripts = new StringBuilder();
for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
sbScripts.append(s.name()).append(" ");
}
logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString());
textExtractors = new ArrayList<>();
//order matters, more specific extractors first
textExtractors.add(new AbstractFileHtmlExtract());
textExtractors.add(new AbstractFileTikaTextExtract());
ingestStatus = new HashMap<>();
keywords = new ArrayList<>();
keywordLists = new ArrayList<>();
keywordToList = new HashMap<>();
initKeywords();
if (keywords.isEmpty() || keywordLists.isEmpty()) {
services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
}
processedFiles = false;
finalSearcherDone = false;
searcherDone = true; //make sure to start the initial currentSearcher
//keeps track of all results per run not to repeat reporting the same hits
currentResults = new HashMap<>();
curDataSourceIds = new HashSet<>();
indexer = new Indexer();
final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs);
logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs);
commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
initialized = true;
commitTimer.start();
searchTimer.start();
}
@Override
public void process(AbstractFile abstractFile) {
if (initialized == false) //error initializing indexing/Solr
{
logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName());
logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName());
ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
return;
// RJCTODO
// return ProcessResult.OK;
}
try {
@ -181,9 +262,12 @@ public final class KeywordSearchIngestModule {
if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) {
//skip indexing of virtual dirs (no content, no real name) - will index children files
return;
// RJCTODO
// return ProcessResult.OK;
}
// RJCTODO
//check if we should index meta-data only when 1) it is known 2) HashDb module errored on it
// if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) {
// indexer.indexFile(abstractFile, false);
@ -195,6 +279,7 @@ public final class KeywordSearchIngestModule {
if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) {
//index meta-data only
indexer.indexFile(abstractFile, false);
// RJCTODO
// return ProcessResult.OK;
}
@ -206,6 +291,7 @@ public final class KeywordSearchIngestModule {
//index the file and content (if the content is supported)
indexer.indexFile(abstractFile, true);
// RJCTODO
// return ProcessResult.OK;
}
@ -213,6 +299,7 @@ public final class KeywordSearchIngestModule {
* After all files are ingested, execute final index commit and final search
* Cleanup resources, threads, timers
*/
@Override
public void complete() {
if (initialized == false) {
return;
@ -249,12 +336,10 @@ public final class KeywordSearchIngestModule {
try {
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks();
logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles);
logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks);
} catch (NoOpenCoreException ex) {
logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles);
logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks);
} catch (NoOpenCoreException | KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex);
} catch (KeywordSearchModuleException se) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se);
}
//cleanup done in final searcher
@ -265,6 +350,7 @@ public final class KeywordSearchIngestModule {
/**
* Handle stop event (ingest interrupted) Cleanup resources, threads, timers
*/
@Override
public void stop() {
logger.log(Level.INFO, "stop()");
@ -319,152 +405,20 @@ public final class KeywordSearchIngestModule {
initialized = false;
}
public String getName() {
return MODULE_NAME;
}
public String getDescription() {
return MODULE_DESCRIPTION;
}
public String getVersion() {
return MODULE_VERSION;
}
/**
* Initializes the module for new ingest run Sets up threads, timers,
* retrieves settings, keyword lists to run on
*
*/
public void init(IngestModuleInit initContext) {
logger.log(Level.INFO, "init()");
services = IngestServices.getDefault();
initialized = false;
caseHandle = Case.getCurrentCase().getSleuthkitCase();
tikaFormatDetector = new Tika();
ingester = Server.getIngester();
final Server server = KeywordSearch.getServer();
try {
if (!server.isRunning()) {
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
logger.log(Level.SEVERE, msg);
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
return;
}
} catch (KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
//this means Solr is not properly initialized
String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
// services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
return;
}
//initialize extractors
stringExtractor = new AbstractFileStringExtract();
stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
//log the scripts used for debugging
final StringBuilder sbScripts = new StringBuilder();
for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
sbScripts.append(s.name()).append(" ");
}
logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString());
textExtractors = new ArrayList<AbstractFileExtract>();
//order matters, more specific extractors first
textExtractors.add(new AbstractFileHtmlExtract());
textExtractors.add(new AbstractFileTikaTextExtract());
ingestStatus = new HashMap<Long, IngestStatus>();
keywords = new ArrayList<Keyword>();
keywordLists = new ArrayList<String>();
keywordToList = new HashMap<String, KeywordSearchListsAbstract.KeywordSearchList>();
initKeywords();
if (keywords.isEmpty() || keywordLists.isEmpty()) {
// services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
// NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
}
processedFiles = false;
finalSearcherDone = false;
searcherDone = true; //make sure to start the initial currentSearcher
//keeps track of all results per run not to repeat reporting the same hits
currentResults = new HashMap<Keyword, List<Long>>();
curDataSourceIds = new HashSet<Long>();
indexer = new Indexer();
final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs);
logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs);
commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
initialized = true;
commitTimer.start();
searchTimer.start();
}
public boolean hasSimpleConfiguration() {
return true;
}
public boolean hasAdvancedConfiguration() {
return true;
}
public javax.swing.JPanel getSimpleConfiguration(String context) {
KeywordSearchListsXML.getCurrent().reload();
if (null == simpleConfigPanel) {
simpleConfigPanel = new KeywordSearchIngestSimplePanel();
}
else {
simpleConfigPanel.load();
}
return simpleConfigPanel;
}
public javax.swing.JPanel getAdvancedConfiguration(String context) {
if (advancedConfigPanel == null) {
advancedConfigPanel = new KeywordSearchConfigurationPanel();
}
advancedConfigPanel.load();
return advancedConfigPanel;
}
public void saveAdvancedConfiguration() {
if (advancedConfigPanel != null) {
advancedConfigPanel.store();
}
if (simpleConfigPanel != null) {
simpleConfigPanel.load();
}
}
public void saveSimpleConfiguration() {
KeywordSearchListsXML.getCurrent().save();
}
// RJCTODO
// public void saveAdvancedConfiguration() {
// if (advancedConfigPanel != null) {
// advancedConfigPanel.store();
// }
//
// if (simpleConfigPanel != null) {
// simpleConfigPanel.load();
// }
// }
//
// public void saveSimpleConfiguration() {
// KeywordSearchListsXML.getCurrent().save();
// }
/**
* The module maintains background threads, return true if background
@ -473,15 +427,15 @@ public final class KeywordSearchIngestModule {
*
* @return
*/
public boolean hasBackgroundJobsRunning() {
if ((currentSearcher != null && searcherDone == false)
|| (finalSearcherDone == false)) {
return true;
} else {
return false;
}
}
// RJCTODO:
// public boolean hasBackgroundJobsRunning() {
// if ((currentSearcher != null && searcherDone == false)
// || (finalSearcherDone == false)) {
// return true;
// } else {
// return false;
// }
// }
/**
* Commits index and notifies listeners of index update
@ -540,7 +494,7 @@ public final class KeywordSearchIngestModule {
msg.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("</td><td>").append(error_io).append("</td></tr>");
msg.append("</table>");
String indexStats = msg.toString();
logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats);
logger.log(Level.INFO, "Keyword Indexing Completed: {0}", indexStats);
// RJCTODO
// services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats));
if (error_index > 0) {
@ -561,10 +515,8 @@ public final class KeywordSearchIngestModule {
try {
final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles));
} catch (NoOpenCoreException ex) {
} catch (NoOpenCoreException | KeywordSearchModuleException ex) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex);
} catch (KeywordSearchModuleException se) {
logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se);
}
}
@ -611,7 +563,7 @@ public final class KeywordSearchIngestModule {
}
logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString());
logger.log(Level.INFO, "Set new effective keyword lists: {0}", sb.toString());
}
@ -703,8 +655,7 @@ public final class KeywordSearchIngestModule {
}
if (fileExtract == null) {
logger.log(Level.INFO, "No text extractor found for file id:"
+ aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat);
logger.log(Level.INFO, "No text extractor found for file id:{0}, name: {1}, detected format: {2}", new Object[]{aFile.getId(), aFile.getName(), detectedFormat});
return false;
}
@ -727,7 +678,7 @@ public final class KeywordSearchIngestModule {
ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED);
return true;
} else {
logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
return false;
}
@ -833,7 +784,7 @@ public final class KeywordSearchIngestModule {
try {
//logger.log(Level.INFO, "indexing: " + aFile.getName());
if (!extractTextAndIndex(aFile, detectedFormat)) {
logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
} else {
ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED);
@ -877,9 +828,9 @@ public final class KeywordSearchIngestModule {
private boolean finalRun = false;
Searcher(List<String> keywordLists) {
this.keywordLists = new ArrayList<String>(keywordLists);
this.keywords = new ArrayList<Keyword>();
this.keywordToList = new HashMap<String, KeywordSearchListsAbstract.KeywordSearchList>();
this.keywordLists = new ArrayList<>(keywordLists);
this.keywords = new ArrayList<>();
this.keywordToList = new HashMap<>();
//keywords are populated as searcher runs
}
@ -944,7 +895,7 @@ public final class KeywordSearchIngestModule {
for (Keyword keywordQuery : keywords) {
if (this.isCancelled()) {
logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery());
logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keywordQuery.getQuery());
return null;
}
@ -975,7 +926,7 @@ public final class KeywordSearchIngestModule {
final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds);
del.addFilter(dataSourceFilter);
Map<String, List<ContentHit>> queryResult = null;
Map<String, List<ContentHit>> queryResult;
try {
queryResult = del.performQuery();
@ -986,7 +937,7 @@ public final class KeywordSearchIngestModule {
//likely case has closed and threads are being interrupted
return null;
} catch (CancellationException e) {
logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keywordQuery.getQuery());
return null;
} catch (Exception e) {
logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
@ -1002,7 +953,7 @@ public final class KeywordSearchIngestModule {
//write results to BB
//new artifacts created, to report to listeners
Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();
Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
//scale progress bar to be more granular: per-result sub-progress within each keyword
int totalUnits = newResults.size();
@ -1019,7 +970,7 @@ public final class KeywordSearchIngestModule {
for (final Keyword hitTerm : newResults.keySet()) {
//checking for cancellation between results
if (this.isCancelled()) {
logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery());
logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: {0}", keywordQuery.getQuery());
return null;
}
@ -1036,7 +987,7 @@ public final class KeywordSearchIngestModule {
for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
// get the snippet for the first hit in the file
String snippet = null;
String snippet;
final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
int chunkId = contentHitsFlattened.get(hitFile);
try {
@ -1053,7 +1004,7 @@ public final class KeywordSearchIngestModule {
// write the blackboard artifact for this keyword in this file
KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
if (written == null) {
logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hitFile, hitTerm.toString()});
continue;
}
@ -1128,7 +1079,7 @@ public final class KeywordSearchIngestModule {
}
detailsSb.append("</table>");
// services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
}
} //for each file hit
@ -1156,7 +1107,7 @@ public final class KeywordSearchIngestModule {
try {
finalizeSearcher();
stopWatch.stop();
logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs.");
logger.log(Level.INFO, "Searcher took to run: {0} secs.", stopWatch.getElapsedTimeSecs());
} finally {
searcherLock.unlock();
}
@ -1226,13 +1177,13 @@ public final class KeywordSearchIngestModule {
//calculate new results by subtracting results already obtained in this ingest
//update currentResults map with the new results
private Map<Keyword, List<ContentHit>> filterResults(Map<String, List<ContentHit>> queryResult, boolean isRegex) {
Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();
Map<Keyword, List<ContentHit>> newResults = new HashMap<>();
for (String termResult : queryResult.keySet()) {
List<ContentHit> queryTermResults = queryResult.get(termResult);
//translate to list of IDs that we keep track of
List<Long> queryTermResultsIDs = new ArrayList<Long>();
List<Long> queryTermResultsIDs = new ArrayList<>();
for (ContentHit ch : queryTermResults) {
queryTermResultsIDs.add(ch.getId());
}
@ -1249,7 +1200,7 @@ public final class KeywordSearchIngestModule {
//add to new results
List<ContentHit> newResultsFs = newResults.get(termResultK);
if (newResultsFs == null) {
newResultsFs = new ArrayList<ContentHit>();
newResultsFs = new ArrayList<>();
newResults.put(termResultK, newResultsFs);
}
newResultsFs.add(res);
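
The filterResults() hunk above boils down to a per-term set difference against content IDs already reported earlier in the ingest. A minimal, self-contained sketch of that dedup step follows; Hit and the currentResults map here are simplified stand-ins for ContentHit and the module's actual bookkeeping, used only to make the idea runnable.
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
class KeywordHitFilterSketch {
    /** Simplified stand-in for a keyword hit carrying a content (file/chunk) ID. */
    static class Hit {
        final long contentId;
        Hit(long contentId) { this.contentId = contentId; }
    }
    // term -> content IDs already reported during this ingest
    private final Map<String, Set<Long>> currentResults = new HashMap<>();
    /** Returns only the hits whose content IDs have not yet been seen for this term. */
    List<Hit> filterNewHits(String term, List<Hit> queryHits) {
        Set<Long> seen = currentResults.get(term);
        if (seen == null) {
            seen = new HashSet<>();
            currentResults.put(term, seen);
        }
        List<Hit> newHits = new ArrayList<>();
        for (Hit hit : queryHits) {
            if (seen.add(hit.contentId)) { // add() returns false for IDs already recorded
                newHits.add(hit);
            }
        }
        return newHits;
    }
}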

View File

@ -0,0 +1,121 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.io.Serializable;
import javax.swing.JPanel;
import org.openide.util.NbBundle;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
/**
* A factory that creates file ingest modules that do keyword searches.
*/
@ServiceProvider(service=IngestModuleFactory.class)
public class KeywordSearchModuleFactory extends AbstractIngestModuleFactory {
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleName");
}
@Override
public String getModuleDescription() {
return NbBundle.getMessage(KeywordSearchIngestModule.class, "HashDbInKeywordSearchIngestModulegestModule.moduleDescription");
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public Serializable getDefaultIngestOptions() {
return new IngestOptions();
}
@Override
public boolean providesIngestOptionsPanels() {
return true;
}
@Override
public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
KeywordSearchListsXML.getCurrent().reload();
return new KeywordSearchIngestSimplePanel(); // RJCTODO: Load required?
}
@Override
public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
if (!(ingestOptionsPanel instanceof KeywordSearchIngestSimplePanel)) {
throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
}
KeywordSearchIngestSimplePanel panel = (KeywordSearchIngestSimplePanel)ingestOptionsPanel;
panel.store();
return new IngestOptions(); // RJCTODO
}
@Override
public boolean providesGlobalOptionsPanels() {
return true;
}
@Override
public JPanel getGlobalOptionsPanel() {
KeywordSearchConfigurationPanel globalOptionsPanel = new KeywordSearchConfigurationPanel();
globalOptionsPanel.load();
return globalOptionsPanel;
}
@Override
public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
if (!(globalOptionsPanel instanceof KeywordSearchConfigurationPanel)) {
throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
}
KeywordSearchConfigurationPanel panel = (KeywordSearchConfigurationPanel)globalOptionsPanel;
panel.store();
// RJCTODO: Need simple panel store? May need to change implementation...see also hash db factory
}
@Override
public boolean isFileIngestModuleFactory() {
return true;
}
@Override
public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
return new KeywordSearchIngestModule();
}
private static class IngestOptions implements Serializable {
// RJCTODO: Any options here?
// boolean alwaysCalcHashes = true;
// ArrayList<String> hashSetNames = new ArrayList<>();
}
}
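
A factory annotated with @ServiceProvider, like the one above, would presumably be discovered through the NetBeans Lookup and asked for per-ingest module instances. The consumer side is not part of this diff, so the sketch below is an assumption about how such factories might be driven; it uses only methods the factory overrides (isFileIngestModuleFactory, getDefaultIngestOptions, createFileIngestModule), and whether those live on IngestModuleFactory itself or on AbstractIngestModuleFactory is likewise assumed.
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.openide.util.Lookup;
import org.sleuthkit.autopsy.ingest.FileIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
class FactoryDiscoverySketch {
    // Build one file ingest module per registered factory, each configured with
    // that factory's default ingest options.
    static List<FileIngestModule> createFileModules() throws IngestModuleFactory.InvalidOptionsException {
        List<FileIngestModule> modules = new ArrayList<>();
        for (IngestModuleFactory factory : Lookup.getDefault().lookupAll(IngestModuleFactory.class)) {
            if (factory.isFileIngestModuleFactory()) {
                Serializable options = factory.getDefaultIngestOptions();
                modules.add(factory.createFileIngestModule(options));
            }
        }
        return modules;
    }
}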

View File

@ -6,6 +6,14 @@
<code-name-base>org.sleuthkit.autopsy.ewfverify</code-name-base>
<suite-component/>
<module-dependencies>
<dependency>
<code-name-base>org.openide.util.lookup</code-name-base>
<build-prerequisite/>
<compile-dependency/>
<run-dependency>
<specification-version>8.19.1</specification-version>
</run-dependency>
</dependency>
<dependency>
<code-name-base>org.sleuthkit.autopsy.core</code-name-base>
<build-prerequisite/>

View File

@ -0,0 +1,63 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ewfverify;
import java.io.Serializable;
import org.openide.util.lookup.ServiceProvider;
import org.sleuthkit.autopsy.coreutils.Version;
import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
/**
* A factory that creates data source ingest modules that verify the integrity of EWF (E01) image files.
*/
@ServiceProvider(service=IngestModuleFactory.class)
public class EwfVerifierModuleFactory extends AbstractIngestModuleFactory {
@Override
public String getModuleDisplayName() {
return getModuleName();
}
static String getModuleName() {
return "EWF Verify"; // RJCTODO: Is this what we want here?
}
@Override
public String getModuleDescription() {
return "Validates the integrity of E01 files.";
}
@Override
public String getModuleVersionNumber() {
return Version.getVersion();
}
@Override
public boolean isDataSourceIngestModuleFactory() {
return true;
}
@Override
public DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
return new EwfVerifyIngestModule();
}
}
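
Assuming the create/init/process/complete lifecycle implied by the @Override methods in EwfVerifyIngestModule below, a caller might drive a module from this factory roughly as sketched here. The null options argument and the externally supplied IngestDataSourceWorkerController are assumptions: the factory ignores its options parameter, the controller's construction is not shown in this diff, and init()/complete() are assumed to be declared on the ingest module supertype.
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
import org.sleuthkit.datamodel.Content;
class EwfVerifyDriverSketch {
    // Assumed call order: init() once for the task, process() for the data
    // source, then complete(); stop() would be called instead on shutdown.
    static void runOnce(EwfVerifierModuleFactory factory, Content dataSource, long taskId,
            IngestDataSourceWorkerController statusHelper) throws IngestModuleFactory.InvalidOptionsException {
        DataSourceIngestModule module = factory.createDataSourceIngestModule(null);
        module.init(taskId);
        module.process(dataSource, statusHelper);
        module.complete();
    }
}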

View File

@ -1,7 +1,7 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Copyright 2013-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
@ -16,204 +16,180 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package org.sleuthkit.autopsy.ewfverify;
//
//import java.security.MessageDigest;
//import java.security.NoSuchAlgorithmException;
//import java.util.logging.Level;
//import java.util.logging.Logger;
//import javax.xml.bind.DatatypeConverter;
//import org.sleuthkit.autopsy.casemodule.Case;
//import org.sleuthkit.autopsy.coreutils.Version;
//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
//import org.sleuthkit.autopsy.ingest.IngestMessage;
//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
//import org.sleuthkit.autopsy.ingest.IngestServices;
//import org.sleuthkit.datamodel.Content;
//import org.sleuthkit.datamodel.Image;
//import org.sleuthkit.datamodel.SleuthkitCase;
//import org.sleuthkit.datamodel.TskCoreException;
//import org.sleuthkit.datamodel.TskData;
///**
// * Data Source Ingest Module that generates a hash of an E01 image file and
// * verifies it with the value stored in the image.
// *
// * @author jwallace
// */
//public class EwfVerifyIngestModule extends IngestModuleDataSource {
// private static final String MODULE_NAME = "EWF Verify";
// private static final String MODULE_VERSION = Version.getVersion();
// private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files.";
// private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
// private IngestServices services;
// private volatile boolean running = false;
// private Image img;
// private String imgName;
// private MessageDigest messageDigest;
// private static Logger logger = null;
// private static int messageId = 0;
// private boolean verified = false;
// private boolean skipped = false;
// private String calculatedHash = "";
// private String storedHash = "";
// private SleuthkitCase skCase;
//
// public EwfVerifyIngestModule() {
// }
//
// @Override
// public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
// imgName = dataSource.getName();
// try {
// img = dataSource.getImage();
// } catch (TskCoreException ex) {
// img = null;
// logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
// "Error processing " + imgName));
// return;
// }
//
// // Skip images that are not E01
// if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
// img = null;
// logger.log(Level.INFO, "Skipping non-ewf image " + imgName);
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
// "Skipping non-ewf image " + imgName));
// skipped = true;
// return;
// }
//
//
// if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
// {
// storedHash = img.getMd5().toLowerCase();
// logger.info("Hash value stored in " + imgName + ": " + storedHash);
//
// }
// else {
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
// "Image " + imgName + " does not have stored hash."));
// return;
// }
//
// logger.log(Level.INFO, "Starting ewf verification of " + img.getName());
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
// "Starting " + imgName));
//
// long size = img.getSize();
// if (size == 0) {
// logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried.");
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
// "Error getting size of " + imgName + ". Image will not be processed."));
// }
//
// // Libewf uses a sector size of 64 times the sector size, which is the
// // motivation for using it here.
// long chunkSize = 64 * img.getSsize();
// chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
//
// int totalChunks = (int) Math.ceil(size / chunkSize);
// logger.log(Level.INFO, "Total chunks = " + totalChunks);
// int read;
//
// byte[] data;
// controller.switchToDeterminate(totalChunks);
//
// running = true;
// // Read in byte size chunks and update the hash value with the data.
// for (int i = 0; i < totalChunks; i++) {
// if (controller.isCancelled()) {
// running = false;
// return;
// }
// data = new byte[ (int) chunkSize ];
// try {
// read = img.read(data, i * chunkSize, chunkSize);
// } catch (TskCoreException ex) {
// String msg = "Error reading " + imgName + " at chunk " + i;
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
// logger.log(Level.SEVERE, msg, ex);
// return;
// }
// messageDigest.update(data);
// controller.progress(i);
// }
//
// // Finish generating the hash and get it as a string value
// calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
// verified = calculatedHash.equals(storedHash);
// logger.info("Hash calculated from " + imgName + ": " + calculatedHash);
// running = false;
// }
//
// @Override
// public void init(IngestModuleInit initContext) {
// services = IngestServices.getDefault();
// skCase = Case.getCurrentCase().getSleuthkitCase();
// running = false;
// verified = false;
// skipped = false;
// img = null;
// imgName = "";
// storedHash = "";
// calculatedHash = "";
//
// if (logger == null) {
// logger = services.getLogger(this);
// }
//
// if (messageDigest == null) {
// try {
// messageDigest = MessageDigest.getInstance("MD5");
// } catch (NoSuchAlgorithmException ex) {
// logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
// throw new RuntimeException("Failed to get MD5 algorithm");
// }
// } else {
// messageDigest.reset();
// }
// }
//
// @Override
// public void complete() {
// logger.info("complete() " + this.getName());
// if (skipped == false) {
// String msg = verified ? " verified" : " not verified";
// String extra = "<p>EWF Verification Results for " + imgName + "</p>";
// extra += "<li>Result:" + msg + "</li>";
// extra += "<li>Calculated hash: " + calculatedHash + "</li>";
// extra += "<li>Stored hash: " + storedHash + "</li>";
// services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
// logger.info(imgName + msg);
// }
// }
//
// @Override
// public void stop() {
// running = false;
// }
//
// @Override
// public String getName() {
// return MODULE_NAME;
// }
//
// @Override
// public String getVersion() {
// return MODULE_VERSION;
// }
//
// @Override
// public String getDescription() {
// return MODULE_DESCRIPTION;
// }
//
// @Override
// public boolean hasBackgroundJobsRunning() {
// return running;
// }
//}
package org.sleuthkit.autopsy.ewfverify;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.bind.DatatypeConverter;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
import org.sleuthkit.autopsy.ingest.IngestMessage;
import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
import org.sleuthkit.autopsy.ingest.IngestServices;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.Image;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskData;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
/**
* Data source ingest module that verifies the integrity of an Expert Witness
* Format (EWF) E01 image file by generating a hash of the file and comparing it
* to the value stored in the image.
*/
public class EwfVerifyIngestModule implements DataSourceIngestModule {
private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
private IngestServices services;
private volatile boolean running = false;
private Image img;
private String imgName;
private MessageDigest messageDigest;
private static Logger logger = null;
private static int messageId = 0;
private boolean verified = false;
private boolean skipped = false;
private String calculatedHash = "";
private String storedHash = "";
EwfVerifyIngestModule() {
}
@Override
public String getDisplayName() {
return EwfVerifierModuleFactory.getModuleName();
}
@Override
public void init(long taskId) {
services = IngestServices.getDefault();
running = false;
verified = false;
skipped = false;
img = null;
imgName = "";
storedHash = "";
calculatedHash = "";
if (logger == null) {
logger = services.getLogger(this);
}
if (messageDigest == null) {
try {
messageDigest = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException ex) {
logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
throw new RuntimeException("Failed to get MD5 algorithm");
}
} else {
messageDigest.reset();
}
}
@Override
public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
imgName = dataSource.getName();
try {
img = dataSource.getImage();
} catch (TskCoreException ex) {
img = null;
logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error processing " + imgName));
return;
}
// Skip images that are not E01
if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
img = null;
logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
"Skipping non-ewf image " + imgName));
skipped = true;
return;
}
if ((img.getMd5() != null) && !img.getMd5().isEmpty()) {
storedHash = img.getMd5().toLowerCase();
logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
} else {
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Image " + imgName + " does not have stored hash."));
return;
}
logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
"Starting " + imgName));
long size = img.getSize();
if (size == 0) {
logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
"Error getting size of " + imgName + ". Image will not be processed."));
}
// Libewf uses a chunk size of 64 times the sector size, which is the
// motivation for using that as the chunk size here.
long chunkSize = 64 * img.getSsize();
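// e.g., with 512-byte sectors this is 64 * 512 = 32,768 bytes (32 KB), which matches DEFAULT_CHUNK_SIZE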
chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
int totalChunks = (int) Math.ceil((double) size / (double) chunkSize); // cast to double so a partial final chunk is not lost to integer division
logger.log(Level.INFO, "Total chunks = {0}", totalChunks);
int read;
byte[] data;
statusHelper.switchToDeterminate(totalChunks);
running = true;
// Read in byte size chunks and update the hash value with the data.
for (int i = 0; i < totalChunks; i++) {
if (statusHelper.isCancelled()) {
running = false;
return;
}
data = new byte[ (int) chunkSize ];
try {
read = img.read(data, i * chunkSize, chunkSize);
} catch (TskCoreException ex) {
String msg = "Error reading " + imgName + " at chunk " + i;
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
logger.log(Level.SEVERE, msg, ex);
return;
}
messageDigest.update(data, 0, read); // hash only the bytes actually read; the final chunk may be short
statusHelper.progress(i);
}
// Finish generating the hash and get it as a string value
calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
verified = calculatedHash.equals(storedHash);
logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash});
running = false;
}
@Override
public void complete() {
logger.log(Level.INFO, "complete() {0}", getDisplayName());
if (skipped == false) {
String msg = verified ? " verified" : " not verified";
String extra = "<p>EWF Verification Results for " + imgName + "</p>";
extra += "<li>Result:" + msg + "</li>";
extra += "<li>Calculated hash: " + calculatedHash + "</li>";
extra += "<li>Stored hash: " + storedHash + "</li>";
services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
}
}
@Override
public void stop() {
}
}
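
The verification idea in process() above, hashing fixed-size chunks and comparing the hex digest case-insensitively to the stored value, can be illustrated with a standalone sketch. A local file read through FileInputStream stands in for Image.read(); the 32 KB buffer mirrors DEFAULT_CHUNK_SIZE, and feeding only the bytes actually read into the digest keeps a short final chunk from skewing the hash.
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.xml.bind.DatatypeConverter;
class ChunkedMd5VerifySketch {
    // Returns true if the MD5 of the file at 'path' matches the stored hex digest.
    static boolean verify(String path, String storedHash) throws IOException, NoSuchAlgorithmException {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        byte[] chunk = new byte[32 * 1024];
        try (InputStream in = new FileInputStream(path)) {
            int read;
            while ((read = in.read(chunk)) != -1) {
                md5.update(chunk, 0, read); // only the bytes actually read
            }
        }
        String calculatedHash = DatatypeConverter.printHexBinary(md5.digest()).toLowerCase();
        return calculatedHash.equals(storedHash.toLowerCase());
    }
}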