Mirror of https://github.com/overcuriousity/autopsy-flatpak.git (synced 2025-07-12 07:56:16 +00:00)
Work towards converting core ingest modules to new ingest API
This commit is contained in: parent bb2f26d8af, commit 2b95138f70
@@ -32,6 +32,5 @@ public interface DataSourceIngestModule extends IngestModule {
      * @param statusHelper A status helper to be used to report progress and
      * detect task cancellation.
      */
-    // void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
+    void process(Content dataSource, IngestDataSourceWorkerController statusHelper); // RJCTODO: Change name of IngestDataSourceWorkerController class
-    void process(Content dataSource);
 }
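For orientation, a minimal sketch of what an implementor of the revised interface above might look like. This is hypothetical code, not part of this commit; SampleDataSourceIngestModule is an invented name, and any lifecycle methods inherited from IngestModule (init/complete/stop, as seen on FileIngestModule elsewhere in this diff) are omitted.

    // Hypothetical illustration only; not code from this commit.
    class SampleDataSourceIngestModule implements DataSourceIngestModule {
        @Override
        public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
            // Do the work here, periodically consulting statusHelper for
            // cancellation and progress reporting (see the
            // IngestDataSourceWorkerController hunk later in this diff).
        }
    }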
@@ -18,157 +18,154 @@
  */
 package org.sleuthkit.autopsy.ingest;
 
-//import java.awt.EventQueue;
+import java.awt.EventQueue;
-//import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.Lock;
-//import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
-//import java.util.logging.Level;
+import java.util.logging.Level;
-//import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.Logger;
-//import javax.swing.SwingWorker;
+import javax.swing.SwingWorker;
-//import org.netbeans.api.progress.ProgressHandle;
+import org.netbeans.api.progress.ProgressHandle;
-//import org.netbeans.api.progress.ProgressHandleFactory;
+import org.netbeans.api.progress.ProgressHandleFactory;
-//import org.openide.util.Cancellable;
+import org.openide.util.Cancellable;
-//import org.sleuthkit.autopsy.coreutils.PlatformUtil;
+import org.sleuthkit.autopsy.coreutils.PlatformUtil;
-//import org.sleuthkit.autopsy.coreutils.StopWatch;
+import org.sleuthkit.autopsy.coreutils.StopWatch;
-//import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
+import org.sleuthkit.autopsy.ingest.IngestManager.IngestModuleEvent;
-//import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Content;
 
 /**
  * Worker thread that runs a data source-level ingest module (image, file set virt dir, etc).
  * Used to process only a single data-source and single module.
  */
-// class IngestDataSourceThread extends SwingWorker<Void, Void> {
+class IngestDataSourceThread extends SwingWorker<Void, Void> {
-//
+
-// private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
+    private final Logger logger = Logger.getLogger(IngestDataSourceThread.class.getName());
-// private ProgressHandle progress;
+    private ProgressHandle progress;
-// private final Content dataSource;
+    private final Content dataSource;
-// private final DataSourceIngestModule module;
+    private final DataSourceIngestModule module;
-// private IngestDataSourceWorkerController controller;
+    private IngestDataSourceWorkerController controller;
-// private final IngestManager manager;
+    private final IngestManager manager;
-// private final IngestModuleInit init;
+    private boolean inited;
-// private boolean inited;
+    //current method of enqueuing data source ingest modules with locks and internal lock queue
-// //current method of enqueuing data source ingest modules with locks and internal lock queue
+    //ensures that we init, run and complete a single data source ingest module at a time
-// //ensures that we init, run and complete a single data source ingest module at a time
+    //uses fairness policy to run them in order enqueued
-// //uses fairness policy to run them in order enqueued
+    private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
-// private static final Lock dataSourceIngestModuleLock = new ReentrantReadWriteLock(true).writeLock();
+
-//
+    IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module) {
-// IngestDataSourceThread(IngestManager manager, Content dataSource, DataSourceIngestModule module, IngestModuleInit init) {
+        this.manager = manager;
-// this.manager = manager;
+        this.dataSource = dataSource;
-// this.dataSource = dataSource;
+        this.module = module;
-// this.module = module;
+        this.inited = false;
-// this.init = init;
+    }
-// this.inited = false;
+
-// }
+    Content getContent() {
-//
+        return dataSource;
-// Content getContent() {
+    }
-// return dataSource;
+
-// }
+    DataSourceIngestModule getModule() {
-//
+        return module;
-// DataSourceIngestModule getModule() {
+    }
-// return module;
+
-// }
+    public void init() {
-//
+
-// public void init() {
+        logger.log(Level.INFO, "Initializing module: {0}", module.getDisplayName());
-//
+        try {
-// logger.log(Level.INFO, "Initializing module: " + module.getName());
+            module.init(dataSource.getId());
-// try {
+            inited = true;
-// module.init(dataSource.getId());
+        } catch (Exception e) {
-// inited = true;
+            logger.log(Level.INFO, "Failed initializing module: {0}, will not run.", module.getDisplayName());
-// } catch (Exception e) {
+            //will not run
-// logger.log(Level.INFO, "Failed initializing module: " + module.getName() + ", will not run.");
+            inited = false;
-// //will not run
+            throw e;
-// inited = false;
+        }
-// throw e;
+    }
-// }
+
-// }
+    @Override
-//
+    protected Void doInBackground() throws Exception {
-// @Override
+
-// protected Void doInBackground() throws Exception {
+        logger.log(Level.INFO, "Pending module: {0}", module.getDisplayName());
-//
+
-// logger.log(Level.INFO, "Pending module: " + module.getName());
+        final String displayName = module.getDisplayName() + " dataSource id:" + dataSource.getId();
-//
+        progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
-// final String displayName = module.getName() + " dataSource id:" + dataSource.getId();
+            @Override
-// progress = ProgressHandleFactory.createHandle(displayName + " (Pending...)", new Cancellable() {
+            public boolean cancel() {
-// @Override
+                logger.log(Level.INFO, "DataSource ingest module {0} cancelled by user.", module.getDisplayName());
-// public boolean cancel() {
+                if (progress != null) {
-// logger.log(Level.INFO, "DataSource ingest module " + module.getName() + " cancelled by user.");
+                    progress.setDisplayName(displayName + " (Cancelling...)");
-// if (progress != null) {
+                }
-// progress.setDisplayName(displayName + " (Cancelling...)");
+                return IngestDataSourceThread.this.cancel(true);
-// }
+            }
-// return IngestDataSourceThread.this.cancel(true);
+        });
-// }
+        progress.start();
-// });
+        progress.switchToIndeterminate();
-// progress.start();
+
-// progress.switchToIndeterminate();
+        dataSourceIngestModuleLock.lock();
-//
+        try {
-// dataSourceIngestModuleLock.lock();
+            if (this.isCancelled()) {
-// try {
+                logger.log(Level.INFO, "Cancelled while pending, module: {0}", module.getDisplayName());
-// if (this.isCancelled()) {
+                return Void.TYPE.newInstance();
-// logger.log(Level.INFO, "Cancelled while pending, module: " + module.getName());
+            }
-// return Void.TYPE.newInstance();
+            logger.log(Level.INFO, "Starting module: {0}", module.getDisplayName());
-// }
+            logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
-// logger.log(Level.INFO, "Starting module: " + module.getName());
+            progress.setDisplayName(displayName);
-// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+
-// progress.setDisplayName(displayName);
+            if (inited == false) {
-//
+                logger.log(Level.INFO, "Module wasn''t initialized, will not run: {0}", module.getDisplayName());
-// if (inited == false) {
+                return Void.TYPE.newInstance();
-// logger.log(Level.INFO, "Module wasn't initialized, will not run: " + module.getName());
+            }
-// return Void.TYPE.newInstance();
+            logger.log(Level.INFO, "Starting processing of module: {0}", module.getDisplayName());
-// }
+
-// logger.log(Level.INFO, "Starting processing of module: " + module.getName());
+            controller = new IngestDataSourceWorkerController(this, progress);
-//
+
-// controller = new IngestDataSourceWorkerController(this, progress);
+            if (isCancelled()) {
-//
+                logger.log(Level.INFO, "Terminating DataSource ingest module {0} due to cancellation.", module.getDisplayName());
-// if (isCancelled()) {
+                return Void.TYPE.newInstance();
-// logger.log(Level.INFO, "Terminating DataSource ingest module " + module.getName() + " due to cancellation.");
+            }
-// return Void.TYPE.newInstance();
+            final StopWatch timer = new StopWatch();
-// }
+            timer.start();
-// final StopWatch timer = new StopWatch();
+            try {
-// timer.start();
+                // RJCTODO
-// try {
+                // module.process(pipelineContext, dataSource, controller);
-// // RJCTODO
+            } catch (Exception e) {
-//// module.process(pipelineContext, dataSource, controller);
+                logger.log(Level.WARNING, "Exception in module: " + module.getDisplayName() + " DataSource: " + dataSource.getName(), e);
-// } catch (Exception e) {
+            } finally {
-// logger.log(Level.WARNING, "Exception in module: " + module.getName() + " DataSource: " + dataSource.getName(), e);
+                timer.stop();
-// } finally {
+                logger.log(Level.INFO, "Done processing of module: {0} took {1} secs. to process()", new Object[]{module.getDisplayName(), timer.getElapsedTimeSecs()});
-// timer.stop();
+
-// logger.log(Level.INFO, "Done processing of module: " + module.getName()
+
-// + " took " + timer.getElapsedTimeSecs() + " secs. to process()");
+                //cleanup queues (worker and DataSource/module)
-//
+                manager.removeDataSourceIngestWorker(this);
-//
+
-// //cleanup queues (worker and DataSource/module)
+                if (!this.isCancelled()) {
-// manager.removeDataSourceIngestWorker(this);
+                    logger.log(Level.INFO, "Module {0} completed", module.getDisplayName());
-//
+                    try {
-// if (!this.isCancelled()) {
+                        module.complete();
-// logger.log(Level.INFO, "Module " + module.getName() + " completed");
+                    } catch (Exception e) {
-// try {
+                        logger.log(Level.INFO, "Error completing the module " + module.getDisplayName(), e);
-// module.complete();
+                    }
-// } catch (Exception e) {
+                    IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getDisplayName());
-// logger.log(Level.INFO, "Error completing the module " + module.getName(), e);
+                } else {
-// }
+                    logger.log(Level.INFO, "Module {0} stopped", module.getDisplayName());
-// IngestManager.fireModuleEvent(IngestModuleEvent.COMPLETED.toString(), module.getName());
+                    try {
-// } else {
+                        module.stop();
-// logger.log(Level.INFO, "Module " + module.getName() + " stopped");
+                    } catch (Exception e) {
-// try {
+                        logger.log(Level.INFO, "Error stopping the module" + module.getDisplayName(), e);
-// module.stop();
+                    }
-// } catch (Exception e) {
+                    IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getDisplayName());
-// logger.log(Level.INFO, "Error stopping the module" + module.getName(), e);
+                }
-// }
+
-// IngestManager.fireModuleEvent(IngestModuleEvent.STOPPED.toString(), module.getName());
+            }
-// }
+            return Void.TYPE.newInstance();
-//
+        } finally {
-// }
+            //release the lock so next module can run
-// return Void.TYPE.newInstance();
+            dataSourceIngestModuleLock.unlock();
-// } finally {
+            EventQueue.invokeLater(new Runnable() {
-// //release the lock so next module can run
+                @Override
-// dataSourceIngestModuleLock.unlock();
+                public void run() {
-// EventQueue.invokeLater(new Runnable() {
+                    progress.finish();
-// @Override
+                }
-// public void run() {
+            });
-// progress.finish();
+            logger.log(Level.INFO, "Done running module: {0}", module.getDisplayName());
-// }
+            logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
-// });
+        }
-// logger.log(Level.INFO, "Done running module: " + module.getName());
+    }
-// logger.log(Level.INFO, PlatformUtil.getAllMemUsageInfo());
+}
-// }
-// }
-//}
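The fairness-policy lock in the class above is what serializes these workers. As a standalone illustration of the same idiom (this snippet is not from the commit; names are invented), a fair write lock grants waiting threads the lock in roughly FIFO order, so tasks run one at a time, in the order they arrived:

    import java.util.concurrent.locks.Lock;
    import java.util.concurrent.locks.ReentrantReadWriteLock;

    class SerializedRunner {
        // Fairness = true: waiting threads acquire in approximately FIFO order.
        private static final Lock lock = new ReentrantReadWriteLock(true).writeLock();

        static void runExclusively(Runnable task) {
            lock.lock();
            try {
                task.run(); // at most one task body executes at any moment
            } finally {
                lock.unlock(); // hand the lock to the next enqueued worker
            }
        }
    }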
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,64 +18,64 @@
  */
 package org.sleuthkit.autopsy.ingest;
 
-//import org.netbeans.api.progress.ProgressHandle;
+import org.netbeans.api.progress.ProgressHandle;
 
-// RJCTODO: Rework or replace this code
+// RJCTODO: This could use a renaming, really don't want it long-term, but maybe need to keep it for 3.1 DISCUSS
 /**
  * Controller for DataSource level ingest modules
  * Used by modules to check task status and to post progress to
  */
-//public class IngestDataSourceWorkerController {
+public class IngestDataSourceWorkerController {
-//
+
-// private IngestDataSourceThread worker;
+    private IngestDataSourceThread worker;
-// private ProgressHandle progress;
+    private ProgressHandle progress;
-//
+
-// /**
+    /**
-// * Instantiate the controller for the worker
+     * Instantiate the controller for the worker
-// * @param worker underlying DataSource ingest thread
+     * @param worker underlying DataSource ingest thread
-// * @param progress the progress handle
+     * @param progress the progress handle
-// */
+     */
-// IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
+    IngestDataSourceWorkerController(IngestDataSourceThread worker, ProgressHandle progress) {
-// this.worker = worker;
+        this.worker = worker;
-// this.progress = progress;
+        this.progress = progress;
-// }
+    }
-//
+
-// /**
+    /**
-// * Check if the task has been cancelled. This should be polled by the module periodically
+     * Check if the task has been canceled. This should be polled by the module periodically
-// * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup
+     * And the module needs to act, i.e. break out of its processing loop and call its stop() to cleanup
-// *
+     *
-// * @return true if the task has been cancelled, false otherwise
+     * @return true if the task has been canceled, false otherwise
-// */
+     */
-// public boolean isCancelled() {
+    public boolean isCancelled() {
-// return worker.isCancelled();
+        return worker.isCancelled();
-// }
+    }
-//
+
-// /**
+    /**
-// * Update the progress bar and switch to determinate mode once number of total work units is known
+     * Update the progress bar and switch to determinate mode once number of total work units is known
-// * @param workUnits total number of work units for the DataSource ingest task
+     * @param workUnits total number of work units for the DataSource ingest task
-// */
+     */
-// public void switchToDeterminate(int workUnits) {
+    public void switchToDeterminate(int workUnits) {
-// if (progress != null) {
+        if (progress != null) {
-// progress.switchToDeterminate(workUnits);
+            progress.switchToDeterminate(workUnits);
-// }
+        }
-// }
+    }
-//
+
-// /**
+    /**
-// * Update the progress bar and switch to non determinate mode if number of work units is not known
+     * Update the progress bar and switch to non determinate mode if number of work units is not known
-// */
+     */
-// public void switchToInDeterminate() {
+    public void switchToInDeterminate() {
-// if (progress != null) {
+        if (progress != null) {
-// progress.switchToIndeterminate();
+            progress.switchToIndeterminate();
-// }
+        }
-// }
+    }
-//
+
-// /**
+    /**
-// * Update the progress bar with the number of work units performed, if in the determinate mode
+     * Update the progress bar with the number of work units performed, if in the determinate mode
-// * @param workUnits number of work units performed so far by the module
+     * @param workUnits number of work units performed so far by the module
-// */
+     */
-// public void progress(int workUnits) {
+    public void progress(int workUnits) {
-// if (progress != null) {
+        if (progress != null) {
-// progress.progress(worker.getContent().getName(), workUnits);
+            progress.progress(worker.getContent().getName(), workUnits);
-// }
+        }
-// }
+    }
-//}
+}
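To make the intended polling contract concrete, here is a small hypothetical helper (not in this commit; itemCount and processItem() are stand-ins) that a module's process() could call, using only the public methods shown above:

    // Hypothetical usage sketch of IngestDataSourceWorkerController.
    void runWithProgress(IngestDataSourceWorkerController controller, int itemCount) {
        controller.switchToDeterminate(itemCount); // total work units now known
        for (int i = 0; i < itemCount; ++i) {
            if (controller.isCancelled()) {
                return; // break out of the loop so the module's stop() can clean up
            }
            processItem(i); // stand-in for real per-item work
            controller.progress(i + 1); // work units completed so far
        }
    }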
@@ -58,7 +58,7 @@ public class IngestManager {
     // private IngestManagerStats stats; // RJCTODO: Decide whether to reimplement
     private final IngestScheduler scheduler;
     private IngestAbstractFileProcessor abstractFileIngester;
-    // private List<IngestDataSourceThread> dataSourceIngesters; // RJCTODO: Adapt to new paradigm
+    private List<IngestDataSourceThread> dataSourceIngesters;
     private SwingWorker<Object, Void> queueWorker;
     // private final Map<String, IngestModuleAbstractFile.ProcessResult> abstractFileModulesRetValues = new HashMap<>(); RJCTODO: May be obsolete
     private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class);
@@ -672,14 +672,13 @@ public class IngestManager {
         }
     }
 
-    // RJCTODO: Data source ingest is temporarily disabled
     //data source worker to remove itself when complete or interrupted
-    // void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
+    void removeDataSourceIngestWorker(IngestDataSourceThread worker) {
-    // //remove worker
+        //remove worker
-    // synchronized (this) {
+        synchronized (this) {
-    // dataSourceIngesters.remove(worker);
+            dataSourceIngesters.remove(worker);
-    // }
+        }
-    // }
+    }
 
     // RJCTODO: Decide whether or not to reimplement this class
     /**
@@ -1,49 +0,0 @@
-/*
- * Autopsy Forensic Browser
- *
- * Copyright 2012 Basis Technology Corp.
- * Contact: carrier <at> sleuthkit <dot> org
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.sleuthkit.autopsy.ingest;
-
-/**
- *
- * Context passed to a module at initialization time.
- * It may contain module configuration required to initialize some modules.
- */
-public class IngestModuleInit {
-
-    // private String moduleArgs;
-
-    /**
-     * Get module arguments
-     * @return module args string, used by some modules
-     */
-    // public String getModuleArgs() {
-    //     return moduleArgs;
-    // }
-
-    /**
-     * Sets module args. string (only used by module pipeline)
-     * @param moduleArgs arguments to set for the module
-     */
-    // void setModuleArgs(String moduleArgs) {
-    //     this.moduleArgs = moduleArgs;
-    // }
-    //
-
-
-
-}
(File diff suppressed because it is too large.)
@@ -6,6 +6,14 @@
     <code-name-base>org.sleuthkit.autopsy.exifparser</code-name-base>
     <suite-component/>
     <module-dependencies>
+        <dependency>
+            <code-name-base>org.openide.util.lookup</code-name-base>
+            <build-prerequisite/>
+            <compile-dependency/>
+            <run-dependency>
+                <specification-version>8.19.1</specification-version>
+            </run-dependency>
+        </dependency>
         <dependency>
             <code-name-base>org.sleuthkit.autopsy.core</code-name-base>
             <build-prerequisite/>
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2011-2013 Basis Technology Corp.
+ * Copyright 2011-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,229 +16,193 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-//package org.sleuthkit.autopsy.exifparser;
+
-//
+package org.sleuthkit.autopsy.exifparser;
-//import com.drew.imaging.ImageMetadataReader;
+
-//import com.drew.imaging.ImageProcessingException;
+import com.drew.imaging.ImageMetadataReader;
-//import com.drew.lang.GeoLocation;
+import com.drew.imaging.ImageProcessingException;
-//import com.drew.lang.Rational;
+import com.drew.lang.GeoLocation;
-//import com.drew.metadata.Metadata;
+import com.drew.lang.Rational;
-//import com.drew.metadata.exif.ExifIFD0Directory;
+import com.drew.metadata.Metadata;
-//import com.drew.metadata.exif.ExifSubIFDDirectory;
+import com.drew.metadata.exif.ExifIFD0Directory;
-//import com.drew.metadata.exif.GpsDirectory;
+import com.drew.metadata.exif.ExifSubIFDDirectory;
-//import java.io.BufferedInputStream;
+import com.drew.metadata.exif.GpsDirectory;
-//import java.io.IOException;
+import java.io.BufferedInputStream;
-//import java.io.InputStream;
+import java.io.IOException;
-//import java.util.ArrayList;
+import java.io.InputStream;
-//import java.util.Collection;
+import java.util.ArrayList;
-//import java.util.Date;
+import java.util.Collection;
-//import java.util.logging.Level;
+import java.util.Date;
-//import org.sleuthkit.autopsy.coreutils.ImageUtils;
+import java.util.logging.Level;
-//import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.ImageUtils;
-//import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.coreutils.Logger;
-//import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
-//import org.sleuthkit.autopsy.ingest.IngestModuleAbstractFile;
+import org.sleuthkit.autopsy.ingest.IngestServices;
-//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
+import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
-//import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
+import org.sleuthkit.datamodel.AbstractFile;
-//import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
-//import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
-//import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
-//import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.ReadContentInputStream;
-//import org.sleuthkit.datamodel.ReadContentInputStream;
+import org.sleuthkit.datamodel.TskCoreException;
-//import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
-//import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
-//import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
+
 
 /**
  * Ingest module to parse image Exif metadata. Currently only supports JPEG
  * files. Ingests an image file and, if available, adds it's date, latitude,
  * longitude, altitude, device model, and device make to a blackboard artifact.
  */
-//public final class ExifParserFileIngestModule extends IngestModuleAbstractFile {
+public final class ExifParserFileIngestModule implements FileIngestModule {
-//
+
-// private IngestServices services;
+    private IngestServices services;
-// final public static String MODULE_NAME = "Exif Parser";
+    private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
-// final public static String MODULE_VERSION = Version.getVersion();
+    private int filesProcessed = 0;
-// private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName());
+    private boolean filesToFire = false;
-// private static ExifParserFileIngestModule defaultInstance = null;
+
-// private int filesProcessed = 0;
+    ExifParserFileIngestModule() {
-// private boolean filesToFire = false;
+    }
-//
+
-// //file ingest modules require a private constructor
+    @Override
-// //to ensure singleton instances
+    public String getDisplayName() {
-// private ExifParserFileIngestModule() {
+        return ExifParserModuleFactory.getModuleName();
-// }
+    }
-//
+
-// //default instance used for module registration
+    @Override
-// public static synchronized ExifParserFileIngestModule getDefault() {
+    public void init(long taskId) {
-// if (defaultInstance == null) {
+        services = IngestServices.getDefault();
-// defaultInstance = new ExifParserFileIngestModule();
+        logger.log(Level.INFO, "init() {0}", this.toString());
-// }
+        filesProcessed = 0;
-// return defaultInstance;
+        filesToFire = false;
-// }
+    }
-//
+
-// @Override
+    @Override
-// public IngestModuleAbstractFile.ProcessResult process(PipelineContext<IngestModuleAbstractFile> pipelineContext, AbstractFile content) {
+    public void process(AbstractFile content) {
-//
+
-// //skip unalloc
+        //skip unalloc
-// if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
+        if (content.getType().equals(TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS)) {
-// return IngestModuleAbstractFile.ProcessResult.OK;
+            return;
-// }
+        }
-//
+
-// // skip known
+        // skip known
-// if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
+        if (content.getKnown().equals(TskData.FileKnown.KNOWN)) {
-// return IngestModuleAbstractFile.ProcessResult.OK;
+            return;
-// }
+        }
-//
+
-// // update the tree every 1000 files if we have EXIF data that is not being being displayed
+        // update the tree every 1000 files if we have EXIF data that is not being being displayed
-// filesProcessed++;
+        filesProcessed++;
-// if ((filesToFire) && (filesProcessed % 1000 == 0)) {
+        if ((filesToFire) && (filesProcessed % 1000 == 0)) {
-// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
+            services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
-// filesToFire = false;
+            filesToFire = false;
-// }
+        }
-//
+
-// //skip unsupported
+        //skip unsupported
-// if (!parsableFormat(content)) {
+        if (!parsableFormat(content)) {
-// return IngestModuleAbstractFile.ProcessResult.OK;
+            return;
-// }
+        }
-//
+
-// return processFile(content);
+        processFile(content);
-// }
+    }
-//
+
-// public IngestModuleAbstractFile.ProcessResult processFile(AbstractFile f) {
+    public void processFile(AbstractFile f) {
-// InputStream in = null;
+        InputStream in = null;
-// BufferedInputStream bin = null;
+        BufferedInputStream bin = null;
-//
+
-// try {
+        try {
-// in = new ReadContentInputStream(f);
+            in = new ReadContentInputStream(f);
-// bin = new BufferedInputStream(in);
+            bin = new BufferedInputStream(in);
-//
+
-// Collection<BlackboardAttribute> attributes = new ArrayList<BlackboardAttribute>();
+            Collection<BlackboardAttribute> attributes = new ArrayList<BlackboardAttribute>();
-// Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
+            Metadata metadata = ImageMetadataReader.readMetadata(bin, true);
-//
+
-// // Date
+            // Date
-// ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
+            ExifSubIFDDirectory exifDir = metadata.getDirectory(ExifSubIFDDirectory.class);
-// if (exifDir != null) {
+            if (exifDir != null) {
-// Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
+                Date date = exifDir.getDate(ExifSubIFDDirectory.TAG_DATETIME_ORIGINAL);
-// if (date != null) {
+                if (date != null) {
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), MODULE_NAME, date.getTime() / 1000));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED.getTypeID(), getDisplayName(), date.getTime() / 1000));
-// }
+                }
-// }
+            }
-//
+
-// // GPS Stuff
+            // GPS Stuff
-// GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
+            GpsDirectory gpsDir = metadata.getDirectory(GpsDirectory.class);
-// if (gpsDir != null) {
+            if (gpsDir != null) {
-// GeoLocation loc = gpsDir.getGeoLocation();
+                GeoLocation loc = gpsDir.getGeoLocation();
-// if (loc != null) {
+                if (loc != null) {
-// double latitude = loc.getLatitude();
+                    double latitude = loc.getLatitude();
-// double longitude = loc.getLongitude();
+                    double longitude = loc.getLongitude();
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), MODULE_NAME, latitude));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LATITUDE.getTypeID(), getDisplayName(), latitude));
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), MODULE_NAME, longitude));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE.getTypeID(), getDisplayName(), longitude));
-// }
+                }
-//
+
-// Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
+                Rational altitude = gpsDir.getRational(GpsDirectory.TAG_GPS_ALTITUDE);
-// if (altitude != null) {
+                if (altitude != null) {
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), MODULE_NAME, altitude.doubleValue()));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_GEO_ALTITUDE.getTypeID(), getDisplayName(), altitude.doubleValue()));
-// }
+                }
-// }
+            }
-//
+
-// // Device info
+            // Device info
-// ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
+            ExifIFD0Directory devDir = metadata.getDirectory(ExifIFD0Directory.class);
-// if (devDir != null) {
+            if (devDir != null) {
-// String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
+                String model = devDir.getString(ExifIFD0Directory.TAG_MODEL);
-// if (model != null && !model.isEmpty()) {
+                if (model != null && !model.isEmpty()) {
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), MODULE_NAME, model));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MODEL.getTypeID(), getDisplayName(), model));
-// }
+                }
-//
+
-// String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
+                String make = devDir.getString(ExifIFD0Directory.TAG_MAKE);
-// if (make != null && !make.isEmpty()) {
+                if (make != null && !make.isEmpty()) {
-// attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), MODULE_NAME, make));
+                    attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DEVICE_MAKE.getTypeID(), getDisplayName(), make));
-// }
+                }
-// }
+            }
-//
+
-// // Add the attributes, if there are any, to a new artifact
+            // Add the attributes, if there are any, to a new artifact
-// if (!attributes.isEmpty()) {
+            if (!attributes.isEmpty()) {
-// BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
+                BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF);
-// bba.addAttributes(attributes);
+                bba.addAttributes(attributes);
-// filesToFire = true;
+                filesToFire = true;
-// }
+            }
-//
+        } catch (TskCoreException ex) {
-// return IngestModuleAbstractFile.ProcessResult.OK;
+            logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata ({0}).", ex.getLocalizedMessage());
-//
+        } catch (ImageProcessingException ex) {
-// } catch (TskCoreException ex) {
+            logger.log(Level.WARNING, "Failed to process the image file: {0}/{1}({2})", new Object[]{f.getParentPath(), f.getName(), ex.getLocalizedMessage()});
-// logger.log(Level.WARNING, "Failed to create blackboard artifact for exif metadata (" + ex.getLocalizedMessage() + ").");
+        } catch (IOException ex) {
-// } catch (ImageProcessingException ex) {
+            logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
-// logger.log(Level.WARNING, "Failed to process the image file: " + f.getParentPath() + "/" + f.getName() + "(" + ex.getLocalizedMessage() + ")");
+        } finally {
-// } catch (IOException ex) {
+            try {
-// logger.log(Level.WARNING, "IOException when parsing image file: " + f.getParentPath() + "/" + f.getName(), ex);
+                if (in != null) {
-// } finally {
+                    in.close();
-// try {
+                }
-// if (in != null) {
+                if (bin != null) {
-// in.close();
+                    bin.close();
-// }
+                }
-// if (bin != null) {
+            } catch (IOException ex) {
-// bin.close();
+                logger.log(Level.WARNING, "Failed to close InputStream.", ex);
-// }
+            }
-// } catch (IOException ex) {
+        }
-// logger.log(Level.WARNING, "Failed to close InputStream.", ex);
+    }
-// }
+
-// }
+    /**
-//
+     * Checks if should try to attempt to extract exif. Currently checks if JPEG
-// // If we got here, there was an error
+     * image (by signature)
-// return IngestModuleAbstractFile.ProcessResult.ERROR;
+     *
-// }
+     * @param f file to be checked
-//
+     *
-// /**
+     * @return true if to be processed
-// * Checks if should try to attempt to extract exif. Currently checks if JPEG
+     */
-// * image (by signature)
+    private boolean parsableFormat(AbstractFile f) {
-// *
+        return ImageUtils.isJpegFileHeader(f);
-// * @param f file to be checked
+    }
-// *
+
-// * @return true if to be processed
+    @Override
-// */
+    public void complete() {
-// private boolean parsableFormat(AbstractFile f) {
+        logger.log(Level.INFO, "completed exif parsing {0}", this.toString());
-// return ImageUtils.isJpegFileHeader(f);
+        if (filesToFire) {
-// }
+            //send the final new data event
-//
+            services.fireModuleDataEvent(new ModuleDataEvent(getDisplayName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
-// @Override
+        }
-// public void complete() {
+    }
-// logger.log(Level.INFO, "completed exif parsing " + this.toString());
+
-// if (filesToFire) {
+    @Override
-// //send the final new data event
+    public void stop() {
-// services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF));
+    }
-// }
+}
-// }
-//
-// @Override
-// public String getVersion() {
-// return MODULE_VERSION;
-// }
-//
-// @Override
-// public String getName() {
-// return "Exif Image Parser";
-// }
-//
-// @Override
-// public String getDescription() {
-// return "Ingests JPEG files and retrieves their EXIF metadata.";
-// }
-//
-// @Override
-// public void init(IngestModuleInit initContext) {
-// services = IngestServices.getDefault();
-// logger.log(Level.INFO, "init() " + this.toString());
-//
-// filesProcessed = 0;
-// filesToFire = false;
-// }
-//
-// @Override
-// public void stop() {
-// }
-//
-// @Override
-// public boolean hasBackgroundJobsRunning() {
-// return false;
-// }
-//}
ExifParser/src/org/sleuthkit/autopsy/exifparser/ExifParserModuleFactory.java (new executable file, 62 lines)
@@ -0,0 +1,62 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.exifparser;
+
+import java.io.Serializable;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * An factory that creates file ingest modules that do hash database lookups.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class ExifParserModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return "Exif Image Parser";
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return "Ingests JPEG files and retrieves their EXIF metadata.";
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new ExifParserFileIngestModule();
+    }
+}
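A note on the @ServiceProvider registration above: factories registered this way become discoverable through the NetBeans Lookup. How the new ingest framework actually consumes them is not shown in this diff; the following is only a plausible sketch (hypothetical code, not from this commit):

    import java.io.Serializable;
    import org.openide.util.Lookup;

    class FactoryDiscoverySketch {
        static void instantiateFileModules() throws IngestModuleFactory.InvalidOptionsException {
            // Enumerate every factory registered via @ServiceProvider(service = IngestModuleFactory.class).
            for (IngestModuleFactory factory : Lookup.getDefault().lookupAll(IngestModuleFactory.class)) {
                if (factory.isFileIngestModuleFactory()) {
                    // null stands in for default ingest options; the real options value is an assumption.
                    FileIngestModule module = factory.createFileIngestModule(null);
                    // ... hand the module to the pipeline: init(taskId), process(file) per file, complete() ...
                }
            }
        }
    }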
@@ -66,7 +66,7 @@ public class HashDbIngestModule implements FileIngestModule {
     }
 
     @Override
-    public void init(long dataSourceTaskId) {
+    public void init(long taskId) {
         services = IngestServices.getDefault();
         skCase = Case.getCurrentCase().getSleuthkitCase();
 
@@ -76,22 +76,20 @@ public class HashDbIngestModule implements FileIngestModule {
         calcHashesIsSet = hashDbManager.getAlwaysCalculateHashes();
 
         if (knownHashSets.isEmpty()) {
-            // RJCTODO
+            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-            // services.postMessage(IngestMessage.createWarningMessage(++messageId,
+                    this,
-            // this,
+                    NbBundle.getMessage(this.getClass(),
-            // NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.noKnownHashDbSetMsg"),
-            // "HashDbIngestModule.noKnownHashDbSetMsg"),
+                    NbBundle.getMessage(this.getClass(),
-            // NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
-            // "HashDbIngestModule.knownFileSearchWillNotExecuteWarn")));
         }
         if (knownBadHashSets.isEmpty()) {
-            // RJCTODO
+            services.postMessage(IngestMessage.createWarningMessage(++messageId,
-            // services.postMessage(IngestMessage.createWarningMessage(++messageId,
+                    this,
-            // this,
+                    NbBundle.getMessage(this.getClass(),
-            // NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.noKnownBadHashDbSetMsg"),
-            // "HashDbIngestModule.noKnownBadHashDbSetMsg"),
+                    NbBundle.getMessage(this.getClass(),
-            // NbBundle.getMessage(this.getClass(),
+                            "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
-            // "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn")));
         }
     }
 
@@ -122,7 +120,6 @@ public class HashDbIngestModule implements FileIngestModule {
 
         // bail out if we have no hashes set
         if ((knownHashSets.isEmpty()) && (knownBadHashSets.isEmpty()) && (calcHashesIsSet == false)) {
-            // return ProcessResult.OK;
             return;
         }
 
@@ -136,14 +133,14 @@ public class HashDbIngestModule implements FileIngestModule {
             calctime += (System.currentTimeMillis() - calcstart);
         } catch (IOException ex) {
             logger.log(Level.WARNING, "Error calculating hash of file " + name, ex);
-            // services.postMessage(IngestMessage.createErrorMessage(++messageId,
+            services.postMessage(IngestMessage.createErrorMessage(++messageId,
-            // HashDbIngestModule.this,
+                    HashDbIngestModule.this,
-            // NbBundle.getMessage(this.getClass(),
+                    NbBundle.getMessage(this.getClass(),
-            // "HashDbIngestModule.fileReadErrorMsg",
+                            "HashDbIngestModule.fileReadErrorMsg",
-            // name),
+                            name),
-            // NbBundle.getMessage(this.getClass(),
+                    NbBundle.getMessage(this.getClass(),
-            // "HashDbIngestModule.calcHashValueErr",
+                            "HashDbIngestModule.calcHashValueErr",
-            // name)));
+                            name)));
             // return ProcessResult.ERROR;
             return;
         }
@@ -163,14 +160,14 @@ public class HashDbIngestModule implements FileIngestModule {
                 skCase.setKnown(file, TskData.FileKnown.BAD);
             } catch (TskException ex) {
                 logger.log(Level.WARNING, "Couldn't set known bad state for file " + name + " - see sleuthkit log for details", ex);
-                // services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                services.postMessage(IngestMessage.createErrorMessage(++messageId,
-                // HashDbIngestModule.this,
+                        HashDbIngestModule.this,
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.hashLookupErrorMsg",
+                                "HashDbIngestModule.hashLookupErrorMsg",
-                // name),
+                                name),
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.settingKnownBadStateErr",
+                                "HashDbIngestModule.settingKnownBadStateErr",
-                // name)));
+                                name)));
                 // ret = ProcessResult.ERROR;
             }
             String hashSetName = db.getHashSetName();
@@ -194,15 +191,16 @@ public class HashDbIngestModule implements FileIngestModule {
                 lookuptime += (System.currentTimeMillis() - lookupstart);
             } catch (TskException ex) {
                 logger.log(Level.WARNING, "Couldn't lookup known bad hash for file " + name + " - see sleuthkit log for details", ex);
-                // services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                services.postMessage(IngestMessage.createErrorMessage(++messageId,
-                // HashDbIngestModule.this,
+                        HashDbIngestModule.this,
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.hashLookupErrorMsg",
+                                "HashDbIngestModule.hashLookupErrorMsg",
-                // name),
+                                name),
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.lookingUpKnownBadHashValueErr",
+                                "HashDbIngestModule.lookingUpKnownBadHashValueErr",
-                // name)));
+                                name)));
-                // ret = ProcessResult.ERROR;
+                // RJCTODO
+                // ret = ProcessResult.ERROR;
             }
         }
 
@@ -219,29 +217,23 @@ public class HashDbIngestModule implements FileIngestModule {
                         break;
                     } catch (TskException ex) {
                         logger.log(Level.WARNING, "Couldn't set known state for file " + name + " - see sleuthkit log for details", ex);
-                        // services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                        // RJCTODO
-                        // HashDbIngestModule.this,
+                        // ret = ProcessResult.ERROR;
-                        // NbBundle.getMessage(this.getClass(),
-                        // "HashDbIngestModule.hashLookupErrorMsg",
-                        // name),
-                        // NbBundle.getMessage(this.getClass(),
-                        // "HashDbIngestModule.settingsKnownStateErr",
-                        // name)));
-                        // ret = ProcessResult.ERROR;
                     }
                 }
                 lookuptime += (System.currentTimeMillis() - lookupstart);
             } catch (TskException ex) {
                 logger.log(Level.WARNING, "Couldn't lookup known hash for file " + name + " - see sleuthkit log for details", ex);
-                // services.postMessage(IngestMessage.createErrorMessage(++messageId,
+                services.postMessage(IngestMessage.createErrorMessage(++messageId,
-                // HashDbIngestModule.this,
+                        HashDbIngestModule.this,
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.hashLookupErrorMsg",
+                                "HashDbIngestModule.hashLookupErrorMsg",
-                // name),
+                                name),
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.lookingUpKnownHashValueErr",
+                                "HashDbIngestModule.lookingUpKnownHashValueErr",
-                // name)));
+                                name)));
-                // ret = ProcessResult.ERROR;
+                // RJCTODO
+                // ret = ProcessResult.ERROR;
             }
         }
     }
@@ -294,13 +286,13 @@ public class HashDbIngestModule implements FileIngestModule {
 
                 detailsSb.append("</table>");
 
-                // services.postMessage(IngestMessage.createDataMessage(++messageId, this,
+                services.postMessage(IngestMessage.createDataMessage(++messageId, this,
-                // NbBundle.getMessage(this.getClass(),
+                        NbBundle.getMessage(this.getClass(),
-                // "HashDbIngestModule.postToBB.knownBadMsg",
+                                "HashDbIngestModule.postToBB.knownBadMsg",
-                // abstractFile.getName()),
+                                abstractFile.getName()),
-                // detailsSb.toString(),
+                        detailsSb.toString(),
-                // abstractFile.getName() + md5Hash,
+                        abstractFile.getName() + md5Hash,
-                // badFile));
+                        badFile));
             }
             services.fireModuleDataEvent(new ModuleDataEvent(MODULE_NAME, ARTIFACT_TYPE.TSK_HASHSET_HIT, Collections.singletonList(badFile)));
         } catch (TskException ex) {
@@ -337,12 +329,12 @@ public class HashDbIngestModule implements FileIngestModule {
         }
 
         detailsSb.append("</ul>");
-        // services.postMessage(IngestMessage.createMessage(++messageId,
+        services.postMessage(IngestMessage.createMessage(++messageId,
-        // IngestMessage.MessageType.INFO,
+                IngestMessage.MessageType.INFO,
-        // this,
+                this,
-        // NbBundle.getMessage(this.getClass(),
+                NbBundle.getMessage(this.getClass(),
-        // "HashDbIngestModule.complete.hashLookupResults"),
+                        "HashDbIngestModule.complete.hashLookupResults"),
-        // detailsSb.toString()));
+                detailsSb.toString()));
     }
 }
 
@@ -50,10 +50,10 @@ import org.sleuthkit.autopsy.coreutils.MessageNotifyUtil;
 import org.sleuthkit.autopsy.coreutils.StopWatch;
 import org.sleuthkit.autopsy.coreutils.StringExtract.StringExtractUnicodeTable.SCRIPT;
 import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
 import org.sleuthkit.autopsy.ingest.IngestServices;
 import org.sleuthkit.autopsy.ingest.IngestMessage;
 import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
-import org.sleuthkit.autopsy.ingest.IngestModuleInit;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.keywordsearch.Ingester.IngesterException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -77,7 +77,7 @@ import org.sleuthkit.datamodel.TskData.FileKnown;
  *
  * Registered as a module in layer.xml
  */
-public final class KeywordSearchIngestModule {
+public final class KeywordSearchIngestModule implements FileIngestModule {
 
     enum UpdateFrequency {
 
@@ -102,7 +102,6 @@ public final class KeywordSearchIngestModule {
     public static final String MODULE_DESCRIPTION = NbBundle.getMessage(KeywordSearchIngestModule.class,
             "KeywordSearchIngestModule.moduleDescription");
     final public static String MODULE_VERSION = Version.getVersion();
-    private static KeywordSearchIngestModule instance = null;
     private IngestServices services;
     private Ingester ingester = null;
     private volatile boolean commitIndex = false; //whether to commit index next time
@@ -146,28 +145,110 @@ public final class KeywordSearchIngestModule {
     };
     private Map<Long, IngestStatus> ingestStatus;
 
-    //private constructor to ensure singleton instance
-    private KeywordSearchIngestModule() {
+    KeywordSearchIngestModule() {
     }
 
     /**
-     * Returns singleton instance of the module, creates one if needed
+     * Initializes the module for new ingest run Sets up threads, timers,
+     * retrieves settings, keyword lists to run on
      *
-     * @return instance of the module
      */
-    public static synchronized KeywordSearchIngestModule getDefault() {
-        if (instance == null) {
-            instance = new KeywordSearchIngestModule();
-        }
-        return instance;
-    }
+    @Override
+    public void init(long taskId) {
+        logger.log(Level.INFO, "init()");
+        services = IngestServices.getDefault();
+        initialized = false;
 
+        caseHandle = Case.getCurrentCase().getSleuthkitCase();
+
+        tikaFormatDetector = new Tika();
+
+        ingester = Server.getIngester();
+
+        final Server server = KeywordSearch.getServer();
+        try {
+            if (!server.isRunning()) {
+                String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
+                logger.log(Level.SEVERE, msg);
+                String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
+                services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
+                return;
+
+            }
+        } catch (KeywordSearchModuleException ex) {
+            logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
+            //this means Solr is not properly initialized
+            String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
+            String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
+            services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
+            return;
+        }
+
+        //initialize extractors
+        stringExtractor = new AbstractFileStringExtract();
+        stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
+        stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
+
+        //log the scripts used for debugging
+        final StringBuilder sbScripts = new StringBuilder();
+        for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
+            sbScripts.append(s.name()).append(" ");
+        }
+        logger.log(Level.INFO, "Using string extract scripts: {0}", sbScripts.toString());
+
+        textExtractors = new ArrayList<>();
+        //order matters, more specific extractors first
+        textExtractors.add(new AbstractFileHtmlExtract());
+        textExtractors.add(new AbstractFileTikaTextExtract());
+
+        ingestStatus = new HashMap<>();
+
+        keywords = new ArrayList<>();
+        keywordLists = new ArrayList<>();
+        keywordToList = new HashMap<>();
+
+        initKeywords();
+
+        if (keywords.isEmpty() || keywordLists.isEmpty()) {
+            services.postMessage(IngestMessage.createWarningMessage(++messageID, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
+                    NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
+        }
+
+        processedFiles = false;
+        finalSearcherDone = false;
+        searcherDone = true; //make sure to start the initial currentSearcher
+        //keeps track of all results per run not to repeat reporting the same hits
+        currentResults = new HashMap<>();
+
+        curDataSourceIds = new HashSet<>();
+
+        indexer = new Indexer();
+
+        final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
+        logger.log(Level.INFO, "Using commit interval (ms): {0}", updateIntervalMs);
+        logger.log(Level.INFO, "Using searcher interval (ms): {0}", updateIntervalMs);
+
+        commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
+        searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
+
+        initialized = true;
+
+        commitTimer.start();
+        searchTimer.start();
+    }
+
+    @Override
     public void process(AbstractFile abstractFile) {
 
         if (initialized == false) //error initializing indexing/Solr
         {
-            logger.log(Level.WARNING, "Skipping processing, module not initialized, file: " + abstractFile.getName());
+            logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName());
             ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING);
+            return;
+            // RJCTODO
             // return ProcessResult.OK;
         }
         try {
@@ -181,9 +262,12 @@ public final class KeywordSearchIngestModule {
 
         if (abstractFile.getType().equals(TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR)) {
             //skip indexing of virtual dirs (no content, no real name) - will index children files
+            return;
+            // RJCTODO
             // return ProcessResult.OK;
         }
 
+        // RJCTODO
         //check if we should index meta-data only when 1) it is known 2) HashDb module errored on it
         // if (services.getAbstractFileModuleResult(hashDBModuleName) == IngestModuleAbstractFile.ProcessResult.ERROR) {
         //     indexer.indexFile(abstractFile, false);
@@ -195,7 +279,8 @@ public final class KeywordSearchIngestModule {
         if (KeywordSearchSettings.getSkipKnown() && abstractFile.getKnown().equals(FileKnown.KNOWN)) {
             //index meta-data only
             indexer.indexFile(abstractFile, false);
-            // return ProcessResult.OK;
+            // RJCTODO
+            // return ProcessResult.OK;
         }
 
         processedFiles = true;
@@ -206,6 +291,7 @@ public final class KeywordSearchIngestModule {
         //index the file and content (if the content is supported)
         indexer.indexFile(abstractFile, true);
 
+        // RJCTODO
         // return ProcessResult.OK;
     }
 
@@ -213,6 +299,7 @@ public final class KeywordSearchIngestModule {
      * After all files are ingested, execute final index commit and final search
      * Cleanup resources, threads, timers
      */
+    @Override
     public void complete() {
         if (initialized == false) {
             return;
@@ -249,12 +336,10 @@ public final class KeywordSearchIngestModule {
         try {
             final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
             final int numIndexedChunks = KeywordSearch.getServer().queryNumIndexedChunks();
-            logger.log(Level.INFO, "Indexed files count: " + numIndexedFiles);
-            logger.log(Level.INFO, "Indexed file chunks count: " + numIndexedChunks);
-        } catch (NoOpenCoreException ex) {
+            logger.log(Level.INFO, "Indexed files count: {0}", numIndexedFiles);
+            logger.log(Level.INFO, "Indexed file chunks count: {0}", numIndexedChunks);
+        } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
             logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", ex);
-        } catch (KeywordSearchModuleException se) {
-            logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files/chunks: ", se);
         }
 
         //cleanup done in final searcher
@@ -265,6 +350,7 @@ public final class KeywordSearchIngestModule {
     /**
      * Handle stop event (ingest interrupted) Cleanup resources, threads, timers
      */
+    @Override
     public void stop() {
         logger.log(Level.INFO, "stop()");
 
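A recurring change in the hunks above swaps string concatenation in log calls for java.util.logging's parameterized form, which defers message formatting until a handler actually publishes the record. A minimal, self-contained sketch of the pattern (the class name and values are illustrative, not from the commit):

    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class LoggingSketch {
        private static final Logger logger = Logger.getLogger(LoggingSketch.class.getName());

        public static void main(String[] args) {
            String fileName = "report.doc";
            long fileId = 42;
            // One parameter: {0} is filled in from the third argument.
            logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", fileName);
            // Several parameters travel as an Object[]; a doubled '' renders a literal quote.
            logger.log(Level.WARNING, "Failed to extract text, file ''{0}'' (id: {1}).", new Object[]{fileName, fileId});
        }
    }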
@@ -319,152 +405,20 @@ public final class KeywordSearchIngestModule {
         initialized = false;
     }
 
-    public String getName() {
-        return MODULE_NAME;
-    }
-
-    public String getDescription() {
-        return MODULE_DESCRIPTION;
-    }
-
-    public String getVersion() {
-        return MODULE_VERSION;
-    }
-
-    /**
-     * Initializes the module for new ingest run Sets up threads, timers,
-     * retrieves settings, keyword lists to run on
-     *
-     */
-    public void init(IngestModuleInit initContext) {
-        logger.log(Level.INFO, "init()");
-        services = IngestServices.getDefault();
-        initialized = false;
-
-        caseHandle = Case.getCurrentCase().getSleuthkitCase();
-
-        tikaFormatDetector = new Tika();
-
-        ingester = Server.getIngester();
-
-        final Server server = KeywordSearch.getServer();
-        try {
-            if (!server.isRunning()) {
-                String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
-                logger.log(Level.SEVERE, msg);
-                String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
-                // services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
-                return;
-
-            }
-        } catch (KeywordSearchModuleException ex) {
-            logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex);
-            //this means Solr is not properly initialized
-            String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg");
-            String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg);
-            // services.postMessage(IngestMessage.createErrorMessage(++messageID, instance, msg, details));
-            return;
-        }
-
-        //initialize extractors
-        stringExtractor = new AbstractFileStringExtract();
-        stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts());
-        stringExtractor.setOptions(KeywordSearchSettings.getStringExtractOptions());
-
-        //log the scripts used for debugging
-        final StringBuilder sbScripts = new StringBuilder();
-        for (SCRIPT s : KeywordSearchSettings.getStringExtractScripts()) {
-            sbScripts.append(s.name()).append(" ");
-        }
-        logger.log(Level.INFO, "Using string extract scripts: " + sbScripts.toString());
-
-        textExtractors = new ArrayList<AbstractFileExtract>();
-        //order matters, more specific extractors first
-        textExtractors.add(new AbstractFileHtmlExtract());
-        textExtractors.add(new AbstractFileTikaTextExtract());
-
-        ingestStatus = new HashMap<Long, IngestStatus>();
-
-        keywords = new ArrayList<Keyword>();
-        keywordLists = new ArrayList<String>();
-        keywordToList = new HashMap<String, KeywordSearchListsAbstract.KeywordSearchList>();
-
-        initKeywords();
-
-        if (keywords.isEmpty() || keywordLists.isEmpty()) {
-            // services.postMessage(IngestMessage.createWarningMessage(++messageID, instance, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"),
-            //         NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg")));
-        }
-
-        processedFiles = false;
-        finalSearcherDone = false;
-        searcherDone = true; //make sure to start the initial currentSearcher
-        //keeps track of all results per run not to repeat reporting the same hits
-        currentResults = new HashMap<Keyword, List<Long>>();
-
-        curDataSourceIds = new HashSet<Long>();
-
-        indexer = new Indexer();
-
-        final int updateIntervalMs = KeywordSearchSettings.getUpdateFrequency().getTime() * 60 * 1000;
-        logger.log(Level.INFO, "Using commit interval (ms): " + updateIntervalMs);
-        logger.log(Level.INFO, "Using searcher interval (ms): " + updateIntervalMs);
-
-        commitTimer = new Timer(updateIntervalMs, new CommitTimerAction());
-        searchTimer = new Timer(updateIntervalMs, new SearchTimerAction());
-
-        initialized = true;
-
-        commitTimer.start();
-        searchTimer.start();
-    }
-
-    public boolean hasSimpleConfiguration() {
-        return true;
-    }
-
-    public boolean hasAdvancedConfiguration() {
-        return true;
-    }
-
-    public javax.swing.JPanel getSimpleConfiguration(String context) {
-        KeywordSearchListsXML.getCurrent().reload();
-
-        if (null == simpleConfigPanel) {
-            simpleConfigPanel = new KeywordSearchIngestSimplePanel();
-        }
-        else {
-            simpleConfigPanel.load();
-        }
-
-        return simpleConfigPanel;
-    }
-
-    public javax.swing.JPanel getAdvancedConfiguration(String context) {
-        if (advancedConfigPanel == null) {
-            advancedConfigPanel = new KeywordSearchConfigurationPanel();
-        }
-
-        advancedConfigPanel.load();
-        return advancedConfigPanel;
-    }
-
-    public void saveAdvancedConfiguration() {
-        if (advancedConfigPanel != null) {
-            advancedConfigPanel.store();
-        }
-
-        if (simpleConfigPanel != null) {
-            simpleConfigPanel.load();
-        }
-    }
-
-    public void saveSimpleConfiguration() {
-        KeywordSearchListsXML.getCurrent().save();
-    }
+    // RJCTODO
+    // public void saveAdvancedConfiguration() {
+    //     if (advancedConfigPanel != null) {
+    //         advancedConfigPanel.store();
+    //     }
+    //
+    //     if (simpleConfigPanel != null) {
+    //         simpleConfigPanel.load();
+    //     }
+    // }
+    //
+    // public void saveSimpleConfiguration() {
+    //     KeywordSearchListsXML.getCurrent().save();
+    // }
 
     /**
      * The modules maintains background threads, return true if background
@@ -473,15 +427,15 @@ public final class KeywordSearchIngestModule {
      *
      * @return
      */
-    public boolean hasBackgroundJobsRunning() {
-        if ((currentSearcher != null && searcherDone == false)
-                || (finalSearcherDone == false)) {
-            return true;
-        } else {
-            return false;
-        }
-    }
+    // RJCTODO:
+    // public boolean hasBackgroundJobsRunning() {
+    //     if ((currentSearcher != null && searcherDone == false)
+    //             || (finalSearcherDone == false)) {
+    //         return true;
+    //     } else {
+    //         return false;
+    //     }
+    // }
 
     /**
      * Commits index and notifies listeners of index update
@@ -540,7 +494,7 @@ public final class KeywordSearchIngestModule {
         msg.append("<tr><td>").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.errIoLbl")).append("</td><td>").append(error_io).append("</td></tr>");
         msg.append("</table>");
         String indexStats = msg.toString();
-        logger.log(Level.INFO, "Keyword Indexing Completed: " + indexStats);
+        logger.log(Level.INFO, "Keyword Indexing Completed: {0}", indexStats);
         // RJCTODO
         // services.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.kwIdxResultsLbl"), indexStats));
         if (error_index > 0) {
@@ -561,10 +515,8 @@ public final class KeywordSearchIngestModule {
         try {
             final int numIndexedFiles = KeywordSearch.getServer().queryNumIndexedFiles();
             KeywordSearch.fireNumIndexedFilesChange(null, new Integer(numIndexedFiles));
-        } catch (NoOpenCoreException ex) {
+        } catch (NoOpenCoreException | KeywordSearchModuleException ex) {
             logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", ex);
-        } catch (KeywordSearchModuleException se) {
-            logger.log(Level.WARNING, "Error executing Solr query to check number of indexed files: ", se);
         }
     }
 
@@ -611,7 +563,7 @@ public final class KeywordSearchIngestModule {
 
         }
 
-        logger.log(Level.INFO, "Set new effective keyword lists: " + sb.toString());
+        logger.log(Level.INFO, "Set new effective keyword lists: {0}", sb.toString());
 
     }
 
@@ -703,8 +655,7 @@ public final class KeywordSearchIngestModule {
         }
 
         if (fileExtract == null) {
-            logger.log(Level.INFO, "No text extractor found for file id:"
-                    + aFile.getId() + ", name: " + aFile.getName() + ", detected format: " + detectedFormat);
+            logger.log(Level.INFO, "No text extractor found for file id:{0}, name: {1}, detected format: {2}", new Object[]{aFile.getId(), aFile.getName(), detectedFormat});
             return false;
         }
 
@@ -727,7 +678,7 @@ public final class KeywordSearchIngestModule {
             ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED);
             return true;
         } else {
-            logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
+            logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
             ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
             return false;
         }
@@ -833,7 +784,7 @@ public final class KeywordSearchIngestModule {
         try {
             //logger.log(Level.INFO, "indexing: " + aFile.getName());
             if (!extractTextAndIndex(aFile, detectedFormat)) {
-                logger.log(Level.WARNING, "Failed to extract text and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").");
+                logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()});
                 ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT);
             } else {
                 ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED);
@@ -877,9 +828,9 @@ public final class KeywordSearchIngestModule {
         private boolean finalRun = false;
 
         Searcher(List<String> keywordLists) {
-            this.keywordLists = new ArrayList<String>(keywordLists);
-            this.keywords = new ArrayList<Keyword>();
-            this.keywordToList = new HashMap<String, KeywordSearchListsAbstract.KeywordSearchList>();
+            this.keywordLists = new ArrayList<>(keywordLists);
+            this.keywords = new ArrayList<>();
+            this.keywordToList = new HashMap<>();
             //keywords are populated as searcher runs
         }
 
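The Searcher constructor above, like several earlier hunks, adopts two Java 7 idioms: the diamond operator for generic instantiations and multi-catch for previously duplicated exception handlers. A small standalone sketch of both (the exception types and values are illustrative):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class Java7IdiomsSketch {
        public static void main(String[] args) {
            // Diamond operator: type arguments are inferred from the declared type.
            List<String> keywordLists = new ArrayList<>();
            Map<String, List<Long>> hits = new HashMap<>();

            // Multi-catch: one handler replaces two otherwise identical catch blocks.
            try {
                int intervalMs = Integer.parseInt(args[0]) * 60 * 1000;
                System.out.println("Using commit interval (ms): " + intervalMs);
            } catch (NumberFormatException | ArrayIndexOutOfBoundsException ex) {
                System.err.println("Bad or missing interval argument: " + ex);
            }
        }
    }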
@@ -944,7 +895,7 @@ public final class KeywordSearchIngestModule {
 
             for (Keyword keywordQuery : keywords) {
                 if (this.isCancelled()) {
-                    logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: " + keywordQuery.getQuery());
+                    logger.log(Level.INFO, "Cancel detected, bailing before new keyword processed: {0}", keywordQuery.getQuery());
                     return null;
                 }
 
@@ -975,7 +926,7 @@ public final class KeywordSearchIngestModule {
                 final KeywordQueryFilter dataSourceFilter = new KeywordQueryFilter(KeywordQueryFilter.FilterType.DATA_SOURCE, curDataSourceIds);
                 del.addFilter(dataSourceFilter);
 
-                Map<String, List<ContentHit>> queryResult = null;
+                Map<String, List<ContentHit>> queryResult;
 
                 try {
                     queryResult = del.performQuery();
@@ -986,7 +937,7 @@ public final class KeywordSearchIngestModule {
                     //likely case has closed and threads are being interrupted
                     return null;
                 } catch (CancellationException e) {
-                    logger.log(Level.INFO, "Cancel detected, bailing during keyword query: " + keywordQuery.getQuery());
+                    logger.log(Level.INFO, "Cancel detected, bailing during keyword query: {0}", keywordQuery.getQuery());
                     return null;
                 } catch (Exception e) {
                     logger.log(Level.WARNING, "Error performing query: " + keywordQuery.getQuery(), e);
@@ -1002,7 +953,7 @@ public final class KeywordSearchIngestModule {
                 //write results to BB
 
                 //new artifacts created, to report to listeners
-                Collection<BlackboardArtifact> newArtifacts = new ArrayList<BlackboardArtifact>();
+                Collection<BlackboardArtifact> newArtifacts = new ArrayList<>();
 
                 //scale progress bar more more granular, per result sub-progress, within per keyword
                 int totalUnits = newResults.size();
@@ -1019,7 +970,7 @@ public final class KeywordSearchIngestModule {
                 for (final Keyword hitTerm : newResults.keySet()) {
                     //checking for cancellation between results
                     if (this.isCancelled()) {
-                        logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: " + keywordQuery.getQuery());
+                        logger.log(Level.INFO, "Cancel detected, bailing before new hit processed for query: {0}", keywordQuery.getQuery());
                         return null;
                     }
 
@@ -1036,7 +987,7 @@ public final class KeywordSearchIngestModule {
                     for (final AbstractFile hitFile : contentHitsFlattened.keySet()) {
 
                         // get the snippet for the first hit in the file
-                        String snippet = null;
+                        String snippet;
                         final String snippetQuery = KeywordSearchUtil.escapeLuceneQuery(hitTerm.getQuery());
                         int chunkId = contentHitsFlattened.get(hitFile);
                         try {
@@ -1053,7 +1004,7 @@ public final class KeywordSearchIngestModule {
                         // write the blackboard artifact for this keyword in this file
                         KeywordWriteResult written = del.writeToBlackBoard(hitTerm.getQuery(), hitFile, snippet, listName);
                         if (written == null) {
-                            logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: " + hitFile + ", hit: " + hitTerm.toString());
+                            logger.log(Level.WARNING, "BB artifact for keyword hit not written, file: {0}, hit: {1}", new Object[]{hitFile, hitTerm.toString()});
                             continue;
                         }
 
@@ -1128,7 +1079,7 @@ public final class KeywordSearchIngestModule {
                         }
                         detailsSb.append("</table>");
 
-                        // services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
+                        services.postMessage(IngestMessage.createDataMessage(++messageID, instance, subjectSb.toString(), detailsSb.toString(), uniqueKey, written.getArtifact()));
                     }
                 } //for each file hit
 
@@ -1156,7 +1107,7 @@ public final class KeywordSearchIngestModule {
                 try {
                     finalizeSearcher();
                     stopWatch.stop();
-                    logger.log(Level.INFO, "Searcher took to run: " + stopWatch.getElapsedTimeSecs() + " secs.");
+                    logger.log(Level.INFO, "Searcher took to run: {0} secs.", stopWatch.getElapsedTimeSecs());
                 } finally {
                     searcherLock.unlock();
                 }
@@ -1226,13 +1177,13 @@ public final class KeywordSearchIngestModule {
         //calculate new results but substracting results already obtained in this ingest
         //update currentResults map with the new results
         private Map<Keyword, List<ContentHit>> filterResults(Map<String, List<ContentHit>> queryResult, boolean isRegex) {
-            Map<Keyword, List<ContentHit>> newResults = new HashMap<Keyword, List<ContentHit>>();
+            Map<Keyword, List<ContentHit>> newResults = new HashMap<>();
 
             for (String termResult : queryResult.keySet()) {
                 List<ContentHit> queryTermResults = queryResult.get(termResult);
 
                 //translate to list of IDs that we keep track of
-                List<Long> queryTermResultsIDs = new ArrayList<Long>();
+                List<Long> queryTermResultsIDs = new ArrayList<>();
                 for (ContentHit ch : queryTermResults) {
                     queryTermResultsIDs.add(ch.getId());
                 }
@@ -1249,7 +1200,7 @@ public final class KeywordSearchIngestModule {
                 //add to new results
                 List<ContentHit> newResultsFs = newResults.get(termResultK);
                 if (newResultsFs == null) {
-                    newResultsFs = new ArrayList<ContentHit>();
+                    newResultsFs = new ArrayList<>();
                     newResults.put(termResultK, newResultsFs);
                 }
                 newResultsFs.add(res);
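Taken together, the hunks above move KeywordSearchIngestModule off its getDefault() singleton and onto the new FileIngestModule interface. The interface source is not part of this commit, so its exact contract is an inference from the signatures that appear in the diff; under that assumption, a minimal conforming module would look like this:

    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.datamodel.AbstractFile;

    class MinimalFileIngestModule implements FileIngestModule {
        private boolean initialized = false;

        @Override
        public void init(long taskId) {
            // Acquire services and reset per-run state; called once per ingest task.
            initialized = true;
        }

        @Override
        public void process(AbstractFile file) {
            if (!initialized) {
                return; // mirror the guard the keyword search module uses
            }
            // Per-file work goes here.
        }

        @Override
        public void complete() {
            // Final commit and summary work after all files are processed.
        }

        @Override
        public void stop() {
            // Ingest interrupted: release timers, threads, and other resources.
        }
    }

Instances are now created per ingest job by a factory (next file) rather than shared through a static instance, which is what allows the instance field and getDefault() to be deleted.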
org/sleuthkit/autopsy/keywordsearch/KeywordSearchModuleFactory.java (new file, 121 lines)
@@ -0,0 +1,121 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.keywordsearch;
+
+import java.io.Serializable;
+import javax.swing.JPanel;
+import org.openide.util.NbBundle;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.FileIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates file ingest modules that do keyword searches.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class KeywordSearchModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleName");
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return NbBundle.getMessage(KeywordSearchIngestModule.class, "KeywordSearchIngestModule.moduleDescription");
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public Serializable getDefaultIngestOptions() {
+        return new IngestOptions();
+    }
+
+    @Override
+    public boolean providesIngestOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getIngestOptionsPanel(Serializable ingestOptions) {
+        KeywordSearchListsXML.getCurrent().reload();
+        return new KeywordSearchIngestSimplePanel(); // RJCTODO: Load required?
+    }
+
+    @Override
+    public Serializable getIngestOptionsFromPanel(JPanel ingestOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(ingestOptionsPanel instanceof KeywordSearchIngestSimplePanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchIngestSimplePanel panel = (KeywordSearchIngestSimplePanel)ingestOptionsPanel;
+        panel.store();
+
+        return new IngestOptions(); // RJCTODO
+    }
+
+    @Override
+    public boolean providesGlobalOptionsPanels() {
+        return true;
+    }
+
+    @Override
+    public JPanel getGlobalOptionsPanel() {
+        KeywordSearchConfigurationPanel globalOptionsPanel = new KeywordSearchConfigurationPanel();
+        globalOptionsPanel.load();
+        return globalOptionsPanel;
+    }
+
+    @Override
+    public void saveGlobalOptionsFromPanel(JPanel globalOptionsPanel) throws IngestModuleFactory.InvalidOptionsException {
+        if (!(globalOptionsPanel instanceof KeywordSearchConfigurationPanel)) {
+            throw new IngestModuleFactory.InvalidOptionsException(""); // RJCTODO
+        }
+
+        KeywordSearchConfigurationPanel panel = (KeywordSearchConfigurationPanel)globalOptionsPanel;
+        panel.store();
+        // RJCTODO: Need simple panel store? May need to change implementation...see also hash db factory
+    }
+
+    @Override
+    public boolean isFileIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public FileIngestModule createFileIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new KeywordSearchIngestModule();
+    }
+
+    private static class IngestOptions implements Serializable {
+        // RJCTODO: Any options here?
+        // boolean alwaysCalcHashes = true;
+        // ArrayList<String> hashSetNames = new ArrayList<>();
+    }
+}
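The @ServiceProvider annotation registers the factory in the NetBeans default Lookup, which is presumably how the ingest framework will discover it. A sketch of that discovery, using only methods that appear in this diff plus the standard Lookup API (how the real ingest manager consumes the factories is an assumption):

    import java.util.Collection;
    import org.openide.util.Lookup;
    import org.sleuthkit.autopsy.ingest.FileIngestModule;
    import org.sleuthkit.autopsy.ingest.IngestModuleFactory;

    class FactoryDiscoverySketch {
        static void startFileModules() throws IngestModuleFactory.InvalidOptionsException {
            Collection<? extends IngestModuleFactory> factories =
                    Lookup.getDefault().lookupAll(IngestModuleFactory.class);
            for (IngestModuleFactory factory : factories) {
                if (factory.isFileIngestModuleFactory()) {
                    // One module instance per ingest job, configured with
                    // serializable per-job options.
                    FileIngestModule module =
                            factory.createFileIngestModule(factory.getDefaultIngestOptions());
                    System.out.println("Created module from " + factory.getModuleDisplayName());
                }
            }
        }
    }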
@@ -6,6 +6,14 @@
     <code-name-base>org.sleuthkit.autopsy.ewfverify</code-name-base>
     <suite-component/>
     <module-dependencies>
+        <dependency>
+            <code-name-base>org.openide.util.lookup</code-name-base>
+            <build-prerequisite/>
+            <compile-dependency/>
+            <run-dependency>
+                <specification-version>8.19.1</specification-version>
+            </run-dependency>
+        </dependency>
         <dependency>
             <code-name-base>org.sleuthkit.autopsy.core</code-name-base>
             <build-prerequisite/>

ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifierModuleFactory.java (new executable file, 63 lines)
@@ -0,0 +1,63 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2014 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.io.Serializable;
+import org.openide.util.lookup.ServiceProvider;
+import org.sleuthkit.autopsy.coreutils.Version;
+import org.sleuthkit.autopsy.ingest.AbstractIngestModuleFactory;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+import org.sleuthkit.autopsy.ingest.IngestModuleFactory;
+
+/**
+ * A factory that creates data source ingest modules that verify E01 image files.
+ */
+@ServiceProvider(service=IngestModuleFactory.class)
+public class EwfVerifierModuleFactory extends AbstractIngestModuleFactory {
+    @Override
+    public String getModuleDisplayName() {
+        return getModuleName();
+    }
+
+    static String getModuleName() {
+        return "EWF Verify"; // RJCTODO: Is this what we want here?
+    }
+
+    @Override
+    public String getModuleDescription() {
+        return "Validates the integrity of E01 files.";
+    }
+
+    @Override
+    public String getModuleVersionNumber() {
+        return Version.getVersion();
+    }
+
+    @Override
+    public boolean isDataSourceIngestModuleFactory() {
+        return true;
+    }
+
+    @Override
+    public DataSourceIngestModule createDataSourceIngestModule(Serializable ingestOptions) throws IngestModuleFactory.InvalidOptionsException {
+        return new EwfVerifyIngestModule();
+    }
+}
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2013 Basis Technology Corp.
+ * Copyright 2013-2014 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -16,204 +16,180 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-//package org.sleuthkit.autopsy.ewfverify;
-//
-//import java.security.MessageDigest;
-//import java.security.NoSuchAlgorithmException;
-//import java.util.logging.Level;
-//import java.util.logging.Logger;
-//import javax.xml.bind.DatatypeConverter;
-//import org.sleuthkit.autopsy.casemodule.Case;
-//import org.sleuthkit.autopsy.coreutils.Version;
-//import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
-//import org.sleuthkit.autopsy.ingest.IngestMessage;
-//import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
-//import org.sleuthkit.autopsy.ingest.IngestModuleDataSource;
-//import org.sleuthkit.autopsy.ingest.IngestModuleInit;
-//import org.sleuthkit.autopsy.ingest.IngestServices;
-//import org.sleuthkit.datamodel.Content;
-//import org.sleuthkit.datamodel.Image;
-//import org.sleuthkit.datamodel.SleuthkitCase;
-//import org.sleuthkit.datamodel.TskCoreException;
-//import org.sleuthkit.datamodel.TskData;
-
-///**
-// * Data Source Ingest Module that generates a hash of an E01 image file and
-// * verifies it with the value stored in the image.
-// *
-// * @author jwallace
-// */
-//public class EwfVerifyIngestModule extends IngestModuleDataSource {
-//    private static final String MODULE_NAME = "EWF Verify";
-//    private static final String MODULE_VERSION = Version.getVersion();
-//    private static final String MODULE_DESCRIPTION = "Validates the integrity of E01 files.";
-//    private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
-//    private IngestServices services;
-//    private volatile boolean running = false;
-//    private Image img;
-//    private String imgName;
-//    private MessageDigest messageDigest;
-//    private static Logger logger = null;
-//    private static int messageId = 0;
-//    private boolean verified = false;
-//    private boolean skipped = false;
-//    private String calculatedHash = "";
-//    private String storedHash = "";
-//    private SleuthkitCase skCase;
-//
-//    public EwfVerifyIngestModule() {
-//    }
-//
-//    @Override
-//    public void process(PipelineContext<IngestModuleDataSource> pipelineContext, Content dataSource, IngestDataSourceWorkerController controller) {
-//        imgName = dataSource.getName();
-//        try {
-//            img = dataSource.getImage();
-//        } catch (TskCoreException ex) {
-//            img = null;
-//            logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Error processing " + imgName));
-//            return;
-//        }
-//
-//        // Skip images that are not E01
-//        if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
-//            img = null;
-//            logger.log(Level.INFO, "Skipping non-ewf image " + imgName);
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
-//                    "Skipping non-ewf image " + imgName));
-//            skipped = true;
-//            return;
-//        }
-//
-//
-//        if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
-//        {
-//            storedHash = img.getMd5().toLowerCase();
-//            logger.info("Hash value stored in " + imgName + ": " + storedHash);
-//
-//        }
-//        else {
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Image " + imgName + " does not have stored hash."));
-//            return;
-//        }
-//
-//        logger.log(Level.INFO, "Starting ewf verification of " + img.getName());
-//        services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
-//                "Starting " + imgName));
-//
-//        long size = img.getSize();
-//        if (size == 0) {
-//            logger.log(Level.WARNING, "Size of image " + imgName + " was 0 when queried.");
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
-//                    "Error getting size of " + imgName + ". Image will not be processed."));
-//        }
-//
-//        // Libewf uses a sector size of 64 times the sector size, which is the
-//        // motivation for using it here.
-//        long chunkSize = 64 * img.getSsize();
-//        chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
-//
-//        int totalChunks = (int) Math.ceil(size / chunkSize);
-//        logger.log(Level.INFO, "Total chunks = " + totalChunks);
-//        int read;
-//
-//        byte[] data;
-//        controller.switchToDeterminate(totalChunks);
-//
-//        running = true;
-//        // Read in byte size chunks and update the hash value with the data.
-//        for (int i = 0; i < totalChunks; i++) {
-//            if (controller.isCancelled()) {
-//                running = false;
-//                return;
-//            }
-//            data = new byte[ (int) chunkSize ];
-//            try {
-//                read = img.read(data, i * chunkSize, chunkSize);
-//            } catch (TskCoreException ex) {
-//                String msg = "Error reading " + imgName + " at chunk " + i;
-//                services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
-//                logger.log(Level.SEVERE, msg, ex);
-//                return;
-//            }
-//            messageDigest.update(data);
-//            controller.progress(i);
-//        }
-//
-//        // Finish generating the hash and get it as a string value
-//        calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
-//        verified = calculatedHash.equals(storedHash);
-//        logger.info("Hash calculated from " + imgName + ": " + calculatedHash);
-//        running = false;
-//    }
-//
-//    @Override
-//    public void init(IngestModuleInit initContext) {
-//        services = IngestServices.getDefault();
-//        skCase = Case.getCurrentCase().getSleuthkitCase();
-//        running = false;
-//        verified = false;
-//        skipped = false;
-//        img = null;
-//        imgName = "";
-//        storedHash = "";
-//        calculatedHash = "";
-//
-//        if (logger == null) {
-//            logger = services.getLogger(this);
-//        }
-//
-//        if (messageDigest == null) {
-//            try {
-//                messageDigest = MessageDigest.getInstance("MD5");
-//            } catch (NoSuchAlgorithmException ex) {
-//                logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
-//                throw new RuntimeException("Failed to get MD5 algorithm");
-//            }
-//        } else {
-//            messageDigest.reset();
-//        }
-//    }
-//
-//    @Override
-//    public void complete() {
-//        logger.info("complete() " + this.getName());
-//        if (skipped == false) {
-//            String msg = verified ? " verified" : " not verified";
-//            String extra = "<p>EWF Verification Results for " + imgName + "</p>";
-//            extra += "<li>Result:" + msg + "</li>";
-//            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
-//            extra += "<li>Stored hash: " + storedHash + "</li>";
-//            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
-//            logger.info(imgName + msg);
-//        }
-//    }
-//
-//    @Override
-//    public void stop() {
-//        running = false;
-//    }
-//
-//    @Override
-//    public String getName() {
-//        return MODULE_NAME;
-//    }
-//
-//    @Override
-//    public String getVersion() {
-//        return MODULE_VERSION;
-//    }
-//
-//    @Override
-//    public String getDescription() {
-//        return MODULE_DESCRIPTION;
-//    }
-//
-//    @Override
-//    public boolean hasBackgroundJobsRunning() {
-//        return running;
-//    }
-//}
+package org.sleuthkit.autopsy.ewfverify;
+
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.xml.bind.DatatypeConverter;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.ingest.IngestDataSourceWorkerController;
+import org.sleuthkit.autopsy.ingest.IngestMessage;
+import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType;
+import org.sleuthkit.autopsy.ingest.IngestServices;
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
+
+/**
+ * Data source ingest module that verifies the integrity of an Expert Witness
+ * Format (EWF) E01 image file by generating a hash of the file and comparing it
+ * to the value stored in the image.
+ */
+public class EwfVerifyIngestModule implements DataSourceIngestModule {
+    private static final long DEFAULT_CHUNK_SIZE = 32 * 1024;
+    private IngestServices services;
+    private volatile boolean running = false;
+    private Image img;
+    private String imgName;
+    private MessageDigest messageDigest;
+    private static Logger logger = null;
+    private static int messageId = 0;
+    private boolean verified = false;
+    private boolean skipped = false;
+    private String calculatedHash = "";
+    private String storedHash = "";
+
+    EwfVerifyIngestModule() {
+    }
+
+    @Override
+    public String getDisplayName() {
+        return EwfVerifierModuleFactory.getModuleName();
+    }
+
+    @Override
+    public void init(long taskId) {
+        services = IngestServices.getDefault();
+        running = false;
+        verified = false;
+        skipped = false;
+        img = null;
+        imgName = "";
+        storedHash = "";
+        calculatedHash = "";
+
+        if (logger == null) {
+            logger = services.getLogger(this);
+        }
+
+        if (messageDigest == null) {
+            try {
+                messageDigest = MessageDigest.getInstance("MD5");
+            } catch (NoSuchAlgorithmException ex) {
+                logger.log(Level.WARNING, "Error getting md5 algorithm", ex);
+                throw new RuntimeException("Failed to get MD5 algorithm");
+            }
+        } else {
+            messageDigest.reset();
+        }
+    }
+
+    @Override
+    public void process(Content dataSource, IngestDataSourceWorkerController statusHelper) {
+        imgName = dataSource.getName();
+        try {
+            img = dataSource.getImage();
+        } catch (TskCoreException ex) {
+            img = null;
+            logger.log(Level.SEVERE, "Failed to get image from Content.", ex);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error processing " + imgName));
+            return;
+        }
+
+        // Skip images that are not E01
+        if (img.getType() != TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_EWF_EWF) {
+            img = null;
+            logger.log(Level.INFO, "Skipping non-ewf image {0}", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                    "Skipping non-ewf image " + imgName));
+            skipped = true;
+            return;
+        }
+
+        if ((img.getMd5()!= null) && !img.getMd5().isEmpty())
+        {
+            storedHash = img.getMd5().toLowerCase();
+            logger.log(Level.INFO, "Hash value stored in {0}: {1}", new Object[]{imgName, storedHash});
+        }
+        else {
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Image " + imgName + " does not have stored hash."));
+            return;
+        }
+
+        logger.log(Level.INFO, "Starting ewf verification of {0}", img.getName());
+        services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this,
+                "Starting " + imgName));
+
+        long size = img.getSize();
+        if (size == 0) {
+            logger.log(Level.WARNING, "Size of image {0} was 0 when queried.", imgName);
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this,
+                    "Error getting size of " + imgName + ". Image will not be processed."));
+        }
+
+        // Libewf uses a sector size of 64 times the sector size, which is the
+        // motivation for using it here.
+        long chunkSize = 64 * img.getSsize();
+        chunkSize = (chunkSize == 0) ? DEFAULT_CHUNK_SIZE : chunkSize;
+
+        int totalChunks = (int) Math.ceil(size / chunkSize);
+        logger.log(Level.INFO, "Total chunks = {0}", totalChunks);
+        int read;
+
+        byte[] data;
+        statusHelper.switchToDeterminate(totalChunks);
+
+        running = true;
+        // Read in byte size chunks and update the hash value with the data.
+        for (int i = 0; i < totalChunks; i++) {
+            if (statusHelper.isCancelled()) {
+                running = false;
+                return;
+            }
+            data = new byte[ (int) chunkSize ];
+            try {
+                read = img.read(data, i * chunkSize, chunkSize);
+            } catch (TskCoreException ex) {
+                String msg = "Error reading " + imgName + " at chunk " + i;
+                services.postMessage(IngestMessage.createMessage(++messageId, MessageType.ERROR, this, msg));
+                logger.log(Level.SEVERE, msg, ex);
+                return;
+            }
+            messageDigest.update(data);
+            statusHelper.progress(i);
+        }
+
+        // Finish generating the hash and get it as a string value
+        calculatedHash = DatatypeConverter.printHexBinary(messageDigest.digest()).toLowerCase();
+        verified = calculatedHash.equals(storedHash);
+        logger.log(Level.INFO, "Hash calculated from {0}: {1}", new Object[]{imgName, calculatedHash});
+        running = false;
+    }
+
+    @Override
+    public void complete() {
+        logger.log(Level.INFO, "complete() {0}", getDisplayName());
+        if (skipped == false) {
+            String msg = verified ? " verified" : " not verified";
+            String extra = "<p>EWF Verification Results for " + imgName + "</p>";
+            extra += "<li>Result:" + msg + "</li>";
+            extra += "<li>Calculated hash: " + calculatedHash + "</li>";
+            extra += "<li>Stored hash: " + storedHash + "</li>";
+            services.postMessage(IngestMessage.createMessage(++messageId, MessageType.INFO, this, imgName + msg, extra));
+            logger.log(Level.INFO, "{0}{1}", new Object[]{imgName, msg});
+        }
+    }
+
+    @Override
+    public void stop() {
+    }
+}
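The heart of the uncommented module is ordinary incremental MD5 hashing over fixed-size chunks. The same technique, reduced to a standalone program over a local file (the path is illustrative; only JDK classes already imported by the module are used):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import javax.xml.bind.DatatypeConverter;

    public class ChunkedMd5Sketch {
        public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            byte[] chunk = new byte[32 * 1024]; // same default chunk size as the module
            try (FileInputStream in = new FileInputStream("image.E01")) {
                int read;
                while ((read = in.read(chunk)) != -1) {
                    // Feed only the bytes actually read into the running digest.
                    md5.update(chunk, 0, read);
                }
            }
            String calculatedHash = DatatypeConverter.printHexBinary(md5.digest()).toLowerCase();
            System.out.println("MD5: " + calculatedHash);
        }
    }

The update(chunk, 0, read) form avoids hashing stale buffer bytes on a short read; the module's loop instead sizes each read to the image's chunk geometry and updates with the whole buffer.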