Merge branch 'master' of github.com:sleuthkit/autopsy

This commit is contained in:
Brian Carrier 2013-10-30 00:50:25 -04:00
commit 8c0786c848
3 changed files with 70 additions and 28 deletions

View File

@ -11,8 +11,7 @@ correct C libraries.
STEPS:
1) Get Java Setup
1a) Download and install 32-bit version of JDK version 1.7 (32-bit is currently
needed even if you have a 64-bit system).
1a) Download and install JDK version 1.7. You can now use 32-bit or 64-bit, but special work is needed to get The Sleuth Kit to compile as 64-bit. So, 32-bit is easier.
Autopsy has been used and tested with Oracle JavaSE and the included JavaFX support
(http://www.oracle.com/technetwork/java/javase/downloads/index.html).
@ -26,7 +25,8 @@ Note: Netbeans IDE is not required to build and run Autopsy,
but it is a recommended IDE to use for development of Autopsy modules.
1d) (optional) If you are going to package Autopsy, then you'll also
need to set JRE_HOME to the root JRE directory.
need to set JRE_HOME_32 to the root 32-bit JRE directory and/or JRE_HOME_64
to the root 64-bit JRE directory.
1e) (optional) For some Autopsy features to be functional, you need to add java executable to the system PATH.
@ -37,6 +37,9 @@ need to set JRE_HOME to the root JRE directory.
later). All you need is the dll file. Note that you will get a
launching error if you use libewf 1.
- http://sourceforge.net/projects/libewf/
If you want to build the 64-bit version of The Sleuth Kit, download
our 64-bit version of libewf:
- https://github.com/sleuthkit/libewf_64bit
2b) Set LIBEWF_HOME environment variable to root directory of LIBEWF
@ -97,13 +100,13 @@ BACKGROUND:
Here are some notes to shed some light on what is going on during
the build process.
- NetBeans uses ant to build Autopsy. The build target locates TSK
(and LIBEWF) based on the environment variables and copies the
needed JAR and library files into the DataModel module in the Autopsy
project (see build-unix.xml and build-windows.xml in the root
directory for details). If you want to use the debug version of
the TSK dll, then edit the copy line in the build-windows.xml file
to copy from the Debug folder.
- The Sleuth Kit Java datamodel JAR file has native libraries
that are copied into it.
- NetBeans uses ant to build Autopsy. The build target copies the
TSK datamodel JAR file into the project. If you want to use the
debug version of the TSK dll, then there is a different ant target
in TSK to copy the debug versions of the dlls.
- On a Windows system, the ant target copies all needed libraries
to the autopsy folder. On a Unix system, the ant target copies only

View File

@ -86,40 +86,52 @@ public class IngestManager {
public enum IngestModuleEvent {
/**
* Event sent when the ingest module has been started processing. Second
* argument of the property change fired contains module name String and
* third argument is null.
* Event sent when an ingest module has been started. Second
* argument of the property change is a string form of the module name
* and the third argument is null.
*/
STARTED,
/**
* Event sent when the ingest module has completed processing. Second
* argument of the property change fired contains module name String and
* third argument is null.
* Event sent when an ingest module has completed processing by its own
* means. Second
* argument of the property change is a string form of the module name
* and the third argument is null.
*
* This event is generally used by listeners to perform a final data
* view refresh (listeners need to query all data from the blackboard).
*
*/
COMPLETED,
/**
* Event sent when the ingest module has stopped processing, and likely
* Event sent when an ingest module has stopped processing, and likely
* not all data has been processed. Second argument of the property
* change fired contains module name String and third argument is null.
* change is a string form of the module name and third argument is null.
*/
STOPPED,
/**
* Event sent when ingest module has new data. Second argument of the
* Event sent when ingest module posts new data to blackboard or somewhere
* else. Second argument of the
* property change fired contains ModuleDataEvent object and third
* argument is null. The object can contain encapsulated new data
* created by the module. Listener can also query new data as needed.
*
*/
DATA,
/**
* Event send when content changed, either its attributes changed, or
* new content children have been added
* new content children have been added, e.g. from ZIP files or carved files
*/
CONTENT_CHANGED
CONTENT_CHANGED,
/**
* Event sent when a file has finished going through a pipeline of modules.
* Second argument is the object ID. Third argument is null
*/
FILE_DONE,
};
//ui
//Initialized by Installer in AWT thread once the Window System is ready
@ -196,11 +208,29 @@ public class IngestManager {
/**
 * Fire a general ingest module lifecycle event (e.g. STARTED, COMPLETED,
 * STOPPED) to all registered property change listeners.
 *
 * @param eventType string form of the IngestModuleEvent to fire
 * @param moduleName name of the module the event pertains to; sent as the
 * "old value" of the property change, with a null "new value"
 */
static synchronized void fireModuleEvent(String eventType, String moduleName) {
pcs.firePropertyChange(eventType, moduleName, null);
}
/**
 * Notify registered listeners that a file has finished its run through the
 * ingest module pipeline.
 *
 * @param objId object ID of the file that completed the pipeline; sent as
 * the "old value" of the property change, with a null "new value"
 */
static synchronized void fireFileDone(long objId) {
final String eventName = IngestModuleEvent.FILE_DONE.toString();
pcs.firePropertyChange(eventName, objId, null);
}
/**
 * Notify registered listeners that a module has posted new data (e.g. to
 * the blackboard) by firing a DATA property change event.
 *
 * @param moduleDataEvent event object encapsulating the newly posted data;
 * sent as the "old value" of the property change, with a null "new value"
 */
static synchronized void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
final String eventName = IngestModuleEvent.DATA.toString();
pcs.firePropertyChange(eventName, moduleDataEvent, null);
}
/**
 * Notify registered listeners that a module has produced new content that
 * needs to be analyzed, by firing a CONTENT_CHANGED property change event.
 *
 * @param moduleContentEvent event object encapsulating the changed content;
 * sent as the "old value" of the property change, with a null "new value"
 */
static synchronized void fireModuleContentEvent(ModuleContentEvent moduleContentEvent) {
final String eventName = IngestModuleEvent.CONTENT_CHANGED.toString();
pcs.firePropertyChange(eventName, moduleContentEvent, null);
}
@ -282,7 +312,8 @@ public class IngestManager {
}
/**
* Starts the needed worker threads.
* Starts the File-level Ingest Module pipeline and the Data Source-level Ingest Modules
* for the queued up data sources and files.
*
* if AbstractFile module is still running, do nothing and allow it to
* consume queue otherwise start /restart AbstractFile worker
@ -303,8 +334,10 @@ public class IngestManager {
ingestMonitor.start();
}
/////////
// Start the data source-level ingest modules
List<IngestDataSourceThread> newThreads = new ArrayList<>();
//image ingesters
// cycle through each data source content in the queue
while (dataSourceScheduler.hasNext()) {
if (allInited == false) {
@ -987,10 +1020,14 @@ public class IngestManager {
logger.log(Level.SEVERE, "Error: out of memory from module: " + module.getName(), e);
stats.addError(module);
}
} //end for every module
//free the internal file resource after done with every module
fileToProcess.close();
// notify listeners this file is done
fireFileDone(fileToProcess.getId());
int newTotalEnqueuedFiles = fileScheduler.getFilesEnqueuedEst();
if (newTotalEnqueuedFiles > totalEnqueuedFiles) {
@ -1076,7 +1113,7 @@ public class IngestManager {
}
}
/* Thread that adds content/file and module pairs to queues */
/* Thread that adds content/file and module pairs to queues. Starts pipelines when done. */
private class EnqueueWorker extends SwingWorker<Object, Void> {
private List<IngestModuleAbstract> modules;

View File

@ -110,7 +110,8 @@ public class IngestServices {
/**
* Fire module data event to notify registered module data event listeners
* Fire module data event to notify registered module data event listeners that there
* is new data of a given type from a module
* @param moduleDataEvent module data event, encapsulating blackboard artifact data
*/
public void fireModuleDataEvent(ModuleDataEvent moduleDataEvent) {
@ -120,6 +121,7 @@ public class IngestServices {
/**
* Fire module content event to notify registered module content event listeners
* that there is new content (from ZIP file contents, carving, etc.)
* @param moduleContentEvent module content event, encapsulating content changed
*/
public void fireModuleContentEvent(ModuleContentEvent moduleContentEvent) {