Merge branch 'master' of github.com:sleuthkit/autopsy

This commit is contained in:
Dick Fickling 2012-04-26 13:27:12 -04:00
commit 35be56d87a
5 changed files with 171 additions and 43 deletions

3
.gitignore vendored
View File

@ -16,4 +16,5 @@
/KeywordSearch/release/solr/webapps/solr.war
/DataModel/release/modules/ext/sqlite-jdbc-3.7.2.jar
/DataModel/release/modules/lib/zlib.dll
/DataModel/release/modules/lib/zlib.dll
/branding_spear

View File

@ -40,11 +40,21 @@ public class AutopsyExceptionHandler extends Handler {
static final int WARNING_VALUE = Level.WARNING.intValue();
static final int SEVERE_VALUE = Level.SEVERE.intValue();
static final Handler nbErrorManager = new NbErrorManager(); // Default NetBeans handler
static final Version.Type buildType = Version.getBuildType();
public AutopsyExceptionHandler() {
super();
// Display dialogs only for SEVERE-and-above records coming from an
// uncaught exception, regardless of build type.
//
// NOTE(review): an earlier (now removed) variant set the level per build
// type: WARNING for development builds, SEVERE for production builds.
// TODO: in the future consider showing no dialogs at all here and relying
// on explicit dialogs instead.
this.setLevel(Level.SEVERE);
// ExceptionFilter restricts handling to records carrying a Throwable.
this.setFilter(new ExceptionFilter());
this.setFormatter(new SimpleFormatter());
}

View File

@ -90,6 +90,9 @@ public class IngestManager {
final IngestManagerProxy managerProxy = new IngestManagerProxy(this);
//notifications
private final static PropertyChangeSupport pcs = new PropertyChangeSupport(IngestManager.class);
//monitor
private final IngestMonitor ingestMonitor = new IngestMonitor();
private enum IngestManagerEvents {
@ -193,6 +196,9 @@ public class IngestManager {
logger.log(Level.INFO, "Image queue: " + this.imageQueue.toString());
logger.log(Level.INFO, "File queue: " + this.fsContentQueue.toString());
if (! ingestMonitor.isRunning())
ingestMonitor.start();
//image ingesters
// cycle through each image in the queue
while (hasNextImage()) {

View File

@ -0,0 +1,118 @@
/*
* Autopsy Forensic Browser
*
* Copyright 2012 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.ingest;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.File;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.Timer;
import org.sleuthkit.autopsy.casemodule.Case;
/**
 * Monitor health of the system and stop ingest if necessary.
 * Periodically (once a minute) checks free disk space on the drive that
 * holds the current case and stops all ingest when space runs too low.
 */
public class IngestMonitor {

    private static final int INITIAL_INTERVAL_MS = 60000; //1 min.
    private static final Logger logger = Logger.getLogger(IngestMonitor.class.getName());
    private Timer timer;

    /**
     * Start the monitor.
     * Stops any previously started timer first so that repeated calls to
     * start() do not leak extra running Swing timers.
     */
    void start() {
        if (timer != null && timer.isRunning()) {
            timer.stop();
        }
        timer = new Timer(INITIAL_INTERVAL_MS, new MonitorAction());
        timer.start();
    }

    /**
     * Stop the monitor (no-op if it was never started).
     */
    void stop() {
        if (timer != null) {
            timer.stop();
        }
    }

    /**
     * Check if the monitor is running.
     *
     * @return true if the monitor is running, false otherwise
     */
    boolean isRunning() {
        return timer != null && timer.isRunning();
    }

    /**
     * Timer callback: checks system health and stops ingest when the disk
     * holding the case directory falls below the free-space threshold.
     */
    private class MonitorAction implements ActionListener {

        private final static long MIN_FREE_DISK_SPACE = 100L * 1024 * 1024; //100MB
        private File root = new File(File.separator); //default: root dir where autopsy runs

        MonitorAction() {
            // Find the filesystem root of the drive where the case is located
            // by walking up the case directory's parent chain.
            // NOTE(review): Case.getCurrentCase() presumably throws if no case
            // is open; this assumes the monitor is only started with a case
            // open — confirm against callers.
            String caseDir = Case.getCurrentCase().getCaseDirectory();
            File curDir = new File(caseDir);
            File tempF = null;
            while ((tempF = curDir.getParentFile()) != null) {
                curDir = tempF;
            }
            root = curDir;
            //logger.log(Level.INFO, "Using case root: " + curDir.getAbsolutePath());
        }

        @Override
        public void actionPerformed(ActionEvent e) {
            final IngestManager manager = IngestManager.getDefault();
            // Run checks only if ingest is running.
            if (manager.isIngestRunning() == false) {
                return;
            }
            if (checkDiskSpace() == false) {
                // Low disk space: stop all ingest and notify the user.
                // NOTE(review): the message advises 1GB free while the actual
                // threshold constant is 100MB — consider reconciling.
                final String diskPath = root.getAbsolutePath();
                logger.log(Level.SEVERE, "Stopping ingest due to low disk space on disk " + diskPath);
                manager.stopAll();
                manager.postMessage(IngestMessage.createManagerMessage("Stopping ingest due to low disk space on disk " + diskPath, "Stopping ingest due to low disk space on disk " + diskPath + ". Please ensure the drive where Case is located has at least 1GB free space (more for large images) and restart ingest."));
            }
        }

        /**
         * Check free disk space on the case drive.
         *
         * @return true if OK (enough space or unable to check), false otherwise
         */
        private boolean checkDiskSpace() {
            long freeSpace;
            try {
                freeSpace = root.getFreeSpace();
            } catch (SecurityException e) {
                // Best effort: if we cannot query the filesystem, do not stop ingest.
                logger.log(Level.WARNING, "Unable to check for free disk space (permission issue)", e);
                return true; //OK
            }
            //logger.log(Level.INFO, "Checking free disk space: " + freeSpace + " need: " + Long.toString(MIN_FREE_DISK_SPACE));
            return freeSpace > MIN_FREE_DISK_SPACE;
        }
    }
}

View File

@ -18,6 +18,8 @@
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
@ -27,6 +29,7 @@ import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.Timer;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.netbeans.api.progress.ProgressHandle;
@ -45,7 +48,7 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.FsContent;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskException;
import org.sleuthkit.datamodel.TskData;
//service provider registered in layer.xml
public final class KeywordSearchIngestService implements IngestServiceFsContent {
@ -59,12 +62,11 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
private static final long MAX_INDEX_SIZE = 100 * (1 << 10) * (1 << 10);
private Ingester ingester = null;
private volatile boolean commitIndex = false; //whether to commit index next time
private volatile boolean runTimer = false;
private List<Keyword> keywords; //keywords to search
private List<String> keywordLists; // lists currently being searched
private Map<String, String> keywordToList; //keyword to list name mapping
//private final Object lock = new Object();
private Thread timer;
private Timer commitTimer;
private Indexer indexer;
private Searcher searcher;
private volatile boolean searcherDone = true;
@ -149,7 +151,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
}
//logger.log(Level.INFO, "complete()");
runTimer = false;
commitTimer.stop();
//handle case if previous search running
//cancel it, will re-run after final commit
@ -185,7 +187,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
logger.log(Level.INFO, "stop()");
//stop timer
runTimer = false;
commitTimer.stop();
//stop searcher
if (searcher != null) {
searcher.cancel(true);
@ -254,12 +256,11 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
final int commitIntervalMs = managerProxy.getUpdateFrequency() * 60 * 1000;
logger.log(Level.INFO, "Using refresh interval (ms): " + commitIntervalMs);
timer = new CommitTimer(commitIntervalMs);
runTimer = true;
commitTimer = new Timer(commitIntervalMs, new CommitTimerAction());
initialized = true;
timer.start();
commitTimer.start();
managerProxy.postMessage(IngestMessage.createMessage(++messageID, MessageType.INFO, this, "Started"));
}
@ -410,31 +411,16 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
}
}
//CommitTimer wakes up every interval ms
//and sets a flag for indexer to commit after indexing next file
private class CommitTimer extends Thread {
//CommitTimerAction to run by commitTimer
//sets a flag for indexer to commit after indexing next file
private class CommitTimerAction implements ActionListener {
private final Logger logger = Logger.getLogger(CommitTimer.class.getName());
private int interval;
CommitTimer(int interval) {
this.interval = interval;
}
private final Logger logger = Logger.getLogger(CommitTimerAction.class.getName());
@Override
public void run() {
while (runTimer) {
try {
Thread.sleep(interval);
commitIndex = true;
logger.log(Level.INFO, "CommitTimer awake");
} catch (InterruptedException e) {
break;
}
}
commitIndex = false;
return;
public void actionPerformed(ActionEvent e) {
commitIndex = true;
logger.log(Level.INFO, "CommitTimer awake");
}
}
@ -444,6 +430,7 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
private class Indexer {
private final Logger logger = Logger.getLogger(Indexer.class.getName());
private static final String DELETED_MSG = "The file is an unallocated or orphan file (deleted) and entire content is no longer recoverable. ";
private boolean extractAndIngest(FsContent f) {
boolean success = false;
@ -480,7 +467,13 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
}
}
String deletedMessage = "";
if ((fsContent.getMeta_flags() & (TskData.TSK_FS_META_FLAG_ENUM.ORPHAN.getMetaFlag() | TskData.TSK_FS_META_FLAG_ENUM.UNALLOC.getMetaFlag())) != 0) {
deletedMessage = DELETED_MSG;
}
if (ingestible == true) {
try {
//logger.log(Level.INFO, "indexing: " + fsContent.getName());
ingester.ingest(fsContent);
@ -489,40 +482,40 @@ public final class KeywordSearchIngestService implements IngestServiceFsContent
ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED);
//try to extract strings
boolean processed = processNonIngestible(fsContent);
postIngestibleErrorMessage(processed, fileName);
postIngestibleErrorMessage(processed, fileName, deletedMessage);
} catch (Exception e) {
ingestStatus.put(fsContent.getId(), IngestStatus.SKIPPED);
//try to extract strings
boolean processed = processNonIngestible(fsContent);
postIngestibleErrorMessage(processed, fileName);
postIngestibleErrorMessage(processed, fileName, deletedMessage);
}
} else {
boolean processed = processNonIngestible(fsContent);
postNonIngestibleErrorMessage(processed, fsContent);
postNonIngestibleErrorMessage(processed, fsContent, deletedMessage);
}
}
private void postNonIngestibleErrorMessage(boolean stringsExtracted, FsContent fsContent) {
private void postNonIngestibleErrorMessage(boolean stringsExtracted, FsContent fsContent, String deletedMessage) {
String fileName = fsContent.getName();
if (!stringsExtracted) {
if (fsContent.getSize() < MAX_STRING_EXTRACT_SIZE) {
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchIngestService.instance, "Error indexing strings: " + fileName, "Error encountered extracting string content from this file (of unsupported format). The file will not be included in the search results.<br />File: " + fileName));
}
else {
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchIngestService.instance, "Error indexing strings: " + fileName, "Error encountered extracting string content from this file (of unsupported format). " + deletedMessage + "The file will not be included in the search results.<br />File: " + fileName));
} else {
managerProxy.postMessage(IngestMessage.createMessage(++messageID, IngestMessage.MessageType.INFO, KeywordSearchIngestService.instance, "Skipped indexing strings: " + fileName, "Skipped extracting string content from this file (of unsupported format) due to the file size. The file will not be included in the search results.<br />File: " + fileName));
}
}
}
private void postIngestibleErrorMessage(boolean stringsExtracted, String fileName) {
private void postIngestibleErrorMessage(boolean stringsExtracted, String fileName, String deletedMessage) {
if (stringsExtracted) {
managerProxy.postMessage(IngestMessage.createWarningMessage(++messageID, KeywordSearchIngestService.instance, "Indexed strings only: " + fileName, "Error encountered extracting file content. Used string extraction to index strings for partial analysis on this file.<br />File: " + fileName));
managerProxy.postMessage(IngestMessage.createWarningMessage(++messageID, KeywordSearchIngestService.instance, "Indexed strings only: " + fileName, "Error encountered extracting file content. " + deletedMessage + "Used string extraction to index strings for partial analysis on this file.<br />File: " + fileName));
} else {
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchIngestService.instance, "Error indexing: " + fileName, "Error encountered extracting file content and strings from this file. The file will not be included in the search results.<br />File: " + fileName));
managerProxy.postMessage(IngestMessage.createErrorMessage(++messageID, KeywordSearchIngestService.instance, "Error indexing: " + fileName, "Error encountered extracting file content and strings from this file. " + deletedMessage + "The file will not be included in the search results.<br />File: " + fileName));
}
}