Merge pull request #941 from rcordovano/recent_activity_cancellable

Recent activity cancellable
This commit is contained in:
Richard Cordovano 2014-11-06 22:05:11 -05:00
commit 88677f30a9
12 changed files with 101 additions and 136 deletions

View File

@ -150,6 +150,9 @@ public final class ExecUtil {
}
}
/**
* EVERYTHING FOLLOWING THIS LINE IS DEPRECATED AND SLATED FOR REMOVAL
*/
private static final Logger logger = Logger.getLogger(ExecUtil.class.getName());
private Process proc = null;
private ExecUtil.StreamToStringRedirect errorStringRedirect = null;

View File

@ -52,6 +52,7 @@ import org.sleuthkit.autopsy.externalresults.ExternalResults;
import org.sleuthkit.autopsy.externalresults.ExternalResultsImporter;
import org.sleuthkit.autopsy.externalresults.ExternalResultsXMLParser;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModule;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.ingest.IngestMessage;
@ -74,14 +75,14 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
private static final IngestModuleReferenceCounter refCounter = new IngestModuleReferenceCounter();
private static final String moduleName = SampleExecutableIngestModuleFactory.getModuleName();
private final String fileInCaseDatabase = "/WINDOWS/system32/ntmsapi.dll"; // Probably
private long jobId;
private IngestJobContext context;
private String outputDirPath;
private String derivedFileInCaseDatabase;
@Override
public void startUp(IngestJobContext context) throws IngestModuleException {
jobId = context.getJobId();
if (refCounter.incrementAndGet(jobId) == 1) {
this.context = context;
if (refCounter.incrementAndGet(context.getJobId()) == 1) {
// Create an output directory for this job.
outputDirPath = Case.getCurrentCase().getModulesOutputDirAbsPath() + File.separator + moduleName; //NON-NLS
File outputDir = new File(outputDirPath);
@ -93,7 +94,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
@Override
public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) {
if (refCounter.get(jobId) == 1) {
if (refCounter.get(context.getJobId()) == 1) {
try {
// There will be two tasks: data source analysis and import of
// the results of the analysis.
@ -108,14 +109,18 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
// derived files, and reports generated by the analysis. In this
// sample ingest module, the generation of the analysis results is
// simulated.
String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", jobId);
String resultsFilePath = outputDirPath + File.separator + String.format("job_%d_results.xml", context.getJobId());
boolean haveRealExecutable = false;
if (haveRealExecutable) {
if (dataSource instanceof Image) {
Image image = (Image)dataSource;
String dataSourcePath = image.getPaths()[0];
ExecUtil executor = new ExecUtil();
executor.execute("some.exe", dataSourcePath, resultsFilePath);
List<String> commandLine = new ArrayList<>();
commandLine.add("some.exe");
commandLine.add(dataSourcePath);
commandLine.add(resultsFilePath);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
}
// not a disk image
else {
@ -136,7 +141,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
IngestServices.getInstance().postMessage(IngestMessage.createErrorMessage(moduleName, "External Results Import Error", errorInfo.getMessage()));
}
progressBar.progress(2);
} catch (InterruptedException | ParserConfigurationException | TransformerException | IOException ex) {
} catch (ParserConfigurationException | TransformerException | IOException ex) {
Logger logger = IngestServices.getInstance().getLogger(moduleName);
logger.log(Level.SEVERE, "Failed to simulate analysis and results import", ex); //NON-NLS
return ProcessResult.ERROR;
@ -155,7 +160,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
List<String> filePaths = new ArrayList<>();
String fileContents = "This is a simulated derived file.";
for (int i = 0; i < 2; ++i) {
String fileName = String.format("job_%d_derived_file_%d.txt", jobId, i);
String fileName = String.format("job_%d_derived_file_%d.txt", context.getJobId(), i);
filePaths.add(generateFile(fileName, fileContents.getBytes()));
if (i == 0) {
this.derivedFileInCaseDatabase = this.fileInCaseDatabase + "/" + fileName;
@ -168,7 +173,7 @@ public class SampleExecutableDataSourceIngestModule implements DataSourceIngestM
List<String> filePaths = new ArrayList<>();
String fileContents = "This is a simulated report.";
for (int i = 0; i < 2; ++i) {
String fileName = String.format("job_%d_report_%d.txt", jobId, i);
String fileName = String.format("job_%d_report_%d.txt", context.getJobId(), i);
filePaths.add(generateFile(fileName, fileContents.getBytes()));
}
return filePaths;

View File

@ -216,7 +216,7 @@ final class PhotoRecCarverFileIngestModule implements FileIngestModule {
PhotoRecCarverOutputParser parser = new PhotoRecCarverOutputParser(outputDirPath);
List<LayoutFile> theList = parser.parse(newAuditFile, id, file);
if (theList != null) { // if there were any results from carving, add the unallocated carving event to the reports list.
context.scheduleFiles(new ArrayList<>(theList));
context.addFilesToJob(new ArrayList<>(theList));
}
}
catch (IOException ex) {

View File

@ -181,7 +181,7 @@ public final class SevenZipIngestModule implements FileIngestModule {
//currently sending a single event for all new files
services.fireModuleContentEvent(new ModuleContentEvent(abstractFile));
context.scheduleFiles(unpackedFiles);
context.addFilesToJob(unpackedFiles);
}
return ProcessResult.OK;

View File

@ -247,8 +247,7 @@ public final class KeywordSearchIngestModule implements FileIngestModule {
return;
}
if (context.isJobCancelled()) {
logger.log(Level.INFO, "Ingest job cancelled"); //NON-NLS
if (context.fileIngestIsCancelled()) {
stop();
return;
}

View File

@ -128,7 +128,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -204,7 +204,7 @@ class Chrome extends Extract {
logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); //NON-NLS
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -341,7 +341,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -416,7 +416,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -504,7 +504,7 @@ class Chrome extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}

View File

@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.datamodel.*;
@ -111,7 +112,7 @@ class ExtractIE extends Extract {
continue;
}
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -201,7 +202,7 @@ class ExtractIE extends Extract {
dataFound = true;
for (AbstractFile cookiesFile : cookiesFiles) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
if (cookiesFile.getSize() == 0) {
@ -309,7 +310,7 @@ class ExtractIE extends Extract {
//indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat";
temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; //NON-NLS
File datFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
try {
@ -324,6 +325,9 @@ class ExtractIE extends Extract {
String filename = "pasco2Result." + indexFile.getId() + ".txt"; //NON-NLS
boolean bPascProcSuccess = executePasco(temps, filename);
if (context.dataSourceIngestIsCancelled()) {
return;
}
//At this point pasco2 processed the index files.
//Now fetch the results, parse them and then delete the files.
@ -354,34 +358,26 @@ class ExtractIE extends Extract {
*/
private boolean executePasco(String indexFilePath, String outputFileName) {
boolean success = true;
Writer writer = null;
ExecUtil execPasco = new ExecUtil();
try {
final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName;
final String errFileFullPath = moduleTempResultsDir + File.separator + outputFileName + ".err";
logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath); //NON-NLS
writer = new FileWriter(outputFileFullPath);
execPasco.execute(writer, JAVA_PATH,
"-cp", PASCO_LIB_PATH, //NON-NLS
"isi.pasco2.Main", "-T", "history", indexFilePath ); //NON-NLS
List<String> commandLine = new ArrayList<>();
commandLine.add(JAVA_PATH);
commandLine.add("-cp"); //NON-NLS
commandLine.add(PASCO_LIB_PATH);
commandLine.add("isi.pasco2.Main"); //NON-NLS
commandLine.add("-T"); //NON-NLS
commandLine.add("history"); //NON-NLS
commandLine.add(indexFilePath);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
processBuilder.redirectOutput(new File(outputFileFullPath));
processBuilder.redirectError(new File(errFileFullPath));
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
// @@@ Investigate use of history versus cache as type.
} catch (IOException ex) {
success = false;
logger.log(Level.SEVERE, "Unable to execute Pasco to process Internet Explorer web history.", ex); //NON-NLS
} catch (InterruptedException ex) {
success = false;
logger.log(Level.SEVERE, "Pasco has been interrupted, failed to extract some web history from Internet Explorer.", ex); //NON-NLS
}
finally {
if (writer != null) {
try {
writer.flush();
writer.close();
} catch (IOException ex) {
logger.log(Level.WARNING, "Error closing writer stream after for Pasco result", ex); //NON-NLS
}
}
execPasco.stop();
}
return success;
}

View File

@ -37,6 +37,7 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.coreutils.PlatformUtil;
import org.sleuthkit.autopsy.datamodel.ContentUtils;
import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProcessTerminator;
import org.sleuthkit.autopsy.ingest.IngestJobContext;
import org.sleuthkit.autopsy.recentactivity.UsbDeviceIdMapper.USBInfo;
import org.sleuthkit.datamodel.*;
@ -62,14 +63,10 @@ class ExtractRegistry extends Extract {
private String RR_FULL_PATH;
private boolean rrFound = false; // true if we found the Autopsy-specific version of regripper
private boolean rrFullFound = false; // true if we found the full version of regripper
final private static String MODULE_VERSION = "1.0";
private Content dataSource;
private IngestJobContext context;
final private static UsbDeviceIdMapper usbMapper = new UsbDeviceIdMapper();
//hide public constructor to prevent from instantiation by ingest module loader
ExtractRegistry() {
moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractRegistry.moduleName.text");
final File rrRoot = InstalledFileLocator.getDefault().locate("rr", ExtractRegistry.class.getPackage().getName(), false); //NON-NLS
@ -169,7 +166,7 @@ class ExtractRegistry extends Extract {
continue;
}
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -182,10 +179,9 @@ class ExtractRegistry extends Extract {
logger.log(Level.SEVERE, null, ex);
}
logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); //NON-NLS
RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase);
if (context.isJobCancelled()) {
logger.log(Level.INFO, "{0}- Now getting registry information from {1}", new Object[]{moduleName, regFileNameLocal}); //NON-NLS
RegOutputFiles regOutputFiles = ripRegistryFile(regFileNameLocal, outputPathBase);
if (context.dataSourceIngestIsCancelled()) {
break;
}
@ -268,9 +264,9 @@ class ExtractRegistry extends Extract {
* @param regFilePath Path to local copy of registry
* @param outFilePathBase Path to location to save output file to. Base mtimeItem that will be extended on
*/
private RegOutputFiles executeRegRip(String regFilePath, String outFilePathBase) {
private RegOutputFiles ripRegistryFile(String regFilePath, String outFilePathBase) {
String autopsyType = ""; // Type argument for rr for autopsy-specific modules
String fullType = ""; // Type argument for rr for full set of modules
String fullType; // Type argument for rr for full set of modules
RegOutputFiles regOutputFiles = new RegOutputFiles();
@ -298,78 +294,44 @@ class ExtractRegistry extends Extract {
// run the autopsy-specific set of modules
if (!autopsyType.isEmpty() && rrFound) {
// TODO - add error messages
Writer writer = null;
ExecUtil execRR = null;
try {
regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
logger.log(Level.INFO, "Writing RegRipper results to: " + regOutputFiles.autopsyPlugins); //NON-NLS
writer = new FileWriter(regOutputFiles.autopsyPlugins);
execRR = new ExecUtil();
execRR.execute(writer, RR_PATH,
"-r", regFilePath, "-f", autopsyType); //NON-NLS
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to RegRipper and process parse some registry files.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile",
this.getName()));
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, "RegRipper has been interrupted, failed to parse registry.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile2",
this.getName()));
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error closing output writer after running RegRipper", ex); //NON-NLS
}
}
if (execRR != null) {
execRR.stop();
}
}
regOutputFiles.autopsyPlugins = outFilePathBase + "-autopsy.txt"; //NON-NLS
String errFilePath = outFilePathBase + "-autopsy.err.txt"; //NON-NLS
logger.log(Level.INFO, "Writing RegRipper results to: {0}", regOutputFiles.autopsyPlugins); //NON-NLS
executeRegRipper(regFilePath, autopsyType, regOutputFiles.autopsyPlugins, errFilePath);
}
if (context.dataSourceIngestIsCancelled()) {
return regOutputFiles;
}
// run the full set of rr modules
if (!fullType.isEmpty() && rrFullFound) {
Writer writer = null;
ExecUtil execRR = null;
try {
regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
logger.log(Level.INFO, "Writing Full RegRipper results to: " + regOutputFiles.fullPlugins); //NON-NLS
writer = new FileWriter(regOutputFiles.fullPlugins);
execRR = new ExecUtil();
execRR.execute(writer, RR_FULL_PATH,
"-r", regFilePath, "-f", fullType); //NON-NLS
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to run full RegRipper and process parse some registry files.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile3",
this.getName()));
} catch (InterruptedException ex) {
logger.log(Level.SEVERE, "RegRipper full has been interrupted, failed to parse registry.", ex); //NON-NLS
this.addErrorMessage(
NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile4",
this.getName()));
} finally {
if (writer != null) {
try {
writer.close();
} catch (IOException ex) {
logger.log(Level.SEVERE, "Error closing output writer after running RegRipper full", ex); //NON-NLS
}
}
if (execRR != null) {
execRR.stop();
}
}
regOutputFiles.fullPlugins = outFilePathBase + "-full.txt"; //NON-NLS
String errFilePath = outFilePathBase + "-full.err.txt"; //NON-NLS
logger.log(Level.INFO, "Writing Full RegRipper results to: {0}", regOutputFiles.fullPlugins); //NON-NLS
executeRegRipper(regFilePath, fullType, regOutputFiles.fullPlugins, errFilePath);
}
return regOutputFiles;
}
private void executeRegRipper(String hiveFilePath, String hiveFileType, String outputFile, String errFile) {
try {
logger.log(Level.INFO, "Writing RegRipper results to: {0}", outputFile); //NON-NLS
List<String> commandLine = new ArrayList<>();
commandLine.add(RR_PATH);
commandLine.add("-r"); //NON-NLS
commandLine.add(hiveFilePath);
commandLine.add("-f"); //NON-NLS
commandLine.add(hiveFileType);
ProcessBuilder processBuilder = new ProcessBuilder(commandLine);
processBuilder.redirectOutput(new File(outputFile));
processBuilder.redirectError(new File(errFile));
ExecUtil.execute(processBuilder, new DataSourceIngestModuleProcessTerminator(context));
} catch (IOException ex) {
logger.log(Level.SEVERE, "Unable to run RegRipper", ex); //NON-NLS
this.addErrorMessage(NbBundle.getMessage(this.getClass(), "ExtractRegistry.execRegRip.errMsg.failedAnalyzeRegFile", this.getName()));
}
}
// @@@ VERIFY that we are doing the right thing when we parse multiple NTUSER.DAT
/**
*
@ -558,7 +520,7 @@ class ExtractRegistry extends Extract {
}
break;
default:
logger.log(Level.WARNING, "Unercognized node name: " + dataType);
logger.log(Level.WARNING, "Unrecognized node name: {0}", dataType);
break;
}
}

View File

@ -116,7 +116,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -197,7 +197,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -277,7 +277,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
@ -385,16 +385,16 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}
List<HashMap<String, Object>> tempList = this.dbConnect(temps, downloadQuery);
logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); //NON-NLS
logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); //NON-NLS
for (HashMap<String, Object> result : tempList) {
Collection<BlackboardAttribute> bbattributes = new ArrayList<BlackboardAttribute>();
Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(),
NbBundle.getMessage(this.getClass(),
@ -494,7 +494,7 @@ class Firefox extends Extract {
continue;
}
File dbFile = new File(temps);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
dbFile.delete();
break;
}

View File

@ -93,7 +93,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
for (int i = 0; i < extracters.size(); i++) {
Extract extracter = extracters.get(i);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); //NON-NLS
break;
}
@ -161,7 +161,7 @@ public final class RAImageIngestModule implements DataSourceIngestModule {
historyMsg.toString());
services.postMessage(inboxMsg);
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
return ProcessResult.OK;
}

View File

@ -79,7 +79,7 @@ class RecentDocumentsByLnk extends Extract {
dataFound = true;
for (AbstractFile recentFile : recentFiles) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break;
}

View File

@ -278,7 +278,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
for (BlackboardArtifact artifact : listArtifacts) {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
break; //User canceled the process.
}
@ -346,7 +346,7 @@ class SearchEngineURLQueryAnalyzer extends Extract {
} catch (TskCoreException e) {
logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); //NON-NLS
} finally {
if (context.isJobCancelled()) {
if (context.dataSourceIngestIsCancelled()) {
logger.info("Operation terminated by user."); //NON-NLS
}
IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(