From 50f49c529a906e333c3acef8a3e3049728a83627 Mon Sep 17 00:00:00 2001 From: alexjacks92 Date: Mon, 7 Apr 2014 15:56:47 -0400 Subject: [PATCH 01/27] Removing existing diff files before writing them out to the jenkins attachment location. --- test/script/regression.py | 134 +++++++++++++------------------------- 1 file changed, 46 insertions(+), 88 deletions(-) diff --git a/test/script/regression.py b/test/script/regression.py index 5e8025fb5d..1cf5c6e46c 100755 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -34,16 +34,13 @@ import xml from time import localtime, strftime from xml.dom.minidom import parse, parseString import smtplib -from email.mime.image import MIMEImage -from email.mime.multipart import MIMEMultipart -from email.mime.text import MIMEText import re import zipfile import zlib -import Emailer import srcupdater from regression_utils import * - +import shutil +import ntpath # # Please read me... # @@ -130,7 +127,6 @@ class TestRunner(object): logres =[] for test_data in test_data_list: Errors.clear_print_logs() - Errors.set_testing_phase(test_data.image) if not (test_config.args.rebuild or os.path.exists(test_data.gold_archive)): msg = "Gold standard doesn't exist, skipping image:" Errors.print_error(msg) @@ -148,28 +144,16 @@ class TestRunner(object): time.sleep(10) Reports.write_html_foot(test_config.html_log) - # TODO: move this elsewhere - if (len(logres)>0): - for lm in logres: - for ln in lm: - Errors.add_email_msg(ln) - # TODO: possibly worth putting this in a sub method if all([ test_data.overall_passed for test_data in test_data_list ]): - Errors.add_email_msg("All images passed.\n") + pass else: - msg = "The following images failed:\n" - for test_data in test_data_list: - if not test_data.overall_passed: - msg += "\t" + test_data.image + "\n" - Errors.add_email_msg(msg) html = open(test_config.html_log) - Errors.add_email_attachment(html.name) + Errors.add_errors_out(html.name) html.close() - - if test_config.email_enabled: - Emailer.send_email(test_config.mail_to, test_config.mail_server, - test_config.mail_subject, Errors.email_body, Errors.email_attachs) + + if test_config.jenkins: + setupAttachments(Errors.errors_out, test_config) def _run_autopsy_ingest(test_data): """Run Autopsy ingest for the image in the given TestData. 
@@ -234,8 +218,8 @@ class TestRunner(object): diffFiles = [ f for f in os.listdir(test_data.output_path) if os.path.isfile(os.path.join(test_data.output_path,f)) ] for f in diffFiles: if f.endswith("Diff.txt"): - Errors.add_email_attachment(os.path.join(test_data.output_path, f)) - Errors.add_email_attachment(test_data.common_log_path) + Errors.add_errors_out(os.path.join(test_data.output_path, f)) + Errors.add_errors_out(test_data.common_log_path) return logres def _extract_gold(test_data): @@ -303,7 +287,6 @@ class TestRunner(object): shutil.copy(test_data.sorted_log, error_pth) except IOError as e: Errors.print_error(str(e)) - Errors.add_email_message("Not rebuilt properly") print(str(e)) print(traceback.format_exc()) # Rebuild the HTML report @@ -365,6 +348,7 @@ class TestRunner(object): test_data.ant.append("-Dgold_path=" + test_config.gold) test_data.ant.append("-Dout_path=" + make_local_path(test_data.output_path)) + test_data.ant.append("-Ddiff_dir="+ test_config.diff_dir) test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) @@ -582,6 +566,7 @@ class TestConfiguration(object): images: a listof_Image, the images to be tested timeout: a Nat, the amount of time before killing the test ant: a listof_String, the ant command to run the tests + jenkins: a boolean, is this test running through a Jenkins job? """ def __init__(self, args): @@ -608,11 +593,7 @@ class TestConfiguration(object): # Infinite Testing info timer = 0 self.images = [] - # Email info - self.email_enabled = args.email_enabled - self.mail_server = "" - self.mail_to = "" - self.mail_subject = "" + self.jenkins = False # Set the timeout to something huge # The entire tester should not timeout before this number in ms # However it only seems to take about half this time @@ -650,15 +631,18 @@ class TestConfiguration(object): if parsed_config.getElementsByTagName("golddir"): self.gold = parsed_config.getElementsByTagName("golddir")[0].getAttribute("value").encode().decode("utf_8") self.img_gold = make_path(self.gold, 'tmp') - + if parsed_config.getElementsByTagName("jenkins"): + self.jenkins = True + if parsed_config.getElementsByTagName("diffdir"): + self.diff_dir = parsed_config.getElementsByTagName("diffdir")[0].getAttribute("value").encode().decode("utf_8") + else: + self.jenkins = False self._init_imgs(parsed_config) self._init_build_info(parsed_config) - self._init_email_info(parsed_config) except IOError as e: msg = "There was an error loading the configuration file.\n" msg += "\t" + str(e) - Errors.add_email_msg(msg) logging.critical(traceback.format_exc()) print(traceback.format_exc()) @@ -691,7 +675,6 @@ class TestConfiguration(object): else: msg = "File: " + value + " doesn't exist" Errors.print_error(msg) - Errors.add_email_msg(msg) image_count = len(self.images) # Sanity check to see if there are obvious gold images that we are not testing @@ -705,27 +688,6 @@ class TestConfiguration(object): elif (image_count < gold_count): print("******Alert: There are more gold standards than input images, this will not check all gold Standards.\n") - def _init_email_info(self, parsed_config): - """Initializes email information dictionary""" - email_elements = parsed_config.getElementsByTagName("email") - if email_elements: - mail_to = email_elements[0] - self.mail_to = mail_to.getAttribute("value").encode().decode("utf_8") - mail_server_elements = parsed_config.getElementsByTagName("mail_server") - if mail_server_elements: - 
mail_from = mail_server_elements[0] - self.mail_server = mail_from.getAttribute("value").encode().decode("utf_8") - subject_elements = parsed_config.getElementsByTagName("subject") - if subject_elements: - subject = subject_elements[0] - self.mail_subject = subject.getAttribute("value").encode().decode("utf_8") - if self.mail_server and self.mail_to and self.args.email_enabled: - self.email_enabled = True - print("Email will be sent to ", self.mail_to) - else: - self.email_enabled = False - print("No email will be sent.") - #-------------------------------------------------# # Functions relating to comparing outputs # @@ -803,11 +765,7 @@ class TestResultsDiffer(object): diff_file = codecs.open(diff_path, "wb", "utf_8") dffcmdlst = ["diff", output_file, gold_file] subprocess.call(dffcmdlst, stdout = diff_file) - Errors.add_email_attachment(diff_path) - msg = "There was a difference in " - msg += os.path.basename(output_file) + ".\n" - Errors.add_email_msg(msg) - Errors.print_error(msg) + Errors.add_errors_out(diff_path) return False else: return True @@ -1467,24 +1425,12 @@ class Errors: Attributes: printout: a listof_String, the non-error messages that were printed printerror: a listof_String, the error messages that were printed - email_body: a String, the body of the report email - email_msg_prefix: a String, the prefix for lines added to the email email_attchs: a listof_pathto_File, the files to be attached to the report email """ printout = [] printerror = [] - email_body = "" - email_msg_prefix = "Configuration" - email_attachs = [] - - def set_testing_phase(image_name): - """Change the email message prefix to be the given testing phase. - - Args: - image_name: a String, representing the current image being tested - """ - Errors.email_msg_prefix = image_name + errors_out = [] def print_out(msg): """Print out an informational message. @@ -1509,21 +1455,13 @@ class Errors: Errors.printout = [] Errors.printerror = [] - def add_email_msg(msg): - """Add the given message to the body of the report email. - - Args: - msg: a String, the message to be added to the email - """ - Errors.email_body += Errors.email_msg_prefix + ":" + msg - - def add_email_attachment(path): + def add_errors_out(path): """Add the given file to be an attachment for the report email Args: file: a pathto_File, the file to add """ - Errors.email_attachs.append(path) + Errors.errors_out.append(path) class DiffResults(object): @@ -1605,7 +1543,6 @@ class Args(object): self.exception = False self.exception_string = "" self.fr = False - self.email_enabled = False def parse(self): """Get the command line arguments and parse them.""" @@ -1665,8 +1602,6 @@ class Args(object): elif arg == "-fr" or arg == "--forcerun": print("Not downloading new images") self.fr = True - elif arg == "--email": - self.email_enabled = True else: print(usage()) return False @@ -1875,11 +1810,34 @@ def find_file_in_dir(dir, name, ext): except: raise DirNotFoundException(dir) +def setupAttachments(attachments, test_config): + """Move email attachments to the location specified in the config file. + Used for Jenkins build. 
+ + Args: + attachments: a listof_String, the files to be moved + test_config: TestConfiguration, used to determine where to move the files to + """ + call = ['pwd'] + subprocess.call(call) + + # remove old diff files + filelist = [f for f in os.listdir(test_config.diff_dir) if (f.endswith(".txt") or f.endswith(".html"))] + for f in filelist: + if os.path.isfile(f): + os.remove(f) + + # move in the new diff files + for file in attachments: + filename = ntpath.basename(file) + destination = os.path.join(test_config.diff_dir, filename) + call = ['cp', file, destination] + subprocess.call(call) + class OS: LINUX, MAC, WIN, CYGWIN = range(4) - if __name__ == "__main__": global SYS if _platform == "linux" or _platform == "linux2": From 402d3651cc1338c96054fee20b9df8f17d599698 Mon Sep 17 00:00:00 2001 From: alexjacks92 Date: Tue, 8 Apr 2014 11:56:14 -0400 Subject: [PATCH 02/27] Cleaning up code to work when you aren't running a jenkins job. --- test/script/regression.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/script/regression.py b/test/script/regression.py index 1cf5c6e46c..7c33e62d33 100755 --- a/test/script/regression.py +++ b/test/script/regression.py @@ -348,7 +348,8 @@ class TestRunner(object): test_data.ant.append("-Dgold_path=" + test_config.gold) test_data.ant.append("-Dout_path=" + make_local_path(test_data.output_path)) - test_data.ant.append("-Ddiff_dir="+ test_config.diff_dir) + if test_config.jenkins: + test_data.ant.append("-Ddiff_dir="+ test_config.diff_dir) test_data.ant.append("-Dignore_unalloc=" + "%s" % test_config.args.unallocated) test_data.ant.append("-Dtest.timeout=" + str(test_config.timeout)) @@ -1824,8 +1825,8 @@ def setupAttachments(attachments, test_config): # remove old diff files filelist = [f for f in os.listdir(test_config.diff_dir) if (f.endswith(".txt") or f.endswith(".html"))] for f in filelist: - if os.path.isfile(f): - os.remove(f) + if os.path.isfile(test_config.diff_dir + "/" + f): + os.remove(test_config.diff_dir + "/" + f) # move in the new diff files for file in attachments: From a2e59ad5e12ad84533efb9402ebfa603ae126a72 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Tue, 8 Apr 2014 16:30:57 -0400 Subject: [PATCH 03/27] IngestModuleAdapter now has a static moduleRefCount and static methods to access it. --- .../autopsy/ingest/IngestModuleAdapter.java | 19 +++++++++++++++ .../hashdatabase/HashDbIngestModule.java | 24 +++---------------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java index 93379de7d4..1c9d1f0aa8 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java @@ -18,11 +18,30 @@ */ package org.sleuthkit.autopsy.ingest; +import java.util.HashMap; + /** * An adapter that provides a default implementation of the IngestModule * interface. */ public abstract class IngestModuleAdapter implements IngestModule { + // Maps a JobId to the count of instances + static HashMap moduleRefCount = new HashMap<>(); + + public static synchronized void moduleRefCountIncrement(long jobID) { + long count = moduleRefCount.containsKey(jobID) ? 
moduleRefCount.get(jobID) : 0; + moduleRefCount.put(jobID, count + 1); + } + + public static synchronized long moduleRefCountDecrementAndGet(long jobID) { + if (moduleRefCount.containsKey(jobID)) { + long count = moduleRefCount.get(jobID); + moduleRefCount.put(jobID, --count); + return count; + } else { + return 0; + } + } @Override public void startUp(IngestJobContext context) throws IngestModuleException { diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index f8f2a00cc1..98aca11b72 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -21,7 +21,6 @@ package org.sleuthkit.autopsy.hashdatabase; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; @@ -57,27 +56,10 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges private List knownBadHashSets = new ArrayList<>(); private List knownHashSets = new ArrayList<>(); private long jobID; - // Maps a JobId to the count of instances - static HashMap moduleRefCount = new HashMap<>(); static AtomicLong totalKnownBadCount = new AtomicLong(0); static AtomicLong totalCalctime = new AtomicLong(0); static AtomicLong totalLookuptime = new AtomicLong(0); - - private static synchronized void moduleRefCountIncrement(long jobID) { - long count = moduleRefCount.containsKey(jobID) ? moduleRefCount.get(jobID) : 0; - moduleRefCount.put(jobID, count + 1); - } - - private static synchronized long moduleRefCountDecrementAndGet(long jobID) { - if (moduleRefCount.containsKey(jobID)) { - long count = moduleRefCount.get(jobID); - moduleRefCount.put(jobID, --count); - return count; - } else { - return 0; - } - } - + HashDbIngestModule(HashLookupModuleSettings settings) { this.settings = settings; } @@ -85,7 +67,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges @Override public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException { jobID = context.getJobId(); - moduleRefCountIncrement(jobID); + IngestModuleAdapter.moduleRefCountIncrement(jobID); getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); if (knownBadHashSets.isEmpty()) { services.postMessage(IngestMessage.createWarningMessage( @@ -317,7 +299,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges @Override public void shutDown(boolean ingestJobCancelled) { - if (moduleRefCountDecrementAndGet(jobID) == 0) { + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobID) == 0) { if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) { StringBuilder detailsSb = new StringBuilder(); //details From e76573ee8a74347889f3b7ad0841c7c07755f481 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Tue, 8 Apr 2014 16:42:01 -0400 Subject: [PATCH 04/27] Adding the missing bundle properties to this branch since my pull request to develop is still waiting. 
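For reference, the per-job reference count that PATCH 03 adds to IngestModuleAdapter (and that PATCHes 07-14 build on to post startup warnings and summary messages only once per ingest job) boils down to the small idiom sketched below. This is a self-contained illustration, not the Autopsy classes themselves; the HashMap's generic type parameters appear to have been stripped from the diff text above and are assumed here to be <Long, Long>, and the increment shown returns the new count, which is the behaviour PATCH 09 later introduces.

import java.util.HashMap;

/** Minimal sketch of the per-job module reference count idiom (assumed HashMap<Long, Long>). */
public class ModuleRefCountSketch {

    // Maps a job id to the number of module instances started for that job.
    private static final HashMap<Long, Long> moduleRefCount = new HashMap<>();

    /** Returns the count after incrementing; 1 means this is the first instance for the job. */
    public static synchronized long moduleRefCountIncrement(long jobId) {
        long count = moduleRefCount.containsKey(jobId) ? moduleRefCount.get(jobId) : 0;
        long next = count + 1;
        moduleRefCount.put(jobId, next);
        return next;
    }

    /** Returns the count after decrementing; 0 means the last instance for the job has shut down. */
    public static synchronized long moduleRefCountDecrementAndGet(long jobId) {
        if (moduleRefCount.containsKey(jobId)) {
            long count = moduleRefCount.get(jobId) - 1;
            moduleRefCount.put(jobId, count);
            return count;
        }
        return 0;
    }

    public static void main(String[] args) {
        long jobId = 42L;
        // Two module instances start up for the same job; only the first sees a count of 1.
        System.out.println(moduleRefCountIncrement(jobId) == 1); // true  -> post startup warnings once
        System.out.println(moduleRefCountIncrement(jobId) == 1); // false -> skip duplicate warnings
        // Shut down in any order; only the last instance sees the count reach 0.
        System.out.println(moduleRefCountDecrementAndGet(jobId) == 0); // false
        System.out.println(moduleRefCountDecrementAndGet(jobId) == 0); // true -> post the summary once
    }
}

Keeping both static methods synchronized makes the read-modify-write on the shared map atomic across the parallel file-ingest threads; a ConcurrentHashMap with merge() would be an alternative, but the patches keep the plain map guarded by the class lock.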
--- .../org/sleuthkit/autopsy/keywordsearch/Bundle.properties | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties index 040e9e0c03..a9a00a94b5 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/Bundle.properties @@ -263,3 +263,10 @@ KeywordSearchGlobalSearchSettingsPanel.timeRadioButton4.text_1=1 minute (faster KeywordSearchGlobalSearchSettingsPanel.chunksLabel.text=Chunks in keyword index: KeywordSearchGlobalSearchSettingsPanel.timeRadioButton3.toolTipText=5 minutes (overall ingest time will be longer) KeywordSearchGlobalSearchSettingsPanel.timeRadioButton3.text=5 minutes (default) +KeywordSearchIngestModule.regExpHitLbl=Reg Ex hit: +KeywordSearchIngestModule.kwHitLbl=Keyword hit: +KeywordSearchIngestModule.kwHitThLbl=Keyword +KeywordSearchIngestModule.previewThLbl=Preview +KeywordSearchIngestModule.fileThLbl=File +KeywordSearchIngestModule.listThLbl=List +KeywordSearchIngestModule.regExThLbl=Reg Ex \ No newline at end of file From ada3dd732bdaac82caeac19136cee2a1a7d3e21f Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Tue, 8 Apr 2014 16:49:51 -0400 Subject: [PATCH 05/27] Use the renamed kwHitThLbl property key in KeywordSearchIngestModule. --- .../autopsy/keywordsearch/KeywordSearchIngestModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index a8d124934c..2641f8b322 100755 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -916,7 +916,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme detailsSb.append(""); //hit detailsSb.append(""); - detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitLThLbl")); + detailsSb.append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.kwHitThLbl")); detailsSb.append(""); detailsSb.append(""); From 42e83d3b23a3f08bec8a680b39d3d57b321fcdb2 Mon Sep 17 00:00:00 2001 From: "Samuel H.
Kenyon" Date: Tue, 8 Apr 2014 17:42:30 -0400 Subject: [PATCH 06/27] ingestStatus is now static and shared between all threads --- .../KeywordSearchIngestModule.java | 89 ++++++++++--------- 1 file changed, 49 insertions(+), 40 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index f6897510b4..ba30cc097f 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -91,7 +91,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme private long dataSourceId; private static AtomicInteger instanceCount = new AtomicInteger(0); //just used for logging private int instanceNum = 0; - + private enum IngestStatus { TEXT_INGESTED, /// Text was extracted by knowing file type and text_ingested @@ -101,8 +101,14 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme SKIPPED_ERROR_TEXTEXTRACT, ///< File was skipped because of text extraction issues SKIPPED_ERROR_IO ///< File was skipped because of IO issues reading it }; - private Map ingestStatus; + private static final Map ingestStatus = new HashMap<>(); //guarded by itself + static void putIngestStatus(long id, IngestStatus status) { + synchronized(ingestStatus) { + ingestStatus.put(id, status); + } + } + KeywordSearchIngestModule(KeywordSearchJobSettings settings) { this.settings = settings; instanceNum = instanceCount.getAndIncrement(); @@ -167,8 +173,6 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme textExtractors.add(new AbstractFileHtmlExtract(this)); textExtractors.add(new AbstractFileTikaTextExtract(this)); - ingestStatus = new HashMap<>(); - List keywordLists = KeywordSearchListsXML.getCurrent().getListsL(); boolean hasKeywordsForSearch = false; for (KeywordList keywordList : keywordLists) { @@ -191,7 +195,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme if (initialized == false) //error initializing indexing/Solr { logger.log(Level.WARNING, "Skipping processing, module not initialized, file: {0}", abstractFile.getName()); - ingestStatus.put(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + putIngestStatus(abstractFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); return ProcessResult.OK; } try { @@ -276,7 +280,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme * Common cleanup code when module stops or final searcher completes */ private void cleanup() { - ingestStatus.clear(); + synchronized(ingestStatus) { + ingestStatus.clear(); + } textExtractors.clear(); textExtractors = null; @@ -297,31 +303,34 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme int error_text = 0; int error_index = 0; int error_io = 0; - for (IngestStatus s : ingestStatus.values()) { - switch (s) { - case TEXT_INGESTED: - ++text_ingested; - break; - case METADATA_INGESTED: - ++metadata_ingested; - break; - case STRINGS_INGESTED: - ++strings_ingested; - break; - case SKIPPED_ERROR_TEXTEXTRACT: - error_text++; - break; - case SKIPPED_ERROR_INDEXING: - error_index++; - break; - case SKIPPED_ERROR_IO: - error_io++; - break; - default: - ; + + synchronized(ingestStatus) { + for (IngestStatus s : ingestStatus.values()) { + switch (s) { + case TEXT_INGESTED: + ++text_ingested; + break; + case 
METADATA_INGESTED: + ++metadata_ingested; + break; + case STRINGS_INGESTED: + ++strings_ingested; + break; + case SKIPPED_ERROR_TEXTEXTRACT: + error_text++; + break; + case SKIPPED_ERROR_INDEXING: + error_index++; + break; + case SKIPPED_ERROR_IO: + error_io++; + break; + default: + ; + } } } - + StringBuilder msg = new StringBuilder(); msg.append("
").append(EscapeUtil.escapeHtml(attr.getValueString())).append("
"); msg.append(""); @@ -393,16 +402,16 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme private boolean extractStringsAndIndex(AbstractFile aFile) { try { if (stringExtractor.index(aFile)) { - ingestStatus.put(aFile.getId(), IngestStatus.STRINGS_INGESTED); + putIngestStatus(aFile.getId(), IngestStatus.STRINGS_INGESTED); return true; } else { logger.log(Level.WARNING, "Failed to extract strings and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); return false; } } catch (IngesterException ex) { logger.log(Level.WARNING, "Failed to extract strings and ingest, file '" + aFile.getName() + "' (id: " + aFile.getId() + ").", ex); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); return false; } } @@ -448,9 +457,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme if ((indexContent == false || aFile.isDir() || size == 0)) { try { ingester.ingest(aFile, false); //meta-data only - ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); + putIngestStatus(aFile.getId(), IngestStatus.METADATA_INGESTED); } catch (IngesterException ex) { - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); } return; @@ -484,9 +493,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme if (AbstractFileExtract.ARCHIVE_MIME_TYPES.contains(detectedFormat)) { try { ingester.ingest(aFile, false); //meta-data only - ingestStatus.put(aFile.getId(), IngestStatus.METADATA_INGESTED); + putIngestStatus(aFile.getId(), IngestStatus.METADATA_INGESTED); } catch (IngesterException ex) { - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); logger.log(Level.WARNING, "Unable to index meta-data for file: " + aFile.getId(), ex); } return; @@ -499,20 +508,20 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme //logger.log(Level.INFO, "indexing: " + aFile.getName()); if (!extractTextAndIndex(aFile, detectedFormat)) { logger.log(Level.WARNING, "Failed to extract text and ingest, file ''{0}'' (id: {1}).", new Object[]{aFile.getName(), aFile.getId()}); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); } else { - ingestStatus.put(aFile.getId(), IngestStatus.TEXT_INGESTED); + putIngestStatus(aFile.getId(), IngestStatus.TEXT_INGESTED); wasTextAdded = true; } } catch (IngesterException e) { logger.log(Level.INFO, "Could not extract text with Tika, " + aFile.getId() + ", " + aFile.getName(), e); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_INDEXING); } catch (Exception e) { logger.log(Level.WARNING, "Error extracting text with Tika, " + aFile.getId() + ", " + aFile.getName(), e); - ingestStatus.put(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); + putIngestStatus(aFile.getId(), IngestStatus.SKIPPED_ERROR_TEXTEXTRACT); } } From 000a70a7d9a8be978ac8f7ed0425858902735501 Mon Sep 17 00:00:00 2001 
From: "Samuel H. Kenyon" Date: Tue, 8 Apr 2014 17:48:21 -0400 Subject: [PATCH 07/27] Just do one summary postMessage for KeywordSearch module. --- .../autopsy/keywordsearch/KeywordSearchIngestModule.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index ba30cc097f..d19e0ea328 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -125,6 +125,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme initialized = false; jobId = context.getJobId(); + IngestModuleAdapter.moduleRefCountIncrement(jobId); caseHandle = Case.getCurrentCase().getSleuthkitCase(); tikaFormatDetector = new Tika(); ingester = Server.getIngester(); @@ -251,7 +252,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme // Remove from the search list and trigger final commit and final search SearchRunner.getInstance().endJob(jobId); - postIndexSummary(); + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { + postIndexSummary(); + } //log number of files / chunks in index //signal a potential change in number of text_ingested files From 4be1592f955fca3d964a7bfb24cdda034ed87505 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Tue, 8 Apr 2014 17:48:35 -0400 Subject: [PATCH 08/27] renaming --- .../autopsy/ingest/IngestModuleAdapter.java | 14 +++++++------- .../autopsy/hashdatabase/HashDbIngestModule.java | 8 ++++---- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java index 1c9d1f0aa8..c2c4baa1ee 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java @@ -28,15 +28,15 @@ public abstract class IngestModuleAdapter implements IngestModule { // Maps a JobId to the count of instances static HashMap moduleRefCount = new HashMap<>(); - public static synchronized void moduleRefCountIncrement(long jobID) { - long count = moduleRefCount.containsKey(jobID) ? moduleRefCount.get(jobID) : 0; - moduleRefCount.put(jobID, count + 1); + public static synchronized void moduleRefCountIncrement(long jobId) { + long count = moduleRefCount.containsKey(jobId) ? 
moduleRefCount.get(jobId) : 0; + moduleRefCount.put(jobId, count + 1); } - public static synchronized long moduleRefCountDecrementAndGet(long jobID) { - if (moduleRefCount.containsKey(jobID)) { - long count = moduleRefCount.get(jobID); - moduleRefCount.put(jobID, --count); + public static synchronized long moduleRefCountDecrementAndGet(long jobId) { + if (moduleRefCount.containsKey(jobId)) { + long count = moduleRefCount.get(jobId); + moduleRefCount.put(jobId, --count); return count; } else { return 0; diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 98aca11b72..473884a3a8 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -55,7 +55,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges private final HashLookupModuleSettings settings; private List knownBadHashSets = new ArrayList<>(); private List knownHashSets = new ArrayList<>(); - private long jobID; + private long jobId; static AtomicLong totalKnownBadCount = new AtomicLong(0); static AtomicLong totalCalctime = new AtomicLong(0); static AtomicLong totalLookuptime = new AtomicLong(0); @@ -66,8 +66,8 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges @Override public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException { - jobID = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobID); + jobId = context.getJobId(); + IngestModuleAdapter.moduleRefCountIncrement(jobId); getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); if (knownBadHashSets.isEmpty()) { services.postMessage(IngestMessage.createWarningMessage( @@ -299,7 +299,7 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges @Override public void shutDown(boolean ingestJobCancelled) { - if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobID) == 0) { + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { if ((!knownBadHashSets.isEmpty()) || (!knownHashSets.isEmpty())) { StringBuilder detailsSb = new StringBuilder(); //details From e740ade703153f0b3e29191786bc3c7b0b338943 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 10:49:21 -0400 Subject: [PATCH 09/27] HashDB module: only post error messages once per job --- .../autopsy/ingest/IngestModuleAdapter.java | 6 ++- .../hashdatabase/HashDbIngestModule.java | 40 ++++++++++--------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java index c2c4baa1ee..101ea80dae 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java @@ -28,9 +28,11 @@ public abstract class IngestModuleAdapter implements IngestModule { // Maps a JobId to the count of instances static HashMap moduleRefCount = new HashMap<>(); - public static synchronized void moduleRefCountIncrement(long jobId) { + public static synchronized long moduleRefCountIncrement(long jobId) { long count = moduleRefCount.containsKey(jobId) ? 
moduleRefCount.get(jobId) : 0; - moduleRefCount.put(jobId, count + 1); + long nextCount = count + 1; + moduleRefCount.put(jobId, nextCount); + return nextCount; } public static synchronized long moduleRefCountDecrementAndGet(long jobId) { diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 473884a3a8..97cf92b32f 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -66,26 +66,30 @@ public class HashDbIngestModule extends IngestModuleAdapter implements FileInges @Override public void startUp(org.sleuthkit.autopsy.ingest.IngestJobContext context) throws IngestModuleException { - jobId = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobId); + jobId = context.getJobId(); getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); - if (knownBadHashSets.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage( - HashLookupModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.noKnownBadHashDbSetMsg"), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); - } - getEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets); - if (knownHashSets.isEmpty()) { - services.postMessage(IngestMessage.createWarningMessage( - HashLookupModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.noKnownHashDbSetMsg"), - NbBundle.getMessage(this.getClass(), - "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); + + if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + // if first module for this job then post error msgs if needed + + if (knownBadHashSets.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage( + HashLookupModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownBadHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownBadFileSearchWillNotExecuteWarn"))); + } + + if (knownHashSets.isEmpty()) { + services.postMessage(IngestMessage.createWarningMessage( + HashLookupModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.noKnownHashDbSetMsg"), + NbBundle.getMessage(this.getClass(), + "HashDbIngestModule.knownFileSearchWillNotExecuteWarn"))); + } } } From d8d03aeae1d099d1dea82e3177c108e9c09cef54 Mon Sep 17 00:00:00 2001 From: "Samuel H. 
Kenyon" Date: Wed, 9 Apr 2014 11:00:36 -0400 Subject: [PATCH 10/27] KeywordSearch: check the server and existence of keywords only once per job --- .../KeywordSearchIngestModule.java | 79 ++++++++++--------- 1 file changed, 41 insertions(+), 38 deletions(-) diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index d19e0ea328..06bdec668b 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -122,41 +122,57 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme @Override public void startUp(IngestJobContext context) throws IngestModuleException { logger.log(Level.INFO, "Initializing instance {0}", instanceNum); - initialized = false; - + initialized = false; jobId = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobId); caseHandle = Case.getCurrentCase().getSleuthkitCase(); tikaFormatDetector = new Tika(); ingester = Server.getIngester(); - final Server server = KeywordSearch.getServer(); - try { - if (!server.isRunning()) { + if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + // if first module for this job then check the server and existence of keywords + + final Server server = KeywordSearch.getServer(); + try { + if (!server.isRunning()) { + String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); + logger.log(Level.SEVERE, msg); + String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); + services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(), msg, details)); + throw new IngestModuleException(msg); + } + } catch (KeywordSearchModuleException ex) { + logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); + //this means Solr is not properly initialized String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - logger.log(Level.SEVERE, msg); String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(), msg, details)); throw new IngestModuleException(msg); } - } catch (KeywordSearchModuleException ex) { - logger.log(Level.WARNING, "Error checking if Solr server is running while initializing ingest", ex); - //this means Solr is not properly initialized - String msg = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.badInitMsg"); - String details = NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.tryStopSolrMsg", msg); - services.postMessage(IngestMessage.createErrorMessage(KeywordSearchModuleFactory.getModuleName(), msg, details)); - throw new IngestModuleException(msg); - } - try { - // make an actual query to verify that server is responding - // we had cases where getStatus was OK, but the connection resulted in a 404 - server.queryNumIndexedDocuments(); - } catch (KeywordSearchModuleException | NoOpenCoreException ex) { - throw new IngestModuleException( - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.exception.errConnToSolr.msg", - ex.getMessage())); - } + try { + // make an actual query to verify that server is responding + // we had cases 
where getStatus was OK, but the connection resulted in a 404 + server.queryNumIndexedDocuments(); + } catch (KeywordSearchModuleException | NoOpenCoreException ex) { + throw new IngestModuleException( + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.exception.errConnToSolr.msg", + ex.getMessage())); + } + // check if this job has any searchable keywords + List keywordLists = KeywordSearchListsXML.getCurrent().getListsL(); + boolean hasKeywordsForSearch = false; + for (KeywordList keywordList : keywordLists) { + if (settings.isKeywordListEnabled(keywordList.getName()) && !keywordList.getKeywords().isEmpty()) { + hasKeywordsForSearch = true; + break; + } + } + if (!hasKeywordsForSearch) { + services.postMessage(IngestMessage.createWarningMessage(KeywordSearchModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), + NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); + } + } + //initialize extractors stringExtractor = new AbstractFileStringExtract(this); stringExtractor.setScripts(KeywordSearchSettings.getStringExtractScripts()); @@ -173,20 +189,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme //order matters, more specific extractors first textExtractors.add(new AbstractFileHtmlExtract(this)); textExtractors.add(new AbstractFileTikaTextExtract(this)); - - List keywordLists = KeywordSearchListsXML.getCurrent().getListsL(); - boolean hasKeywordsForSearch = false; - for (KeywordList keywordList : keywordLists) { - if (settings.isKeywordListEnabled(keywordList.getName()) && !keywordList.getKeywords().isEmpty()) { - hasKeywordsForSearch = true; - break; - } - } - if (!hasKeywordsForSearch) { - services.postMessage(IngestMessage.createWarningMessage(KeywordSearchModuleFactory.getModuleName(), NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.noKwInLstMsg"), - NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.init.onlyIdxKwSkipMsg"))); - } - + indexer = new Indexer(); initialized = true; } From 85451734a7abf0cc3bd03d3b6341f6d3bbeb0d5f Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 11:08:23 -0400 Subject: [PATCH 11/27] remove unused messageId var --- .../thunderbirdparser/ThunderbirdMboxFileIngestModule.java | 1 - 1 file changed, 1 deletion(-) diff --git a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java index 2389b2e0f4..289f1fa5a0 100644 --- a/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java +++ b/thunderbirdparser/src/org/sleuthkit/autopsy/thunderbirdparser/ThunderbirdMboxFileIngestModule.java @@ -54,7 +54,6 @@ public final class ThunderbirdMboxFileIngestModule extends IngestModuleAdapter i private static final Logger logger = Logger.getLogger(ThunderbirdMboxFileIngestModule.class.getName()); private IngestServices services = IngestServices.getInstance(); - private int messageId = 0; // RJCTODO: Not thread safe private FileManager fileManager; private IngestJobContext context; From 09376f73d4ce225eae446f0b3556e1ce953f9143 Mon Sep 17 00:00:00 2001 From: "Samuel H. 
Kenyon" Date: Wed, 9 Apr 2014 12:25:45 -0400 Subject: [PATCH 12/27] multithreading updates for ExifParserFileIngestModule --- .../exif/ExifParserFileIngestModule.java | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java index 3df8d1686e..d12c63cff4 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java @@ -32,11 +32,13 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Date; +import java.util.concurrent.atomic.AtomicInteger; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.ImageUtils; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; @@ -57,12 +59,20 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem private static final Logger logger = Logger.getLogger(ExifParserFileIngestModule.class.getName()); private final IngestServices services = IngestServices.getInstance(); - private int filesProcessed = 0; - private boolean filesToFire = false; + private AtomicInteger filesProcessed = new AtomicInteger(0); + private volatile boolean filesToFire = false; + private long jobId; ExifParserFileIngestModule() { } + @Override + public void startUp(IngestJobContext context) throws IngestModuleException { + jobId = context.getJobId(); + IngestModuleAdapter.moduleRefCountIncrement(jobId); + } + + @Override public ProcessResult process(AbstractFile content) { //skip unalloc @@ -76,8 +86,8 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem } // update the tree every 1000 files if we have EXIF data that is not being being displayed - filesProcessed++; - if ((filesToFire) && (filesProcessed % 1000 == 0)) { + final int filesProcessedValue = filesProcessed.incrementAndGet(); + if ((filesToFire) && (filesProcessedValue % 1000 == 0)) { services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); filesToFire = false; } @@ -187,9 +197,12 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem @Override public void shutDown(boolean ingestJobCancelled) { - if (filesToFire) { - //send the final new data event - services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); + // We only need to check for this final event on the last thread per job + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { + if (filesToFire) { + //send the final new data event + services.fireModuleDataEvent(new ModuleDataEvent(ExifParserModuleFactory.getModuleName(), BlackboardArtifact.ARTIFACT_TYPE.TSK_METADATA_EXIF)); + } } } } \ No newline at end of file From b0b9e8201ccbee6463ef0f3129917f47dc0c580e Mon Sep 17 00:00:00 2001 From: "Samuel H. 
Kenyon" Date: Wed, 9 Apr 2014 13:32:58 -0400 Subject: [PATCH 13/27] FileExtMismatchIngestModule: only send one final message --- .../exif/ExifParserFileIngestModule.java | 2 +- .../FileExtMismatchIngestModule.java | 44 +++++++++++-------- .../KeywordSearchIngestModule.java | 1 + 3 files changed, 27 insertions(+), 20 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java index d12c63cff4..1fa1250960 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java @@ -197,7 +197,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem @Override public void shutDown(boolean ingestJobCancelled) { - // We only need to check for this final event on the last thread per job + // We only need to check for this final event on the last module per job if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { if (filesToFire) { //send the final new data event diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java index a04686bded..f7bb46f95f 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java @@ -23,6 +23,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; @@ -38,7 +39,6 @@ import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.TskCoreException; import org.sleuthkit.datamodel.TskData; -import org.sleuthkit.datamodel.TskData.FileKnown; import org.sleuthkit.datamodel.TskException; /** @@ -50,8 +50,9 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements private final IngestServices services = IngestServices.getInstance(); private final FileExtMismatchDetectorModuleSettings settings; private HashMap SigTypeToExtMap = new HashMap<>(); - private long processTime = 0; - private long numFiles = 0; + private long jobId; + private static AtomicLong processTime = new AtomicLong(0); + private static AtomicLong numFiles = new AtomicLong(0); FileExtMismatchIngestModule(FileExtMismatchDetectorModuleSettings settings) { this.settings = settings; @@ -59,6 +60,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements @Override public void startUp(IngestJobContext context) throws IngestModuleException { + jobId = context.getJobId(); + IngestModuleAdapter.moduleRefCountIncrement(jobId); FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault(); SigTypeToExtMap = xmlLoader.load(); } @@ -82,8 +85,8 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements boolean mismatchDetected = compareSigTypeToExt(abstractFile); - processTime += (System.currentTimeMillis() - startTime); - numFiles++; + processTime.getAndAdd(System.currentTimeMillis() - startTime); + numFiles.getAndIncrement(); if (mismatchDetected) { // add artifact @@ -149,19 +152,22 @@ public class FileExtMismatchIngestModule extends 
IngestModuleAdapter implements @Override public void shutDown(boolean ingestJobCancelled) { - StringBuilder detailsSb = new StringBuilder(); - detailsSb.append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.knowFileHeaderLbl")).append("").append(text_ingested).append("
").append(NbBundle.getMessage(this.getClass(), "KeywordSearchIngestModule.postIndexSummary.fileGenStringsHead")).append("").append(strings_ingested).append("
"); - detailsSb.append(""); - detailsSb.append("\n"); - detailsSb.append("\n"); - detailsSb.append("
").append(FileExtMismatchDetectorModuleFactory.getModuleName()).append("
").append( - NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalProcTime")) - .append("").append(processTime).append("
").append( - NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalFiles")) - .append("").append(numFiles).append("
"); - services.postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileExtMismatchDetectorModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "FileExtMismatchIngestModule.complete.svcMsg.text"), - detailsSb.toString())); + // We only need to post the summary msg from the last module per job + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { + StringBuilder detailsSb = new StringBuilder(); + detailsSb.append(""); + detailsSb.append(""); + detailsSb.append("\n"); + detailsSb.append("\n"); + detailsSb.append("
").append(FileExtMismatchDetectorModuleFactory.getModuleName()).append("
").append( + NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalProcTime")) + .append("").append(processTime.get()).append("
").append( + NbBundle.getMessage(this.getClass(), "FileExtMismatchIngestModule.complete.totalFiles")) + .append("").append(numFiles.get()).append("
"); + services.postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileExtMismatchDetectorModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), + "FileExtMismatchIngestModule.complete.svcMsg.text"), + detailsSb.toString())); + } } } diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index 06bdec668b..ad903141ed 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -255,6 +255,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme // Remove from the search list and trigger final commit and final search SearchRunner.getInstance().endJob(jobId); + // We only need to post the summary msg from the last module per job if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { postIndexSummary(); } From be4f3194fc7970c9d4fcb4787212d59341e57f8b Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 13:57:48 -0400 Subject: [PATCH 14/27] FileTypeIdIngestModule: only send one final message --- .../filetypeid/FileTypeIdIngestModule.java | 52 ++++++++++++------- 1 file changed, 32 insertions(+), 20 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 8e0c790656..48e1a997f4 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -18,11 +18,12 @@ */ package org.sleuthkit.autopsy.modules.filetypeid; +import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; - import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.FileIngestModule; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.AbstractFile; @@ -44,8 +45,10 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI private static final Logger logger = Logger.getLogger(FileTypeIdIngestModule.class.getName()); private static final long MIN_FILE_SIZE = 512; private final FileTypeIdModuleSettings settings; - private long matchTime = 0; - private long numFiles = 0; + private long jobId; + private static AtomicLong matchTime = new AtomicLong(0); + private static AtomicLong numFiles = new AtomicLong(0); + // The detector. Swap out with a different implementation of FileTypeDetectionInterface as needed. // If desired in the future to be more knowledgable about weird files or rare formats, we could // actually have a list of detectors which are called in order until a match is found. 
@@ -55,6 +58,12 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI this.settings = settings; } + @Override + public void startUp(IngestJobContext context) throws IngestModuleException { + jobId = context.getJobId(); + IngestModuleAdapter.moduleRefCountIncrement(jobId); + } + @Override public ProcessResult process(AbstractFile abstractFile) { // skip non-files @@ -75,8 +84,8 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI try { long startTime = System.currentTimeMillis(); FileTypeDetectionInterface.FileIdInfo fileId = detector.attemptMatch(abstractFile); - matchTime += (System.currentTimeMillis() - startTime); - numFiles++; + matchTime.getAndAdd(System.currentTimeMillis() - startTime); + numFiles.getAndIncrement(); if (!fileId.type.isEmpty()) { // add artifact @@ -98,22 +107,25 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI @Override public void shutDown(boolean ingestJobCancelled) { - StringBuilder detailsSb = new StringBuilder(); - detailsSb.append(""); - detailsSb.append(""); - detailsSb.append("\n"); - detailsSb.append("\n"); - detailsSb.append("
").append(FileTypeIdModuleFactory.getModuleName()).append("
") - .append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalProcTime")) - .append("").append(matchTime).append("
") - .append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalFiles")) - .append("").append(numFiles).append("
"); - IngestServices.getInstance().postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileTypeIdModuleFactory.getModuleName(), - NbBundle.getMessage(this.getClass(), - "FileTypeIdIngestModule.complete.srvMsg.text"), - detailsSb.toString())); + // We only need to post the summary msg from the last module per job + if (IngestModuleAdapter.moduleRefCountDecrementAndGet(jobId) == 0) { + StringBuilder detailsSb = new StringBuilder(); + detailsSb.append(""); + detailsSb.append(""); + detailsSb.append("\n"); + detailsSb.append("\n"); + detailsSb.append("
").append(FileTypeIdModuleFactory.getModuleName()).append("
") + .append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalProcTime")) + .append("").append(matchTime.get()).append("
") + .append(NbBundle.getMessage(this.getClass(), "FileTypeIdIngestModule.complete.totalFiles")) + .append("").append(numFiles.get()).append("
"); + IngestServices.getInstance().postMessage(IngestMessage.createMessage(IngestMessage.MessageType.INFO, FileTypeIdModuleFactory.getModuleName(), + NbBundle.getMessage(this.getClass(), + "FileTypeIdIngestModule.complete.srvMsg.text"), + detailsSb.toString())); + } } - + /** * Validate if a given mime type is in the detector's registry. * From 1e255f09794ebc0402f73d07b012d95dc6cbaff2 Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 14:07:05 -0400 Subject: [PATCH 15/27] SevenZipIngestModule: only check 7zip init once --- .../sevenzip/SevenZipIngestModule.java | 31 +++++++++++-------- .../KeywordSearchIngestModule.java | 4 +-- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java index 8bfc126cef..76ffa70926 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java @@ -90,6 +90,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F private final byte[] fileHeaderBuffer = new byte[readHeaderSize]; private static final int ZIP_SIGNATURE_BE = 0x504B0304; private IngestJobContext context; + private long jobId; SevenZipIngestModule() { } @@ -97,6 +98,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F @Override public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; + jobId = context.getJobId(); final Case currentCase = Case.getCurrentCase(); @@ -121,20 +123,23 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F } } - try { - SevenZip.initSevenZipFromPlatformJAR(); - String platform = SevenZip.getUsedPlatform(); - logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: {0}", platform); - } catch (SevenZipNativeInitializationException e) { - logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); - String msg = NbBundle.getMessage(this.getClass(), "SevenZipIngestModule.init.errInitModule.msg", - ArchiveFileExtractorModuleFactory.getModuleName()); - String details = NbBundle.getMessage(this.getClass(), "SevenZipIngestModule.init.errCantInitLib", - e.getMessage()); - services.postMessage(IngestMessage.createErrorMessage(ArchiveFileExtractorModuleFactory.getModuleName(), msg, details)); - throw new RuntimeException(e); + // if first instance of this module for this job then check 7zip init + if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + try { + SevenZip.initSevenZipFromPlatformJAR(); + String platform = SevenZip.getUsedPlatform(); + logger.log(Level.INFO, "7-Zip-JBinding library was initialized on supported platform: {0}", platform); + } catch (SevenZipNativeInitializationException e) { + logger.log(Level.SEVERE, "Error initializing 7-Zip-JBinding library", e); + String msg = NbBundle.getMessage(this.getClass(), "SevenZipIngestModule.init.errInitModule.msg", + ArchiveFileExtractorModuleFactory.getModuleName()); + String details = NbBundle.getMessage(this.getClass(), "SevenZipIngestModule.init.errCantInitLib", + e.getMessage()); + services.postMessage(IngestMessage.createErrorMessage(ArchiveFileExtractorModuleFactory.getModuleName(), msg, details)); + throw new RuntimeException(e); + } } - + archiveDepthCountTree = new ArchiveDepthCountTree(); } diff --git 
a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index ad903141ed..fb7b5cc3d5 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -128,9 +128,9 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme tikaFormatDetector = new Tika(); ingester = Server.getIngester(); + // increment the module reference count + // if first instance of this module for this job then check the server and existence of keywords if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { - // if first module for this job then check the server and existence of keywords - final Server server = KeywordSearch.getServer(); try { if (!server.isRunning()) { From 2f098d2202e6c2e6a70abfaa67897e9db10d6eef Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 14:11:35 -0400 Subject: [PATCH 16/27] made a string final --- .../sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java index 5a0bc208a6..875dcaa772 100644 --- a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java +++ b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java @@ -53,7 +53,7 @@ class ScalpelCarverIngestModule extends IngestModuleAdapter implements FileInges private static final Logger logger = Logger.getLogger(ScalpelCarverIngestModule.class.getName()); private final String MODULE_OUTPUT_DIR_NAME = "ScalpelCarver"; private String moduleOutputDirPath; - private String configFileName = "scalpel.conf"; + private final String configFileName = "scalpel.conf"; private String configFilePath; private boolean initialized = false; private ScalpelCarver carver; From e2d66535abcc3515a2fef6b95eed7bc4520c0277 Mon Sep 17 00:00:00 2001 From: "Samuel H. 
Kenyon" Date: Wed, 9 Apr 2014 15:14:48 -0400 Subject: [PATCH 17/27] fix qualifier consistency issues with the table model --- .../autopsy/ingest/IngestMessagePanel.java | 34 +++++++++++++------ 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index 7596e1f4cd..dd8314f41e 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -395,18 +395,18 @@ class IngestMessagePanel extends JPanel implements TableModelListener { } @Override - synchronized public int getRowCount() { + public synchronized int getRowCount() { return getNumberGroups(); } - public void markAllSeen() { + public synchronized void markAllSeen() { for (TableEntry entry : messageData) { entry.hasBeenSeen(true); } fireTableChanged(new TableModelEvent(this)); } - public int getNumberNewMessages() { + public synchronized int getNumberNewMessages() { int newMessages = 0; for (TableEntry entry : messageData) { if (!entry.hasBeenSeen()) { @@ -416,11 +416,11 @@ class IngestMessagePanel extends JPanel implements TableModelListener { return newMessages; } - synchronized int getNumberGroups() { + public synchronized int getNumberGroups() { return messageData.size(); } - synchronized int getNumberMessages() { + public synchronized int getNumberMessages() { int total = 0; for (TableEntry e : messageData) { total += e.messageGroup.getCount(); @@ -428,7 +428,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { return total; } - synchronized int getNumberUnreadMessages() { + public synchronized int getNumberUnreadMessages() { int total = 0; for (TableEntry e : messageData) { if (!e.hasBeenVisited) { @@ -438,7 +438,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { return total; } - synchronized int getNumberUnreadGroups() { + public synchronized int getNumberUnreadGroups() { int total = 0; for (TableEntry e : messageData) { if (!e.hasBeenVisited) { @@ -513,7 +513,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { return ret; } - synchronized public void addMessage(IngestMessage m) { + public synchronized void addMessage(IngestMessage m) { //check how many messages per module with the same uniqness //and add to existing group or create a new group String moduleName = m.getSource(); @@ -628,15 +628,27 @@ class IngestMessagePanel extends JPanel implements TableModelListener { } public synchronized boolean isVisited(int rowNumber) { - return messageData.get(rowNumber).hasBeenVisited(); + if (rowNumber < messageData.size()) { + return messageData.get(rowNumber).hasBeenVisited(); + } else { + return false; + } } public synchronized MessageType getMessageType(int rowNumber) { - return messageData.get(rowNumber).messageGroup.getMessageType(); + if (rowNumber < messageData.size()) { + return messageData.get(rowNumber).messageGroup.getMessageType(); + } else { + return null; + } } public synchronized IngestMessageGroup getMessageGroup(int rowNumber) { - return messageData.get(rowNumber).messageGroup; + if (rowNumber < messageData.size()) { + return messageData.get(rowNumber).messageGroup; + } else { + return null; + } } public synchronized void reSort(boolean chronoLogical) { From 3bd0b6a1a1d0f98043083c021dc4e008d6c41ef2 Mon Sep 17 00:00:00 2001 From: alexjacks92 Date: Wed, 9 Apr 2014 15:16:05 -0400 Subject: [PATCH 18/27] Work to incorporate module 
descriptions into the UI. --- .../autopsy/ingest/Bundle.properties | 4 +- .../ingest/IngestJobConfigurationPanel.form | 83 +++++++++++++++---- .../ingest/IngestJobConfigurationPanel.java | 67 +++++++++++---- 3 files changed, 120 insertions(+), 34 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties index 837612fa53..0a088da4dd 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties +++ b/Core/src/org/sleuthkit/autopsy/ingest/Bundle.properties @@ -21,7 +21,6 @@ IngestJob.progress.fileIngest.displayName=File Ingest of {0} IngestJob.progress.cancelling={0} (Cancelling...) IngestJobConfigurationPanel.processUnallocCheckbox.toolTipText=Processes unallocated space, such as deleted files. Produces more complete results, but it may take longer to process on large images. IngestJobConfigurationPanel.processUnallocCheckbox.text=Process Unallocated Space -IngestJobConfigurationPanel.advancedButton.text=Advanced IngestJob.toString.text=ScheduledTask'{'input\={0}, modules\={1}'}' IngestJobLauncher.modName.tbirdParser.text=Thunderbird Parser IngestJobLauncher.modName.mboxParser.text=MBox Parser @@ -98,3 +97,6 @@ IngestScheduler.remove.exception.notSupported.msg=Not supported. IngestScheduler.DataSourceScheduler.exception.next.msg=There is no data source tasks in the queue, check hasNext() IngestScheduler.DataSourceScheduler.exception.remove.msg=Removing of scheduled data source ingest tasks is not supported. IngestScheduler.DataSourceScheduler.toString.size=DataSourceQueue, size\: {0} +Label1 +IngestJobConfigurationPanel.advancedButton.text=Advanced +IngestJobConfigurationPanel.advancedButton.actionCommand=Advanced diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.form b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.form index ae6e4c7e93..a0533766ff 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.form +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.form [six hunks of NetBeans .form layout XML not reproducible here: the markup was lost in extraction, leaving only hunk headers (@@ -13, -48, -108, -136, -164, -183 @@) and bare +/- markers] diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.java index 0e0ce8d74d..a0c617a4bd 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestJobConfigurationPanel.java @@ -107,6 +107,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { simplePanel.revalidate(); simplePanel.repaint(); advancedButton.setEnabled(null != selectedModule.getGlobalSettingsPanel()); + descriptionLabel.setText(selectedModule.getDescription()); } } }); @@ -131,12 +132,14 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { jSeparator2 = new javax.swing.JSeparator(); jScrollPane1 = new javax.swing.JScrollPane(); simplePanel = new javax.swing.JPanel(); + scrollpane = new javax.swing.JScrollPane(); + descriptionLabel = new javax.swing.JTextArea(); processUnallocPanel = new javax.swing.JPanel(); processUnallocCheckbox = new javax.swing.JCheckBox(); setMaximumSize(new java.awt.Dimension(5750, 3000)); setMinimumSize(new java.awt.Dimension(522, 257)); - setPreferredSize(new java.awt.Dimension(575, 300));
+ setPreferredSize(new java.awt.Dimension(575, 400)); modulesScrollPane.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(160, 160, 160))); modulesScrollPane.setPreferredSize(new java.awt.Dimension(160, 160)); @@ -158,6 +161,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { jPanel1.setPreferredSize(new java.awt.Dimension(338, 257)); advancedButton.setText(org.openide.util.NbBundle.getMessage(IngestJobConfigurationPanel.class, "IngestJobConfigurationPanel.advancedButton.text")); // NOI18N + advancedButton.setActionCommand(org.openide.util.NbBundle.getMessage(IngestJobConfigurationPanel.class, "IngestJobConfigurationPanel.advancedButton.actionCommand")); // NOI18N advancedButton.setEnabled(false); advancedButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { @@ -171,25 +175,49 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { simplePanel.setLayout(new javax.swing.BoxLayout(simplePanel, javax.swing.BoxLayout.PAGE_AXIS)); jScrollPane1.setViewportView(simplePanel); + scrollpane.setBorder(null); + scrollpane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); + scrollpane.setVerticalScrollBarPolicy(javax.swing.ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER); + + descriptionLabel.setEditable(false); + descriptionLabel.setBackground(new java.awt.Color(240, 240, 240)); + descriptionLabel.setColumns(20); + descriptionLabel.setFont(new java.awt.Font("Tahoma", 0, 11)); // NOI18N + descriptionLabel.setLineWrap(true); + descriptionLabel.setRows(5); + descriptionLabel.setWrapStyleWord(true); + descriptionLabel.setBorder(null); + scrollpane.setViewportView(descriptionLabel); + javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 326, Short.MAX_VALUE) - .addComponent(jSeparator2, javax.swing.GroupLayout.Alignment.TRAILING) - .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup() - .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addComponent(advancedButton) - .addContainerGap()) + .addGroup(jPanel1Layout.createSequentialGroup() + .addContainerGap() + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 316, Short.MAX_VALUE) + .addGroup(jPanel1Layout.createSequentialGroup() + .addComponent(scrollpane) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(advancedButton) + .addGap(14, 14, 14)))) + .addComponent(jSeparator2) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel1Layout.createSequentialGroup() - .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) - .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) - .addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) - .addGap(0, 0, 0) - .addComponent(advancedButton) + .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 242, Short.MAX_VALUE) + 
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel1Layout.createSequentialGroup() + .addGap(22, 22, 22) + .addComponent(scrollpane, javax.swing.GroupLayout.PREFERRED_SIZE, 65, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(jPanel1Layout.createSequentialGroup() + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(jPanel1Layout.createSequentialGroup() + .addGap(18, 18, 18) + .addComponent(advancedButton))) .addContainerGap()) ); @@ -210,7 +238,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { .addGroup(processUnallocPanelLayout.createSequentialGroup() .addContainerGap() .addComponent(processUnallocCheckbox) - .addContainerGap(60, Short.MAX_VALUE)) + .addContainerGap(108, Short.MAX_VALUE)) ); processUnallocPanelLayout.setVerticalGroup( processUnallocPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) @@ -238,15 +266,19 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { .addGroup(layout.createSequentialGroup() .addContainerGap() .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) - .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 278, Short.MAX_VALUE) + .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, 342, Short.MAX_VALUE) .addGroup(layout.createSequentialGroup() - .addComponent(modulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 233, Short.MAX_VALUE) + .addComponent(modulesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(processUnallocPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap()) ); }// //GEN-END:initComponents + private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed + processUnallocatedSpace = processUnallocCheckbox.isSelected(); + }//GEN-LAST:event_processUnallocCheckboxActionPerformed + private void advancedButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_advancedButtonActionPerformed final AdvancedConfigurationDialog dialog = new AdvancedConfigurationDialog(); @@ -270,11 +302,9 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { dialog.display(selectedModule.getGlobalSettingsPanel()); }//GEN-LAST:event_advancedButtonActionPerformed - private void processUnallocCheckboxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_processUnallocCheckboxActionPerformed - processUnallocatedSpace = processUnallocCheckbox.isSelected(); - }//GEN-LAST:event_processUnallocCheckboxActionPerformed // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JButton advancedButton; + private javax.swing.JTextArea descriptionLabel; private javax.swing.JPanel jPanel1; private javax.swing.JScrollPane jScrollPane1; private javax.swing.JSeparator jSeparator2; @@ -282,6 +312,7 @@ class IngestJobConfigurationPanel extends javax.swing.JPanel { private javax.swing.JTable modulesTable; private javax.swing.JCheckBox processUnallocCheckbox; private javax.swing.JPanel processUnallocPanel; + private javax.swing.JScrollPane scrollpane; private javax.swing.JPanel simplePanel; private javax.swing.ButtonGroup timeGroup; // 
End of variables declaration//GEN-END:variables From 7e7f7b43bb453624e5b341945598a928e79df09f Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Wed, 9 Apr 2014 17:35:18 -0400 Subject: [PATCH 19/27] updates to make IngestMessagePanel table model thread-safe --- .../autopsy/ingest/IngestMessagePanel.java | 88 ++++++++++--------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index dd8314f41e..74a0901004 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -713,26 +713,26 @@ class IngestMessagePanel extends JPanel implements TableModelListener { messages.add(message); } - List getMessages() { + synchronized List getMessages() { return messages; } - void add(IngestMessage message) { + synchronized void add(IngestMessage message) { messages.add(message); } //add all messages from another group - void addAll(IngestMessageGroup group) { + synchronized void addAll(IngestMessageGroup group) { for (IngestMessage m : group.getMessages()) { messages.add(m); } } - int getCount() { + synchronized int getCount() { return messages.size(); } - String getDetails() { + synchronized String getDetails() { StringBuilder b = new StringBuilder(""); for (IngestMessage m : messages) { String details = m.getDetails(); @@ -751,7 +751,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { * return color corresp to priority * @return */ - Color getColor() { + synchronized Color getColor() { int count = messages.size(); if (count == 1) { return VERY_HIGH_PRI_COLOR; @@ -769,7 +769,7 @@ class IngestMessagePanel extends JPanel implements TableModelListener { * used for chrono sort * @return */ - Date getDatePosted() { + synchronized Date getDatePosted() { return messages.get(messages.size() - 1).getDatePosted(); } @@ -777,35 +777,35 @@ class IngestMessagePanel extends JPanel implements TableModelListener { * get subject of the first message * @return */ - String getSubject() { + synchronized String getSubject() { return messages.get(0).getSubject(); } /* * return unique key, should be the same for all msgs */ - String getUniqueKey() { + synchronized String getUniqueKey() { return messages.get(0).getUniqueKey(); } /* * return source module, should be the same for all msgs */ - String getSource() { + synchronized String getSource() { return messages.get(0).getSource(); } /* * return data of the first message */ - BlackboardArtifact getData() { + synchronized BlackboardArtifact getData() { return messages.get(0).getData(); } /* * return message type, should be the same for all msgs */ - IngestMessage.MessageType getMessageType() { + synchronized IngestMessage.MessageType getMessageType() { return messages.get(0).getMessageType(); } } @@ -870,16 +870,17 @@ class IngestMessagePanel extends JPanel implements TableModelListener { cell.setFont(new Font("", Font.PLAIN, 16)); final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row); - MessageType mt = messageGroup.getMessageType(); - if (mt == MessageType.ERROR) { - cell.setBackground(ERROR_COLOR); - } else if (mt == MessageType.WARNING) { - cell.setBackground(Color.orange); - } else { - //cell.setBackground(table.getBackground()); - cell.setBackground(messageGroup.getColor()); + if (messageGroup != null) { + MessageType mt = messageGroup.getMessageType(); + if (mt == MessageType.ERROR) { + 
cell.setBackground(ERROR_COLOR); + } else if (mt == MessageType.WARNING) { + cell.setBackground(Color.orange); + } else { + //cell.setBackground(table.getBackground()); + cell.setBackground(messageGroup.getColor()); + } } - return cell; } } @@ -910,16 +911,17 @@ class IngestMessagePanel extends JPanel implements TableModelListener { } final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row); - MessageType mt = messageGroup.getMessageType(); - if (mt == MessageType.ERROR) { - cell.setBackground(ERROR_COLOR); - } else if (mt == MessageType.WARNING) { - cell.setBackground(Color.orange); - } else { - //cell.setBackground(table.getBackground()); - cell.setBackground(messageGroup.getColor()); + if (messageGroup != null) { + MessageType mt = messageGroup.getMessageType(); + if (mt == MessageType.ERROR) { + cell.setBackground(ERROR_COLOR); + } else if (mt == MessageType.WARNING) { + cell.setBackground(Color.orange); + } else { + //cell.setBackground(table.getBackground()); + cell.setBackground(messageGroup.getColor()); + } } - return cell; } } @@ -945,14 +947,16 @@ class IngestMessagePanel extends JPanel implements TableModelListener { Component cell = super.getTableCellRendererComponent(table, aValue, isSelected, hasFocus, row, column); final IngestMessageGroup messageGroup = tableModel.getMessageGroup(row); - MessageType mt = messageGroup.getMessageType(); - if (mt == MessageType.ERROR) { - cell.setBackground(ERROR_COLOR); - } else if (mt == MessageType.WARNING) { - cell.setBackground(Color.orange); - } else { - //cell.setBackground(table.getBackground()); - cell.setBackground(messageGroup.getColor()); + if (messageGroup != null) { + MessageType mt = messageGroup.getMessageType(); + if (mt == MessageType.ERROR) { + cell.setBackground(ERROR_COLOR); + } else if (mt == MessageType.WARNING) { + cell.setBackground(Color.orange); + } else { + //cell.setBackground(table.getBackground()); + cell.setBackground(messageGroup.getColor()); + } } return cell; @@ -986,9 +990,11 @@ class IngestMessagePanel extends JPanel implements TableModelListener { messageTable.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); //check if has details IngestMessageGroup m = getMessageGroup(selected); - String details = m.getDetails(); - if (details != null && !details.equals("")) { - mainPanel.showDetails(selected); + if (m != null) { + String details = m.getDetails(); + if (details != null && !details.equals("")) { + mainPanel.showDetails(selected); + } } messageTable.setCursor(null); } From dc5624916e1c71e82ca271c341cd2bd25b6c6ad2 Mon Sep 17 00:00:00 2001 From: "Samuel H. 
Kenyon" Date: Wed, 9 Apr 2014 17:38:59 -0400 Subject: [PATCH 20/27] removed IngestMessageGroup.getMessages() --- .../org/sleuthkit/autopsy/ingest/IngestMessagePanel.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index 74a0901004..36ffe035dd 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -713,17 +713,13 @@ class IngestMessagePanel extends JPanel implements TableModelListener { messages.add(message); } - synchronized List getMessages() { - return messages; - } - synchronized void add(IngestMessage message) { messages.add(message); } //add all messages from another group synchronized void addAll(IngestMessageGroup group) { - for (IngestMessage m : group.getMessages()) { + for (IngestMessage m : messages) { messages.add(m); } } From e8773786f452d52a3493cdfc3fc1490764dfa9e5 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 12:58:45 -0400 Subject: [PATCH 21/27] Added overwrite flag to PlatformUtil resource file extraction method, use for pipeline config --- .../autopsy/coreutils/PlatformUtil.java | 4 +- .../sleuthkit/autopsy/coreutils/XMLUtil.java | 2 +- .../ingest/IngestPipelinesConfiguration.java | 130 ++++++++++-------- .../fileextmismatch/FileExtMismatchXML.java | 2 +- .../SearchEngineURLQueryAnalyzer.java | 2 +- .../recentactivity/UsbDeviceIdMapper.java | 2 +- .../scalpel/ScalpelCarverIngestModule.java | 2 +- git-daemon-export-okay | 0 8 files changed, 80 insertions(+), 64 deletions(-) create mode 100644 git-daemon-export-okay diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java index d32234fa92..d3258e5885 100644 --- a/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java +++ b/Core/src/org/sleuthkit/autopsy/coreutils/PlatformUtil.java @@ -208,11 +208,11 @@ public class PlatformUtil { * @throws IOException exception thrown if extract the file failed for IO * reasons */ - public static boolean extractResourceToUserConfigDir(final Class resourceClass, final String resourceFile) throws IOException { + public static boolean extractResourceToUserConfigDir(final Class resourceClass, final String resourceFile, boolean overWrite) throws IOException { final File userDir = new File(getUserConfigDirectory()); final File resourceFileF = new File(userDir + File.separator + resourceFile); - if (resourceFileF.exists()) { + if (resourceFileF.exists() && !overWrite) { return false; } diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java b/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java index 2a19075913..ed401bc5c3 100644 --- a/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java +++ b/Core/src/org/sleuthkit/autopsy/coreutils/XMLUtil.java @@ -70,7 +70,7 @@ public class XMLUtil { */ public static boolean xmlIsValid(DOMSource xmlfile, Class clazz, String schemaFile) { try{ - PlatformUtil.extractResourceToUserConfigDir(clazz, schemaFile); + PlatformUtil.extractResourceToUserConfigDir(clazz, schemaFile, false); File schemaLoc = new File(PlatformUtil.getUserConfigDirectory() + File.separator + schemaFile); SchemaFactory schm = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI); try{ diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java 
b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java index 5c93b9d102..7ac7e4f238 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java @@ -24,6 +24,7 @@ import java.util.ArrayList; import java.util.List; import java.util.logging.Level; import org.sleuthkit.autopsy.coreutils.Logger; +import org.sleuthkit.autopsy.coreutils.ModuleSettings; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.coreutils.XMLUtil; import org.w3c.dom.Document; @@ -39,12 +40,15 @@ import org.w3c.dom.NodeList; final class IngestPipelinesConfiguration { private static final Logger logger = Logger.getLogger(IngestPipelinesConfiguration.class.getName()); - private final static String PIPELINES_CONFIG_FILE = "pipeline_config.xml"; - private final static String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd"; + private static final String PIPELINE_CONFIG_FILE_VERSION_KEY = "PipelineConfigFileVersion"; + private static final String PIPELINE_CONFIG_FILE_VERSION_NO_STRING = "1"; + private static final int PIPELINE_CONFIG_FILE_VERSION_NO = 1; + private static final String PIPELINES_CONFIG_FILE = "pipeline_config.xml"; + private static final String PIPELINES_CONFIG_FILE_XSD = "PipelineConfigSchema.xsd"; private static final String XML_PIPELINE_ELEM = "PIPELINE"; private static final String XML_PIPELINE_TYPE_ATTR = "type"; - private final static String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis"; - private final static String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis"; + private static final String DATA_SOURCE_INGEST_PIPELINE_TYPE = "ImageAnalysis"; + private static final String FILE_INGEST_PIPELINE_TYPE = "FileAnalysis"; private static final String XML_MODULE_ELEM = "MODULE"; private static final String XML_MODULE_CLASS_NAME_ATTR = "location"; private static IngestPipelinesConfiguration instance; @@ -73,65 +77,77 @@ final class IngestPipelinesConfiguration { private void readPipelinesConfigurationFile() { try { - PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE); - } catch (IOException ex) { - logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir", ex); - return; - } - - String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE; - Document doc = XMLUtil.loadDoc(IngestPipelinesConfiguration.class, configFilePath, PIPELINES_CONFIG_FILE_XSD); - if (doc == null) { - return; - } - - Element rootElement = doc.getDocumentElement(); - if (rootElement == null) { - logger.log(Level.SEVERE, "Invalid pipelines config file"); - return; - } - - NodeList pipelineElements = rootElement.getElementsByTagName(XML_PIPELINE_ELEM); - int numPipelines = pipelineElements.getLength(); - if (numPipelines < 1 || numPipelines > 2) { - logger.log(Level.SEVERE, "Invalid pipelines config file"); - return; - } - - List pipelineConfig = null; - for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { - Element pipelineElement = (Element) pipelineElements.item(pipelineNum); - String pipelineTypeAttr = pipelineElement.getAttribute(XML_PIPELINE_TYPE_ATTR); - if (pipelineTypeAttr != null) { - switch (pipelineTypeAttr) { - case DATA_SOURCE_INGEST_PIPELINE_TYPE: - pipelineConfig = dataSourceIngestPipelineConfig; - break; - case FILE_INGEST_PIPELINE_TYPE: - pipelineConfig = fileIngestPipelineConfig; - break; - default: - logger.log(Level.SEVERE, "Invalid 
pipelines config file"); - return; - } + boolean overWrite; + if (!ModuleSettings.settingExists(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY)) { + ModuleSettings.setConfigSetting(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY, PIPELINE_CONFIG_FILE_VERSION_NO_STRING); + overWrite = true; + } else { + int versionNumber = Integer.parseInt(ModuleSettings.getConfigSetting(this.getClass().getSimpleName(), PIPELINE_CONFIG_FILE_VERSION_KEY)); + overWrite = versionNumber < PIPELINE_CONFIG_FILE_VERSION_NO; + // TODO: Migrate user edits } - // Create an ordered list of class names. The sequence of class - // names defines the sequence of modules in the pipeline. - if (pipelineConfig != null) { - NodeList modulesElems = pipelineElement.getElementsByTagName(XML_MODULE_ELEM); - int numModules = modulesElems.getLength(); - if (numModules == 0) { - break; + boolean fileCopied = PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE, overWrite); + if (!fileCopied) { + logger.log(Level.SEVERE, "Failure copying default pipeline configuration to user dir"); + } + + String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE; + Document doc = XMLUtil.loadDoc(IngestPipelinesConfiguration.class, configFilePath, PIPELINES_CONFIG_FILE_XSD); + if (doc == null) { + return; + } + + Element rootElement = doc.getDocumentElement(); + if (rootElement == null) { + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } + + NodeList pipelineElements = rootElement.getElementsByTagName(XML_PIPELINE_ELEM); + int numPipelines = pipelineElements.getLength(); + if (numPipelines < 1 || numPipelines > 2) { + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } + + List pipelineConfig = null; + for (int pipelineNum = 0; pipelineNum < numPipelines; ++pipelineNum) { + Element pipelineElement = (Element) pipelineElements.item(pipelineNum); + String pipelineTypeAttr = pipelineElement.getAttribute(XML_PIPELINE_TYPE_ATTR); + if (pipelineTypeAttr != null) { + switch (pipelineTypeAttr) { + case DATA_SOURCE_INGEST_PIPELINE_TYPE: + pipelineConfig = dataSourceIngestPipelineConfig; + break; + case FILE_INGEST_PIPELINE_TYPE: + pipelineConfig = fileIngestPipelineConfig; + break; + default: + logger.log(Level.SEVERE, "Invalid pipelines config file"); + return; + } } - for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { - Element moduleElement = (Element) modulesElems.item(moduleNum); - final String moduleClassName = moduleElement.getAttribute(XML_MODULE_CLASS_NAME_ATTR); - if (moduleClassName != null) { - pipelineConfig.add(moduleClassName); + + // Create an ordered list of class names. The sequence of class + // names defines the sequence of modules in the pipeline. 
+ if (pipelineConfig != null) { + NodeList modulesElems = pipelineElement.getElementsByTagName(XML_MODULE_ELEM); + int numModules = modulesElems.getLength(); + if (numModules == 0) { + break; + } + for (int moduleNum = 0; moduleNum < numModules; ++moduleNum) { + Element moduleElement = (Element) modulesElems.item(moduleNum); + final String moduleClassName = moduleElement.getAttribute(XML_MODULE_CLASS_NAME_ATTR); + if (moduleClassName != null) { + pipelineConfig.add(moduleClassName); + } } } } + } catch (IOException ex) { + logger.log(Level.SEVERE, "Error copying default pipeline configuration to user dir", ex); } } } diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchXML.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchXML.java index 10b3319eb8..d90802a69b 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchXML.java +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchXML.java @@ -61,7 +61,7 @@ class FileExtMismatchXML { this.filePath = filePath; try { - boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME); + boolean extracted = PlatformUtil.extractResourceToUserConfigDir(FileExtMismatchXML.class, DEFAULT_CONFIG_FILE_NAME, false); } catch (IOException ex) { logger.log(Level.SEVERE, "Error copying default mismatch configuration to user dir ", ex); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index b57eb19a66..a175a7d2c4 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -322,7 +322,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { @Override void init() throws IngestModuleException { try { - PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE); + PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE, false); init2(); } catch (IOException e) { String message = "Unable to find " + XMLFILE; diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/UsbDeviceIdMapper.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/UsbDeviceIdMapper.java index 9870b9af1d..b7b8abc56d 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/UsbDeviceIdMapper.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/UsbDeviceIdMapper.java @@ -87,7 +87,7 @@ class UsbDeviceIdMapper { */ private void loadDeviceMap() throws FileNotFoundException, IOException { devices = new HashMap<>(); - PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile); + PlatformUtil.extractResourceToUserConfigDir(this.getClass(), DataFile, false); try (Scanner dat = new Scanner(new FileInputStream(new java.io.File(PlatformUtil.getUserConfigDirectory() + File.separator + "USB_DATA.txt")))) { /* Syntax of file: * diff --git a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java index 5a0bc208a6..8b2211a36b 100644 --- a/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java +++ b/ScalpelCarver/src/org/sleuthkit/autopsy/scalpel/ScalpelCarverIngestModule.java @@ -100,7 +100,7 @@ class ScalpelCarverIngestModule 
extends IngestModuleAdapter implements FileInges // copy the default config file to the user's home directory if one // is not already there try { - PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName); + PlatformUtil.extractResourceToUserConfigDir(this.getClass(), configFileName, false); } catch (IOException ex) { String message = "Could not obtain the path to the Scalpel configuration file."; logger.log(Level.SEVERE, message, ex); diff --git a/git-daemon-export-okay b/git-daemon-export-okay new file mode 100644 index 0000000000..e69de29bb2 From aaacbc944d40b08d343792a8db9c19b8c27d580c Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 14:28:00 -0400 Subject: [PATCH 22/27] Fixed pipeline config bug from misunderstanding of PlatformUtil method --- .../autopsy/ingest/IngestPipelinesConfiguration.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java index 7ac7e4f238..b834d13f62 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestPipelinesConfiguration.java @@ -86,11 +86,7 @@ final class IngestPipelinesConfiguration { overWrite = versionNumber < PIPELINE_CONFIG_FILE_VERSION_NO; // TODO: Migrate user edits } - - boolean fileCopied = PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE, overWrite); - if (!fileCopied) { - logger.log(Level.SEVERE, "Failure copying default pipeline configuration to user dir"); - } + PlatformUtil.extractResourceToUserConfigDir(IngestPipelinesConfiguration.class, PIPELINES_CONFIG_FILE, overWrite); String configFilePath = PlatformUtil.getUserConfigDirectory() + File.separator + PIPELINES_CONFIG_FILE; Document doc = XMLUtil.loadDoc(IngestPipelinesConfiguration.class, configFilePath, PIPELINES_CONFIG_FILE_XSD); From 1431d12a9004e4129d28a29256d5e6d10fae522d Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 17:10:08 -0400 Subject: [PATCH 23/27] Eliminated duplication of ingest job cancellation check methods --- .../SampleDataSourceIngestModule.java | 4 +- .../ingest/DataSourceIngestModule.java | 2 +- ...va => DataSourceIngestModuleProgress.java} | 20 +---- .../ingest/DataSourceIngestPipeline.java | 4 +- .../autopsy/recentactivity/Chrome.java | 48 +++++------ .../autopsy/recentactivity/Extract.java | 6 +- .../autopsy/recentactivity/ExtractIE.java | 44 +++++----- .../recentactivity/ExtractRegistry.java | 29 ++++--- .../autopsy/recentactivity/Firefox.java | 82 ++++++++----------- .../recentactivity/RAImageIngestModule.java | 17 ++-- .../recentactivity/RecentDocumentsByLnk.java | 21 +++-- .../SearchEngineURLQueryAnalyzer.java | 21 +++-- .../ewfverify/EwfVerifyIngestModule.java | 8 +- 13 files changed, 144 insertions(+), 162 deletions(-) rename Core/src/org/sleuthkit/autopsy/ingest/{DataSourceIngestModuleStatusHelper.java => DataSourceIngestModuleProgress.java} (74%) diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java index b4be1df416..108adb2100 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java @@ -35,7 +35,7 @@ import java.util.logging.Level; import org.sleuthkit.autopsy.casemodule.Case; 
import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.casemodule.services.Services; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.Content; @@ -118,7 +118,7 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { * @return A result code indicating success or failure of the processing. */ @Override - public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { // There are two tasks to do. Use the status helper to set the the // progress bar to determinate and to set the remaining number of work // units to be completed. diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java index 23bd4ee807..b94c037e08 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModule.java @@ -35,5 +35,5 @@ public interface DataSourceIngestModule extends IngestModule { * detect ingest job cancellation. * @return A result code indicating success or failure of the processing. */ - ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper); + ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper); } \ No newline at end of file diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java similarity index 74% rename from Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java rename to Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java index 33474e8587..39fbca98fe 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleStatusHelper.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestModuleProgress.java @@ -18,33 +18,19 @@ */ package org.sleuthkit.autopsy.ingest; -import org.netbeans.api.progress.ProgressHandle; - /** - * Used by data source ingest modules to report progress and detect data source - * ingest job cancellation. + * Used by data source ingest modules to report progress. */ -public class DataSourceIngestModuleStatusHelper { +public class DataSourceIngestModuleProgress { private final IngestJob ingestJob; private final String moduleDisplayName; - DataSourceIngestModuleStatusHelper(IngestJob ingestJob, String moduleDisplayName) { + DataSourceIngestModuleProgress(IngestJob ingestJob, String moduleDisplayName) { this.ingestJob = ingestJob; this.moduleDisplayName = moduleDisplayName; } - /** - * Checks for ingest job cancellation. This should be polled by the module - * in its process() method. If the ingest task is canceled, the module - * should return from its process() method as quickly as possible. - * - * @return True if the task has been canceled, false otherwise. - */ - public boolean isIngestJobCancelled() { - return (ingestJob.isCancelled()); - } - /** * Updates the progress bar and switches it to determinate mode. 
This should * be called by the module as soon as the number of total work units diff --git a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java index bac11238b5..fb349f249c 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/DataSourceIngestPipeline.java @@ -79,7 +79,7 @@ final class DataSourceIngestPipeline { List errors = new ArrayList<>(); for (DataSourceIngestModuleDecorator module : this.modules) { try { - module.process(job.getDataSource(), new DataSourceIngestModuleStatusHelper(job, module.getDisplayName())); + module.process(job.getDataSource(), new DataSourceIngestModuleProgress(job, module.getDisplayName())); } catch (Exception ex) { errors.add(new IngestModuleError(module.getDisplayName(), ex)); } @@ -126,7 +126,7 @@ final class DataSourceIngestPipeline { } @Override - public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + public IngestModule.ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { return module.process(dataSource, statusHelper); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java index 8554fe04a7..b0b8cae007 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java @@ -39,7 +39,7 @@ import java.io.FileReader; import java.io.IOException; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -62,27 +62,29 @@ class Chrome extends Extract { private static final String downloadQueryVersion30 = "SELECT current_path as full_path, url, start_time, received_bytes FROM downloads, downloads_url_chains WHERE downloads.id=downloads_url_chains.id"; private static final String loginQuery = "select origin_url, username_value, signon_realm from logins"; private final Logger logger = Logger.getLogger(this.getClass().getName()); + private Content dataSource; + private IngestJobContext context; Chrome() { moduleName = NbBundle.getMessage(Chrome.class, "Chrome.moduleName"); } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; dataFound = false; - this.getHistory(dataSource, statusHelper); - this.getBookmark(dataSource, statusHelper); - this.getCookie(dataSource, statusHelper); - this.getLogin(dataSource, statusHelper); - this.getDownload(dataSource, statusHelper); + this.getHistory(); + this.getBookmark(); + this.getCookie(); + this.getLogin(); + this.getDownload(); } /** * Query for history databases and add artifacts - * @param dataSource - * @param controller */ - private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getHistory() { FileManager fileManager = currentCase.getServices().getFileManager(); List historyFiles; try { @@ -126,7 +128,7 @@ class Chrome extends Extract { 
continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } @@ -164,10 +166,8 @@ class Chrome extends Extract { /** * Search for bookmark files and make artifacts. - * @param dataSource - * @param controller */ - private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getBookmark() { FileManager fileManager = currentCase.getServices().getFileManager(); List bookmarkFiles = null; try { @@ -204,7 +204,7 @@ class Chrome extends Extract { logger.log(Level.INFO, "{0}- Now getting Bookmarks from {1}", new Object[]{moduleName, temps}); File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } @@ -305,10 +305,8 @@ class Chrome extends Extract { /** * Queries for cookie files and adds artifacts - * @param dataSource - * @param controller */ - private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getCookie() { FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles; @@ -344,7 +342,7 @@ class Chrome extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } @@ -383,10 +381,8 @@ class Chrome extends Extract { /** * Queries for download files and adds artifacts - * @param dataSource - * @param controller */ - private void getDownload(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getDownload() { FileManager fileManager = currentCase.getServices().getFileManager(); List downloadFiles = null; try { @@ -420,7 +416,7 @@ class Chrome extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } @@ -473,10 +469,8 @@ class Chrome extends Extract { /** * Queries for login files and adds artifacts - * @param dataSource - * @param controller */ - private void getLogin(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getLogin() { FileManager fileManager = currentCase.getServices().getFileManager(); List signonFiles; try { @@ -511,7 +505,7 @@ class Chrome extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java index a312d81739..7a22ac6a8c 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Extract.java @@ -32,7 +32,7 @@ import java.util.logging.Level; import org.openide.util.NbBundle; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.casemodule.Case; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.datamodel.*; @@ -40,7 +40,7 @@ abstract class Extract { protected Case currentCase = Case.getCurrentCase(); protected SleuthkitCase tskCase = currentCase.getSleuthkitCase(); - public final Logger logger = Logger.getLogger(this.getClass().getName()); + private final Logger logger = 
Logger.getLogger(this.getClass().getName()); private final ArrayList errorMessages = new ArrayList<>(); String moduleName = ""; boolean dataFound = false; @@ -51,7 +51,7 @@ abstract class Extract { void init() throws IngestModuleException { } - abstract void process(Content dataSource, DataSourceIngestModuleStatusHelper controller); + abstract void process(Content dataSource, IngestJobContext context); void complete() { } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java index b1168dba98..21875852ee 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java @@ -52,7 +52,7 @@ import org.sleuthkit.datamodel.BlackboardAttribute; import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE; import org.sleuthkit.datamodel.Content; import org.sleuthkit.autopsy.coreutils.PlatformUtil; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.datamodel.*; /** @@ -66,6 +66,8 @@ class ExtractIE extends Extract { private String JAVA_PATH; private static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"); private ExecUtil execPasco; + private Content dataSource; + private IngestJobContext context; ExtractIE() { moduleName = NbBundle.getMessage(ExtractIE.class, "ExtractIE.moduleName.text"); @@ -74,19 +76,19 @@ class ExtractIE extends Extract { } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; dataFound = false; - this.getBookmark(dataSource, controller); - this.getCookie(dataSource, controller); - this.getHistory(dataSource, controller); + this.getBookmark(); + this.getCookie(); + this.getHistory(); } /** * Finds the files storing bookmarks and creates artifacts - * @param dataSource - * @param controller */ - private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getBookmark() { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List favoritesFiles; try { @@ -110,7 +112,7 @@ class ExtractIE extends Extract { continue; } - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } @@ -180,10 +182,8 @@ class ExtractIE extends Extract { /** * Finds files that store cookies and adds artifacts for them. - * @param dataSource - * @param controller */ - private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getCookie() { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List cookiesFiles; try { @@ -202,7 +202,7 @@ class ExtractIE extends Extract { dataFound = true; for (AbstractFile cookiesFile : cookiesFiles) { - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } if (cookiesFile.getSize() == 0) { @@ -257,11 +257,9 @@ class ExtractIE extends Extract { /** * Locates index.dat files, runs Pasco on them, and creates artifacts. 
- * @param dataSource - * @param controller */ - private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - logger.log(Level.INFO, "Pasco results path: " + moduleTempResultsDir); + private void getHistory() { + logger.log(Level.INFO, "Pasco results path: {0}", moduleTempResultsDir); boolean foundHistory = false; final File pascoRoot = InstalledFileLocator.getDefault().locate("pasco2", ExtractIE.class.getPackage().getName(), false); @@ -273,7 +271,7 @@ class ExtractIE extends Extract { } final String pascoHome = pascoRoot.getAbsolutePath(); - logger.log(Level.INFO, "Pasco2 home: " + pascoHome); + logger.log(Level.INFO, "Pasco2 home: {0}", pascoHome); PASCO_LIB_PATH = pascoHome + File.separator + "pasco2.jar" + File.pathSeparator + pascoHome + File.separator + "*"; @@ -283,7 +281,7 @@ class ExtractIE extends Extract { // get index.dat files org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); - List indexFiles = null; + List indexFiles; try { indexFiles = fileManager.findFiles(dataSource, "index.dat"); } catch (TskCoreException ex) { @@ -312,7 +310,7 @@ class ExtractIE extends Extract { //indexFileName = "index" + Long.toString(bbart.getArtifactID()) + ".dat"; temps = RAImageIngestModule.getRATempPath(currentCase, "IE") + File.separator + indexFileName; File datFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } try { @@ -337,7 +335,7 @@ class ExtractIE extends Extract { //Delete index.dat file since it was succcessfully by Pasco datFile.delete(); } else { - logger.log(Level.WARNING, "pasco execution failed on: " + this.getName()); + logger.log(Level.WARNING, "pasco execution failed on: {0}", this.getName()); this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractIE.getHistory.errMsg.errProcHist", this.getName())); } @@ -361,7 +359,7 @@ class ExtractIE extends Extract { Writer writer = null; try { final String outputFileFullPath = moduleTempResultsDir + File.separator + outputFileName; - logger.log(Level.INFO, "Writing pasco results to: " + outputFileFullPath); + logger.log(Level.INFO, "Writing pasco results to: {0}", outputFileFullPath); writer = new FileWriter(outputFileFullPath); execPasco = new ExecUtil(); execPasco.execute(writer, JAVA_PATH, @@ -402,7 +400,7 @@ class ExtractIE extends Extract { this.addErrorMessage( NbBundle.getMessage(this.getClass(), "ExtractIE.parsePascoOutput.errMsg.notFound", this.getName(), file.getName())); - logger.log(Level.WARNING, "Pasco Output not found: " + file.getPath()); + logger.log(Level.WARNING, "Pasco Output not found: {0}", file.getPath()); return; } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java index 9dc92d2ae7..2e3cadcde9 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java @@ -37,7 +37,7 @@ import org.sleuthkit.autopsy.coreutils.ExecUtil; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.recentactivity.UsbDeviceIdMapper.USBInfo; import org.sleuthkit.datamodel.*; import 
org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE; @@ -64,6 +64,8 @@ class ExtractRegistry extends Extract { private boolean rrFullFound = false; // true if we found the full version of regripper final private static String MODULE_VERSION = "1.0"; private ExecUtil execRR; + private Content dataSource; + private IngestJobContext context; //hide public constructor to prevent from instantiation by ingest module loader ExtractRegistry() { @@ -77,7 +79,7 @@ class ExtractRegistry extends Extract { } final String rrHome = rrRoot.getAbsolutePath(); - logger.log(Level.INFO, "RegRipper home: " + rrHome); + logger.log(Level.INFO, "RegRipper home: {0}", rrHome); if (PlatformUtil.isWindowsOS()) { RR_PATH = rrHome + File.separator + "rip.exe"; @@ -94,7 +96,7 @@ class ExtractRegistry extends Extract { } final String rrFullHome = rrFullRoot.getAbsolutePath(); - logger.log(Level.INFO, "RegRipper Full home: " + rrFullHome); + logger.log(Level.INFO, "RegRipper Full home: {0}", rrFullHome); if (PlatformUtil.isWindowsOS()) { RR_FULL_PATH = rrFullHome + File.separator + "rip.exe"; @@ -105,10 +107,8 @@ class ExtractRegistry extends Extract { /** * Search for the registry hives on the system. - * @param dataSource Data source to search for hives in. - * @return List of registry hives */ - private List findRegistryFiles(Content dataSource) { + private List findRegistryFiles() { List allRegistryFiles = new ArrayList<>(); org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); @@ -138,12 +138,9 @@ class ExtractRegistry extends Extract { /** * Identifies registry files in the database by mtimeItem, runs regripper on them, and parses the output. - * - * @param dataSource - * @param controller */ - private void analyzeRegistryFiles(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - List allRegistryFiles = findRegistryFiles(dataSource); + private void analyzeRegistryFiles() { + List allRegistryFiles = findRegistryFiles(); // open the log file FileWriter logFile = null; @@ -171,7 +168,7 @@ class ExtractRegistry extends Extract { continue; } - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } @@ -187,7 +184,7 @@ class ExtractRegistry extends Extract { logger.log(Level.INFO, moduleName + "- Now getting registry information from " + regFileNameLocal); RegOutputFiles regOutputFiles = executeRegRip(regFileNameLocal, outputPathBase); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } @@ -572,8 +569,10 @@ class ExtractRegistry extends Extract { } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - analyzeRegistryFiles(dataSource, controller); + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; + analyzeRegistryFiles(); } @Override diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 9cfd1f30ef..1f0f710dcc 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -36,7 +36,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.services.FileManager; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.datamodel.ContentUtils; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import 
org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; @@ -52,6 +52,7 @@ import org.sleuthkit.datamodel.TskCoreException; */ class Firefox extends Extract { + private static final Logger logger = Logger.getLogger(Firefox.class.getName()); private static final String historyQuery = "SELECT moz_historyvisits.id,url,title,visit_count,(visit_date/1000000) as visit_date,from_visit,(SELECT url FROM moz_places WHERE id=moz_historyvisits.from_visit) as ref FROM moz_places, moz_historyvisits WHERE moz_places.id = moz_historyvisits.place_id AND hidden = 0"; private static final String cookieQuery = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed,(creationTime/1000000) as creationTime FROM moz_cookies"; private static final String cookieQueryV3 = "SELECT name,value,host,expiry,(lastAccessed/1000000) as lastAccessed FROM moz_cookies"; @@ -59,21 +60,25 @@ class Firefox extends Extract { private static final String downloadQuery = "SELECT target, source,(startTime/1000000) as startTime, maxBytes FROM moz_downloads"; private static final String downloadQueryVersion24 = "SELECT url, content as target, (lastModified/1000000) as lastModified FROM moz_places, moz_annos WHERE moz_places.id = moz_annos.place_id AND moz_annos.anno_attribute_id = 3"; private final IngestServices services = IngestServices.getInstance(); + private Content dataSource; + private IngestJobContext context; Firefox() { moduleName = NbBundle.getMessage(Firefox.class, "Firefox.moduleName"); } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; dataFound = false; - this.getHistory(dataSource, controller); - this.getBookmark(dataSource, controller); - this.getDownload(dataSource, controller); - this.getCookie(dataSource, controller); + this.getHistory(); + this.getBookmark(); + this.getDownload(); + this.getCookie(); } - private void getHistory(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getHistory() { FileManager fileManager = currentCase.getServices().getFileManager(); List historyFiles; try { @@ -111,14 +116,14 @@ class Firefox extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } List> tempList = this.dbConnect(temps, historyQuery); logger.log(Level.INFO, "{0}- Now getting history from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName.noSpace"), @@ -155,14 +160,11 @@ class Firefox extends Extract { /** * Queries for bookmark files and adds artifacts - * - * @param dataSource - * @param controller */ - private void getBookmark(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getBookmark() { FileManager fileManager = currentCase.getServices().getFileManager(); - List bookmarkFiles = null; + List bookmarkFiles; try { bookmarkFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); } catch (TskCoreException ex) { 
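The hunks above switch the recent-activity extractors from string concatenation to parameterized java.util.logging messages. Below is a minimal standalone sketch of that style; the class name and the sample values are invented for illustration and are not part of the patch. The point of the {0}/{1} placeholders is that the parameters are stored on the log record and only formatted when the record is actually published, so disabled levels do not pay for concatenation.

import java.util.logging.Level;
import java.util.logging.Logger;

class LoggingStyleDemo {

    private static final Logger logger = Logger.getLogger(LoggingStyleDemo.class.getName());

    public static void main(String[] args) {
        // Hypothetical values, for illustration only.
        String moduleName = "Firefox";
        String dbPath = "/tmp/places.sqlite";
        int artifactCount = 42;

        // Old style: the message is concatenated even when INFO is not loggable.
        logger.log(Level.INFO, "Now getting history from " + dbPath);

        // New style, single parameter.
        logger.log(Level.INFO, "Now getting history from {0}", dbPath);

        // New style, several parameters passed as an Object[].
        logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.",
                new Object[]{moduleName, dbPath, artifactCount});
    }
}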
@@ -195,15 +197,15 @@ class Firefox extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } List> tempList = this.dbConnect(temps, bookmarkQuery); - logger.log(Level.INFO, moduleName + "- Now getting bookmarks from " + temps + " with " + tempList.size() + "artifacts identified."); + logger.log(Level.INFO, "{0}- Now getting bookmarks from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName.noSpace"), @@ -239,13 +241,10 @@ class Firefox extends Extract { /** * Queries for cookies file and adds artifacts - * - * @param dataSource - * @param controller */ - private void getCookie(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getCookie() { FileManager fileManager = currentCase.getServices().getFileManager(); - List cookiesFiles = null; + List cookiesFiles; try { cookiesFiles = fileManager.findFiles(dataSource, "cookies.sqlite", "Firefox"); } catch (TskCoreException ex) { @@ -278,12 +277,12 @@ class Firefox extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } boolean checkColumn = Util.checkColumn("creationTime", "moz_cookies", temps); - String query = null; + String query; if (checkColumn) { query = cookieQuery; } else { @@ -291,10 +290,10 @@ class Firefox extends Extract { } List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, moduleName + "- Now getting cookies from " + temps + " with " + tempList.size() + "artifacts identified."); + logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), NbBundle.getMessage(this.getClass(), "Firefox.parentModuleName.noSpace"), @@ -339,27 +338,21 @@ class Firefox extends Extract { /** * Queries for downloads files and adds artifacts - * - * @param dataSource - * @param controller */ - private void getDownload(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - getDownloadPreVersion24(dataSource, controller); - getDownloadVersion24(dataSource, controller); + private void getDownload() { + getDownloadPreVersion24(); + getDownloadVersion24(); } /** * Finds downloads artifacts from Firefox data from versions before 24.0. * * Downloads were stored in a separate downloads database. 
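The cancellation checks in these extractors change from a controller passed into every helper to an IngestJobContext stored once by process() and queried with isJobCancelled(). The sketch below mirrors only that pattern; JobContext is a stand-in interface defined locally rather than the real IngestJobContext class, and the file list and loop body are invented for illustration.

import java.util.Arrays;
import java.util.List;

class CancellationPatternDemo {

    // Stand-in for the isJobCancelled() check used by the patch.
    interface JobContext {
        boolean isJobCancelled();
    }

    private JobContext context;

    void process(List<String> files, JobContext context) {
        // Store the context once instead of threading a controller through every helper.
        this.context = context;
        for (String file : files) {
            // Bail out between work items if the ingest job was cancelled.
            if (this.context.isJobCancelled()) {
                break;
            }
            System.out.println("processing " + file);
        }
    }

    public static void main(String[] args) {
        JobContext neverCancelled = new JobContext() {
            @Override
            public boolean isJobCancelled() {
                return false;
            }
        };
        new CancellationPatternDemo().process(Arrays.asList("index.dat", "places.sqlite"), neverCancelled);
    }
}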
- * - * @param dataSource - * @param controller */ - private void getDownloadPreVersion24(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getDownloadPreVersion24() { FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; + List downloadsFiles; try { downloadsFiles = fileManager.findFiles(dataSource, "downloads.sqlite", "Firefox"); } catch (TskCoreException ex) { @@ -392,7 +385,7 @@ class Firefox extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } @@ -464,13 +457,10 @@ class Firefox extends Extract { * Gets download artifacts from Firefox data from version 24. * * Downloads are stored in the places database. - * - * @param dataSource - * @param controller */ - private void getDownloadVersion24(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getDownloadVersion24() { FileManager fileManager = currentCase.getServices().getFileManager(); - List downloadsFiles = null; + List downloadsFiles; try { downloadsFiles = fileManager.findFiles(dataSource, "places.sqlite", "Firefox"); } catch (TskCoreException ex) { @@ -504,17 +494,17 @@ class Firefox extends Extract { continue; } File dbFile = new File(temps); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { dbFile.delete(); break; } List> tempList = this.dbConnect(temps, downloadQueryVersion24); - logger.log(Level.INFO, moduleName + "- Now getting downloads from " + temps + " with " + tempList.size() + "artifacts identified."); + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), NbBundle.getMessage(this.getClass(), diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java index 0d4a0288fa..5c9ea7271b 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RAImageIngestModule.java @@ -30,7 +30,7 @@ import org.openide.util.NbBundle; import org.sleuthkit.autopsy.casemodule.Case; import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; @@ -48,6 +48,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da private final List extracters = new ArrayList<>(); private final List browserExtracters = new ArrayList<>(); private IngestServices services = IngestServices.getInstance(); + private IngestJobContext context; private StringBuilder subCompleted = new StringBuilder(); RAImageIngestModule() { @@ -55,6 +56,8 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da @Override public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; + Extract registry = 
new ExtractRegistry(); Extract iexplore = new ExtractIE(); Extract recentDocuments = new RecentDocumentsByLnk(); @@ -79,22 +82,22 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da } @Override - public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) { services.postMessage(IngestMessage.createMessage(MessageType.INFO, RecentActivityExtracterModuleFactory.getModuleName(), "Started " + dataSource.getName())); - controller.switchToDeterminate(extracters.size()); - controller.progress(0); + progressBar.switchToDeterminate(extracters.size()); + progressBar.progress(0); ArrayList errors = new ArrayList<>(); for (int i = 0; i < extracters.size(); i++) { Extract extracter = extracters.get(i); - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { logger.log(Level.INFO, "Recent Activity has been canceled, quitting before {0}", extracter.getName()); break; } try { - extracter.process(dataSource, controller); + extracter.process(dataSource, context); } catch (Exception ex) { logger.log(Level.SEVERE, "Exception occurred in " + extracter.getName(), ex); subCompleted.append(NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModFailed", @@ -102,7 +105,7 @@ public final class RAImageIngestModule extends IngestModuleAdapter implements Da errors.add( NbBundle.getMessage(this.getClass(), "RAImageIngestModule.process.errModErrs", RecentActivityExtracterModuleFactory.getModuleName())); } - controller.progress(i + 1); + progressBar.progress(i + 1); errors.addAll(extracter.getErrorMessages()); } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java index 47827a7ce0..bfa52c250a 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/RecentDocumentsByLnk.java @@ -32,7 +32,8 @@ import java.util.Collection; import org.sleuthkit.autopsy.coreutils.JLNK; import org.sleuthkit.autopsy.coreutils.JLnkParser; import org.sleuthkit.autopsy.coreutils.JLnkParserException; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.BlackboardArtifact; @@ -48,14 +49,16 @@ import org.sleuthkit.datamodel.*; */ class RecentDocumentsByLnk extends Extract { private static final Logger logger = Logger.getLogger(RecentDocumentsByLnk.class.getName()); - private IngestServices services = IngestServices.getInstance(); + private IngestServices services = IngestServices.getInstance(); + private Content dataSource; + private IngestJobContext context; /** * Find the documents that Windows stores about recent documents and make artifacts. 
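RAImageIngestModule above now takes a DataSourceIngestModuleProgress and reports one work unit per extracter through switchToDeterminate() and progress(). The following sketch mirrors just that reporting loop; Progress is a local stand-in interface, and the extracter names and println calls are illustrative only.

class ProgressPatternDemo {

    // Stand-in for the two progress calls used in the hunk above.
    interface Progress {
        void switchToDeterminate(int workUnits);
        void progress(int workUnitsDone);
    }

    static void runExtracters(String[] extracters, Progress progressBar) {
        // One work unit per extracter, reported as each one finishes.
        progressBar.switchToDeterminate(extracters.length);
        progressBar.progress(0);
        for (int i = 0; i < extracters.length; i++) {
            System.out.println("running " + extracters[i]);
            progressBar.progress(i + 1);
        }
    }

    public static void main(String[] args) {
        runExtracters(new String[]{"ExtractRegistry", "ExtractIE", "Firefox"},
                new Progress() {
                    @Override
                    public void switchToDeterminate(int workUnits) {
                        System.out.println("determinate, " + workUnits + " work units");
                    }

                    @Override
                    public void progress(int workUnitsDone) {
                        System.out.println("completed " + workUnitsDone);
                    }
                });
    }
}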
* @param dataSource * @param controller */ - private void getRecentDocuments(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + private void getRecentDocuments() { org.sleuthkit.autopsy.casemodule.services.FileManager fileManager = currentCase.getServices().getFileManager(); List recentFiles; @@ -76,14 +79,14 @@ class RecentDocumentsByLnk extends Extract { dataFound = true; for (AbstractFile recentFile : recentFiles) { - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break; } if (recentFile.getSize() == 0) { continue; } - JLNK lnk = null; + JLNK lnk; JLnkParser lnkParser = new JLnkParser(new ReadContentInputStream(recentFile), (int) recentFile.getSize()); try { lnk = lnkParser.parse(); @@ -100,7 +103,7 @@ class RecentDocumentsByLnk extends Extract { continue; } - Collection bbattributes = new ArrayList(); + Collection bbattributes = new ArrayList<>(); String path = lnk.getBestPath(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PATH.getTypeID(), NbBundle.getMessage(this.getClass(), @@ -122,8 +125,10 @@ class RecentDocumentsByLnk extends Extract { } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; dataFound = false; - this.getRecentDocuments(dataSource, controller); + this.getRecentDocuments(); } } diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java index a175a7d2c4..6bdd730345 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java @@ -31,11 +31,11 @@ import java.util.logging.Level; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; -import org.sleuthkit.autopsy.coreutils.Logger; import org.openide.util.NbBundle; +import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.coreutils.PlatformUtil; import org.sleuthkit.autopsy.coreutils.XMLUtil; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; @@ -62,6 +62,7 @@ import org.xml.sax.SAXException; */ class SearchEngineURLQueryAnalyzer extends Extract { + private static final Logger logger = Logger.getLogger(SearchEngineURLQueryAnalyzer.class.getName()); private static final String XMLFILE = "SEUQAMappings.xml"; private static final String XSDFILE = "SearchEngineSchema.xsd"; private static String[] searchEngineNames; @@ -71,6 +72,8 @@ class SearchEngineURLQueryAnalyzer extends Extract { NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.engineName.none"), NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.domainSubStr.none"), new HashMap()); + private Content dataSource; + private IngestJobContext context; SearchEngineURLQueryAnalyzer() { } @@ -223,7 +226,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { return basereturn; } - private void getURLs(Content dataSource, 
DataSourceIngestModuleStatusHelper controller) { + private void getURLs() { int totalQueries = 0; try { //from blackboard_artifacts @@ -255,7 +258,7 @@ class SearchEngineURLQueryAnalyzer extends Extract { Collection listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID()); getAttributes: for (BlackboardAttribute attribute : listAttributes) { - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { break getAll; //User cancled the process. } if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) { @@ -292,13 +295,13 @@ class SearchEngineURLQueryAnalyzer extends Extract { } catch (TskException e) { logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); } finally { - if (controller.isIngestJobCancelled()) { + if (context.isJobCancelled()) { logger.info("Operation terminated by user."); } IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent( NbBundle.getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.parentModuleName.noSpace"), BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY)); - logger.info("Extracted " + totalQueries + " queries from the blackboard"); + logger.log(Level.INFO, "Extracted {0} queries from the blackboard", totalQueries); } } @@ -314,8 +317,10 @@ class SearchEngineURLQueryAnalyzer extends Extract { } @Override - public void process(Content dataSource, DataSourceIngestModuleStatusHelper controller) { - this.getURLs(dataSource, controller); + public void process(Content dataSource, IngestJobContext context) { + this.dataSource = dataSource; + this.context = context; + this.getURLs(); logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals()); } diff --git a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java index 05b1a74948..5d039fe304 100755 --- a/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java +++ b/ewfVerify/src/org/sleuthkit/autopsy/ewfverify/EwfVerifyIngestModule.java @@ -24,7 +24,7 @@ import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.DatatypeConverter; import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; -import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleStatusHelper; +import org.sleuthkit.autopsy.ingest.DataSourceIngestModuleProgress; import org.sleuthkit.autopsy.ingest.IngestMessage; import org.sleuthkit.autopsy.ingest.IngestMessage.MessageType; import org.sleuthkit.autopsy.ingest.IngestServices; @@ -53,12 +53,14 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo private boolean skipped = false; private String calculatedHash = ""; private String storedHash = ""; + private IngestJobContext context; EwfVerifyIngestModule() { } @Override public void startUp(IngestJobContext context) throws IngestModuleException { + this.context = context; verified = false; skipped = false; img = null; @@ -79,7 +81,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo } @Override - public ProcessResult process(Content dataSource, DataSourceIngestModuleStatusHelper statusHelper) { + public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { imgName = dataSource.getName(); try { img = dataSource.getImage(); @@ -145,7 +147,7 @@ public class EwfVerifyIngestModule extends IngestModuleAdapter implements DataSo // Read in byte size 
chunks and update the hash value with the data. for (int i = 0; i < totalChunks; i++) { - if (statusHelper.isIngestJobCancelled()) { + if (context.isJobCancelled()) { return ProcessResult.OK; } data = new byte[(int) chunkSize]; From fa6ab7a1571d42804d198cbb2f6e50b2dfa1e95f Mon Sep 17 00:00:00 2001 From: "Samuel H. Kenyon" Date: Thu, 10 Apr 2014 17:11:43 -0400 Subject: [PATCH 24/27] fix mistake made during recent changes to IngestMessagePanel (which can also cause a ConcurrentModificationException) --- .../org/sleuthkit/autopsy/ingest/IngestMessagePanel.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java index 36ffe035dd..24dae0f312 100644 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestMessagePanel.java @@ -713,13 +713,17 @@ class IngestMessagePanel extends JPanel implements TableModelListener { messages.add(message); } + private List getMessages() { + return messages; + } + synchronized void add(IngestMessage message) { messages.add(message); } //add all messages from another group synchronized void addAll(IngestMessageGroup group) { - for (IngestMessage m : messages) { + for (IngestMessage m : group.getMessages()) { messages.add(m); } } From 708dd975df875a140dac5510b3c177ecdcbbb50f Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 17:19:34 -0400 Subject: [PATCH 25/27] Fixed incorrect auto-format of Firefox(Extracter) log messages --- .../src/org/sleuthkit/autopsy/recentactivity/Firefox.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 1f0f710dcc..9b746aa770 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -121,7 +121,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, historyQuery); - logger.log(Level.INFO, "{0}- Now getting history from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), @@ -202,7 +202,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, bookmarkQuery); - logger.log(Level.INFO, "{0}- Now getting bookmarks from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0}- Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); @@ -290,7 +290,7 @@ class Firefox extends Extract { } List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new 
ArrayList<>(); @@ -501,7 +501,7 @@ class Firefox extends Extract { List> tempList = this.dbConnect(temps, downloadQueryVersion24); - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2}artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); From 5d4f5f1636c2bfd74a05c63db19b082e117f6586 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 17:27:35 -0400 Subject: [PATCH 26/27] Corrected log message format errors in recent activity extracter ingest module --- .../src/org/sleuthkit/autopsy/recentactivity/Firefox.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java index 9b746aa770..1208d96080 100644 --- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java +++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java @@ -121,7 +121,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, historyQuery); - logger.log(Level.INFO, "{0}- Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0} - Now getting history from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL.getTypeID(), @@ -202,7 +202,7 @@ class Firefox extends Extract { break; } List> tempList = this.dbConnect(temps, bookmarkQuery); - logger.log(Level.INFO, "{0}- Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0} - Now getting bookmarks from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); @@ -290,7 +290,7 @@ class Firefox extends Extract { } List> tempList = this.dbConnect(temps, query); - logger.log(Level.INFO, "{0}- Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0} - Now getting cookies from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); @@ -501,7 +501,7 @@ class Firefox extends Extract { List> tempList = this.dbConnect(temps, downloadQueryVersion24); - logger.log(Level.INFO, "{0}- Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); + logger.log(Level.INFO, "{0} - Now getting downloads from {1} with {2} artifacts identified.", new Object[]{moduleName, temps, tempList.size()}); for (HashMap result : tempList) { Collection bbattributes = new ArrayList<>(); From 723828c98c36d6b6b4af0f31be94bf62c91d1f79 Mon Sep 17 00:00:00 2001 From: Richard Cordovano Date: Thu, 10 Apr 2014 18:24:03 -0400 Subject: [PATCH 27/27] Update sample modules to use ref counting form IngestModuleAdapter --- .../SampleDataSourceIngestModule.java | 120 +++--------------- .../examples/SampleFileIngestModule.java | 100 
++------------- .../autopsy/ingest/IngestModuleAdapter.java | 2 +- .../exif/ExifParserFileIngestModule.java | 2 +- .../FileExtMismatchIngestModule.java | 2 +- .../filetypeid/FileTypeIdIngestModule.java | 2 +- .../sevenzip/SevenZipIngestModule.java | 2 +- .../hashdatabase/HashDbIngestModule.java | 2 +- .../KeywordSearchIngestModule.java | 2 +- 9 files changed, 41 insertions(+), 193 deletions(-) diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java index 108adb2100..a5e329beb5 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleDataSourceIngestModule.java @@ -46,23 +46,17 @@ import org.sleuthkit.autopsy.coreutils.Logger; import org.sleuthkit.autopsy.ingest.DataSourceIngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.datamodel.TskData; /** * Sample data source ingest module that doesn't do much. Demonstrates per * ingest job module settings, use of a subset of the available ingest services - * and thread-safe sharing of per ingest job resources. - *
- * IMPORTANT TIP: This sample data source ingest module directly implements - * DataSourceIngestModule, which extends IngestModule. A practical alternative, - * recommended if you do not need to provide implementations of all of the - * IngestModule methods, is to extend the abstract class IngestModuleAdapter to - * get default "do nothing" implementations of the IngestModule methods. + * and thread-safe sharing of per ingest job data. */ -class SampleDataSourceIngestModule implements DataSourceIngestModule { +class SampleDataSourceIngestModule extends IngestModuleAdapter implements DataSourceIngestModule { - private static final HashMap moduleReferenceCountsForIngestJobs = new HashMap<>(); private static final HashMap fileCountsForIngestJobs = new HashMap<>(); private final boolean skipKnownFiles; private IngestJobContext context = null; @@ -71,58 +65,20 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { this.skipKnownFiles = settings.skipKnownFiles(); } - /** - * Invoked by Autopsy to allow an ingest module instance to set up any - * internal data structures and acquire any private resources it will need - * during an ingest job. - *
- * Autopsy will generally use several instances of an ingest module for each - * ingest job it performs. Completing an ingest job entails processing a - * single data source (e.g., a disk image) and all of the files from the - * data source, including files extracted from archives and any unallocated - * space (made to look like a series of files). The data source is passed - * through one or more pipelines of data source ingest modules. The files - * are passed through one or more pipelines of file ingest modules. - *
- * Autopsy may use multiple threads to complete an ingest job, but it is - * guaranteed that there will be no more than one module instance per - * thread. However, if the module instances must share resources, the - * modules are responsible for synchronizing access to the shared resources - * and doing reference counting as required to release those resources - * correctly. Also, more than one ingest job may be in progress at any given - * time. This must also be taken into consideration when sharing resources - * between module instances. - *
- * An ingest module that does not require initialization may extend the - * abstract IngestModuleAdapter class to get a default "do nothing" - * implementation of this method. - * - * @param context Provides data and services specific to the ingest job and - * the ingest pipeline of which the module is a part. - * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException - */ @Override public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; - // This method is thread-safe with per ingest job reference counting. + // This method is thread-safe with per ingest job reference counted + // management of shared data. initFileCount(context.getJobId()); } - /** - * Processes a data source. - * - * @param dataSource The data source to process. - * @param statusHelper A status helper to be used to report progress and - * detect ingest job cancellation. - * @return A result code indicating success or failure of the processing. - */ @Override - public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress statusHelper) { - // There are two tasks to do. Use the status helper to set the the - // progress bar to determinate and to set the remaining number of work - // units to be completed. - statusHelper.switchToDeterminate(2); + public ProcessResult process(Content dataSource, DataSourceIngestModuleProgress progressBar) { + // There are two tasks to do. Set the the progress bar to determinate + // and set the remaining number of work units to be completed to two. + progressBar.switchToDeterminate(2); Case autopsyCase = Case.getCurrentCase(); SleuthkitCase sleuthkitCase = autopsyCase.getSleuthkitCase(); @@ -138,7 +94,7 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { } } - statusHelper.progress(1); + progressBar.progress(1); // Get files by creation time. long currentTime = System.currentTimeMillis() / 1000; @@ -150,10 +106,12 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { } } - // This method is thread-safe and keeps per ingest job counters. + // This method is thread-safe with per ingest job reference counted + // management of shared data. addToFileCount(context.getJobId(), fileCount); - statusHelper.progress(1); + progressBar.progress(1); + return IngestModule.ProcessResult.OK; } catch (TskCoreException ex) { IngestServices ingestServices = IngestServices.getInstance(); @@ -161,53 +119,20 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { logger.log(Level.SEVERE, "File query failed", ex); return IngestModule.ProcessResult.ERROR; } - - return IngestModule.ProcessResult.OK; } - /** - * Invoked by Autopsy when an ingest job is completed, before the ingest - * module instance is discarded. The module should respond by doing things - * like releasing private resources, submitting final results, and posting a - * final ingest message. - *
- * Autopsy will generally use several instances of an ingest module for each - * ingest job it performs. Completing an ingest job entails processing a - * single data source (e.g., a disk image) and all of the files from the - * data source, including files extracted from archives and any unallocated - * space (made to look like a series of files). The data source is passed - * through one or more pipelines of data source ingest modules. The files - * are passed through one or more pipelines of file ingest modules. - *
- * Autopsy may use multiple threads to complete an ingest job, but it is - * guaranteed that there will be no more than one module instance per - * thread. However, if the module instances must share resources, the - * modules are responsible for synchronizing access to the shared resources - * and doing reference counting as required to release those resources - * correctly. Also, more than one ingest job may be in progress at any given - * time. This must also be taken into consideration when sharing resources - * between module instances. - *
- * An ingest module that does not require initialization may extend the - * abstract IngestModuleAdapter class to get a default "do nothing" - * implementation of this method. - */ @Override public void shutDown(boolean ingestJobCancelled) { - // This method is thread-safe with per ingest job reference counting. + // This method is thread-safe with per ingest job reference counted + // management of shared data. postFileCount(context.getJobId()); } synchronized static void initFileCount(long ingestJobId) { - Integer moduleReferenceCount; - if (!moduleReferenceCountsForIngestJobs.containsKey(ingestJobId)) { - moduleReferenceCount = 1; + Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId); + if (refCount == 1) { fileCountsForIngestJobs.put(ingestJobId, 0L); - } else { - moduleReferenceCount = moduleReferenceCountsForIngestJobs.get(ingestJobId); - ++moduleReferenceCount; } - moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount); } synchronized static void addToFileCount(long ingestJobId, long countToAdd) { @@ -217,9 +142,8 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { } synchronized static void postFileCount(long ingestJobId) { - Integer moduleReferenceCount = moduleReferenceCountsForIngestJobs.remove(ingestJobId); - --moduleReferenceCount; - if (moduleReferenceCount == 0) { + Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId); + if (refCount == 0) { Long filesCount = fileCountsForIngestJobs.remove(ingestJobId); String msgText = String.format("Found %d files", filesCount); IngestMessage message = IngestMessage.createMessage( @@ -227,8 +151,6 @@ class SampleDataSourceIngestModule implements DataSourceIngestModule { SampleIngestModuleFactory.getModuleName(), msgText); IngestServices.getInstance().postMessage(message); - } else { - moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount); - } + } } } diff --git a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java index d16109f18c..95a64ce126 100755 --- a/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/examples/SampleFileIngestModule.java @@ -37,6 +37,7 @@ import org.sleuthkit.autopsy.ingest.FileIngestModule; import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.autopsy.ingest.IngestJobContext; import org.sleuthkit.autopsy.ingest.IngestMessage; +import org.sleuthkit.autopsy.ingest.IngestModuleAdapter; import org.sleuthkit.autopsy.ingest.IngestServices; import org.sleuthkit.autopsy.ingest.ModuleDataEvent; import org.sleuthkit.datamodel.AbstractFile; @@ -50,17 +51,10 @@ import org.sleuthkit.datamodel.TskData; /** * Sample file ingest module that doesn't do much. Demonstrates per ingest job * module settings, use of a subset of the available ingest services and - * thread-safe sharing of per ingest job resources. - *
- * IMPORTANT TIP: This sample data source ingest module directly implements - * FileIngestModule, which extends IngestModule. A practical alternative, - * recommended if you do not need to provide implementations of all of the - * IngestModule methods, is to extend the abstract class IngestModuleAdapter to - * get default "do nothing" implementations of the IngestModule methods. + * thread-safe sharing of per ingest job data. */ -class SampleFileIngestModule implements FileIngestModule { +class SampleFileIngestModule extends IngestModuleAdapter implements FileIngestModule { - private static final HashMap moduleReferenceCountsForIngestJobs = new HashMap<>(); private static final HashMap artifactCountsForIngestJobs = new HashMap<>(); private static int attrId = -1; private final boolean skipKnownFiles; @@ -70,36 +64,6 @@ class SampleFileIngestModule implements FileIngestModule { this.skipKnownFiles = settings.skipKnownFiles(); } - /** - * Invoked by Autopsy to allow an ingest module instance to set up any - * internal data structures and acquire any private resources it will need - * during an ingest job. - *
- * Autopsy will generally use several instances of an ingest module for each - * ingest job it performs. Completing an ingest job entails processing a - * single data source (e.g., a disk image) and all of the files from the - * data source, including files extracted from archives and any unallocated - * space (made to look like a series of files). The data source is passed - * through one or more pipelines of data source ingest modules. The files - * are passed through one or more pipelines of file ingest modules. - *
- * Autopsy may use multiple threads to complete an ingest job, but it is - * guaranteed that there will be no more than one module instance per - * thread. However, if the module instances must share resources, the - * modules are responsible for synchronizing access to the shared resources - * and doing reference counting as required to release those resources - * correctly. Also, more than one ingest job may be in progress at any given - * time. This must also be taken into consideration when sharing resources - * between module instances. - *
- * An ingest module that does not require initialization may extend the - * abstract IngestModuleAdapter class to get a default "do nothing" - * implementation of this method. - * - * @param context Provides data and services specific to the ingest job and - * the ingest pipeline of which the module is a part. - * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException - */ @Override public void startUp(IngestJobContext context) throws IngestModuleException { this.context = context; @@ -132,16 +96,11 @@ class SampleFileIngestModule implements FileIngestModule { } } - // This method is thread-safe with per ingest job reference counting. + // This method is thread-safe with per ingest job reference counted + // management of shared data. initBlackboardPostCount(context.getJobId()); } - /** - * Processes a file. - * - * @param file The file. - * @return A result code indicating success or failure of the processing. - */ @Override public IngestModule.ProcessResult process(AbstractFile file) { if (attrId != -1) { @@ -182,7 +141,8 @@ class SampleFileIngestModule implements FileIngestModule { BlackboardArtifact art = file.getGenInfoArtifact(); art.addAttribute(attr); - // Thread-safe. + // This method is thread-safe with per ingest job reference counted + // management of shared data. addToBlackboardPostCount(context.getJobId(), 1L); // Fire an event to notify any listeners for blackboard postings. @@ -199,49 +159,18 @@ class SampleFileIngestModule implements FileIngestModule { } } - /** - * Invoked by Autopsy when an ingest job is completed, before the ingest - * module instance is discarded. The module should respond by doing things - * like releasing private resources, submitting final results, and posting a - * final ingest message. - *
- * Autopsy will generally use several instances of an ingest module for each - * ingest job it performs. Completing an ingest job entails processing a - * single data source (e.g., a disk image) and all of the files from the - * data source, including files extracted from archives and any unallocated - * space (made to look like a series of files). The data source is passed - * through one or more pipelines of data source ingest modules. The files - * are passed through one or more pipelines of file ingest modules. - *
- * Autopsy may use multiple threads to complete an ingest job, but it is - * guaranteed that there will be no more than one module instance per - * thread. However, if the module instances must share resources, the - * modules are responsible for synchronizing access to the shared resources - * and doing reference counting as required to release those resources - * correctly. Also, more than one ingest job may be in progress at any given - * time. This must also be taken into consideration when sharing resources - * between module instances. - *
- * An ingest module that does not require initialization may extend the - * abstract IngestModuleAdapter class to get a default "do nothing" - * implementation of this method. - */ @Override public void shutDown(boolean ingestJobCancelled) { - // This method is thread-safe with per ingest job reference counting. + // This method is thread-safe with per ingest job reference counted + // management of shared data. reportBlackboardPostCount(context.getJobId()); } synchronized static void initBlackboardPostCount(long ingestJobId) { - Integer moduleReferenceCount; - if (!moduleReferenceCountsForIngestJobs.containsKey(ingestJobId)) { - moduleReferenceCount = 1; + Long refCount = IngestModuleAdapter.moduleRefCountIncrementAndGet(ingestJobId); + if (refCount == 1) { artifactCountsForIngestJobs.put(ingestJobId, 0L); - } else { - moduleReferenceCount = moduleReferenceCountsForIngestJobs.get(ingestJobId); - ++moduleReferenceCount; } - moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount); } synchronized static void addToBlackboardPostCount(long ingestJobId, long countToAdd) { @@ -251,9 +180,8 @@ class SampleFileIngestModule implements FileIngestModule { } synchronized static void reportBlackboardPostCount(long ingestJobId) { - Integer moduleReferenceCount = moduleReferenceCountsForIngestJobs.remove(ingestJobId); - --moduleReferenceCount; - if (moduleReferenceCount == 0) { + Long refCount = IngestModuleAdapter.moduleRefCountDecrementAndGet(ingestJobId); + if (refCount == 0) { Long filesCount = artifactCountsForIngestJobs.remove(ingestJobId); String msgText = String.format("Posted %d times to the blackboard", filesCount); IngestMessage message = IngestMessage.createMessage( @@ -261,8 +189,6 @@ class SampleFileIngestModule implements FileIngestModule { SampleIngestModuleFactory.getModuleName(), msgText); IngestServices.getInstance().postMessage(message); - } else { - moduleReferenceCountsForIngestJobs.put(ingestJobId, moduleReferenceCount); } } } diff --git a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java index 101ea80dae..d0ac2e3f32 100755 --- a/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java +++ b/Core/src/org/sleuthkit/autopsy/ingest/IngestModuleAdapter.java @@ -28,7 +28,7 @@ public abstract class IngestModuleAdapter implements IngestModule { // Maps a JobId to the count of instances static HashMap moduleRefCount = new HashMap<>(); - public static synchronized long moduleRefCountIncrement(long jobId) { + public static synchronized long moduleRefCountIncrementAndGet(long jobId) { long count = moduleRefCount.containsKey(jobId) ? 
moduleRefCount.get(jobId) : 0; long nextCount = count + 1; moduleRefCount.put(jobId, nextCount); diff --git a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java index 1fa1250960..5c55c5e1bd 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/exif/ExifParserFileIngestModule.java @@ -69,7 +69,7 @@ public final class ExifParserFileIngestModule extends IngestModuleAdapter implem @Override public void startUp(IngestJobContext context) throws IngestModuleException { jobId = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobId); + IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId); } diff --git a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java index f7bb46f95f..3c3506e3d7 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/fileextmismatch/FileExtMismatchIngestModule.java @@ -61,7 +61,7 @@ public class FileExtMismatchIngestModule extends IngestModuleAdapter implements @Override public void startUp(IngestJobContext context) throws IngestModuleException { jobId = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobId); + IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId); FileExtMismatchXML xmlLoader = FileExtMismatchXML.getDefault(); SigTypeToExtMap = xmlLoader.load(); } diff --git a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java index 48e1a997f4..3bc7dc68d1 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/filetypeid/FileTypeIdIngestModule.java @@ -61,7 +61,7 @@ public class FileTypeIdIngestModule extends IngestModuleAdapter implements FileI @Override public void startUp(IngestJobContext context) throws IngestModuleException { jobId = context.getJobId(); - IngestModuleAdapter.moduleRefCountIncrement(jobId); + IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId); } @Override diff --git a/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java index 76ffa70926..82134a7e7e 100644 --- a/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java +++ b/Core/src/org/sleuthkit/autopsy/modules/sevenzip/SevenZipIngestModule.java @@ -124,7 +124,7 @@ public final class SevenZipIngestModule extends IngestModuleAdapter implements F } // if first instance of this module for this job then check 7zip init - if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) { try { SevenZip.initSevenZipFromPlatformJAR(); String platform = SevenZip.getUsedPlatform(); diff --git a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java index 97cf92b32f..f56bac451e 100644 --- a/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java +++ b/HashDatabase/src/org/sleuthkit/autopsy/hashdatabase/HashDbIngestModule.java @@ -70,7 +70,7 @@ public class HashDbIngestModule extends 
IngestModuleAdapter implements FileInges getEnabledHashSets(hashDbManager.getKnownBadFileHashSets(), knownBadHashSets); getEnabledHashSets(hashDbManager.getKnownFileHashSets(), knownHashSets); - if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) { // if first module for this job then post error msgs if needed if (knownBadHashSets.isEmpty()) { diff --git a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java index fb7b5cc3d5..392c7c238d 100644 --- a/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java +++ b/KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchIngestModule.java @@ -130,7 +130,7 @@ public final class KeywordSearchIngestModule extends IngestModuleAdapter impleme // increment the module reference count // if first instance of this module for this job then check the server and existence of keywords - if (IngestModuleAdapter.moduleRefCountIncrement(jobId) == 1) { + if (IngestModuleAdapter.moduleRefCountIncrementAndGet(jobId) == 1) { final Server server = KeywordSearch.getServer(); try { if (!server.isRunning()) {
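The sample modules and the IngestModuleAdapter change above put per-job shared data behind a reference count: the first instance to start up for a job initializes the shared entry, and the last instance to shut down removes it and posts the summary. The snippet below is a self-contained sketch of that idiom using only the JDK; the class and method names are stand-ins rather than the IngestModuleAdapter implementation, and the printed summary is invented for illustration.

import java.util.HashMap;

class RefCountDemo {

    // Per-job reference counts and per-job shared data, keyed by ingest job id.
    private static final HashMap<Long, Long> refCounts = new HashMap<>();
    private static final HashMap<Long, Long> fileCounts = new HashMap<>();

    static synchronized long incrementAndGet(long jobId) {
        long next = refCounts.containsKey(jobId) ? refCounts.get(jobId) + 1 : 1;
        refCounts.put(jobId, next);
        return next;
    }

    static synchronized long decrementAndGet(long jobId) {
        long next = refCounts.get(jobId) - 1;
        if (next == 0) {
            refCounts.remove(jobId);
        } else {
            refCounts.put(jobId, next);
        }
        return next;
    }

    static synchronized void startUp(long jobId) {
        // First module instance for this job initializes the shared counter.
        if (incrementAndGet(jobId) == 1) {
            fileCounts.put(jobId, 0L);
        }
    }

    static synchronized void addFiles(long jobId, long count) {
        fileCounts.put(jobId, fileCounts.get(jobId) + count);
    }

    static synchronized void shutDown(long jobId) {
        // Last module instance for this job posts the result and releases the entry.
        if (decrementAndGet(jobId) == 0) {
            Long total = fileCounts.remove(jobId);
            System.out.println("job " + jobId + ": found " + total + " files");
        }
    }

    public static void main(String[] args) {
        startUp(1L);
        startUp(1L);            // two module instances share job 1
        addFiles(1L, 10L);
        addFiles(1L, 5L);
        shutDown(1L);
        shutDown(1L);           // summary is printed exactly once, here
    }
}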