diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemDefsPanel.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemDefsPanel.java
index 70d7ec3d64..f85f0805b5 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemDefsPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/InterestingItemDefsPanel.java
@@ -256,8 +256,9 @@ final class InterestingItemDefsPanel extends IngestModuleGlobalSettingsPanel imp
             option = JOptionPane.showConfirmDialog(null, panel, NbBundle.getMessage(FilesSetPanel.class, "FilesSetPanel.title"), JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
         } while (option == JOptionPane.OK_OPTION && !panel.isValidDefinition());

-        // If rule set with same name already exists, do not add to the filesSets hashMap.
-        if(this.filesSets.containsKey(panel.getFilesSetName())) {
+        // When adding a new rule set (selectedSet == null), do not add it to the filesSets map if a rule set with the same name already exists.
+        // When editing an existing rule set (selectedSet != null), this check is skipped.
+        if(this.filesSets.containsKey(panel.getFilesSetName()) && selectedSet == null) {
             MessageNotifyUtil.Message.error(NbBundle.getMessage(this.getClass(),
                     "InterestingItemDefsPanel.doFileSetsDialog.duplicateRuleSet.text",
                     panel.getFilesSetName()));
@@ -315,7 +316,7 @@ final class InterestingItemDefsPanel extends IngestModuleGlobalSettingsPanel imp
             rules.remove(selectedRule.getUuid());
         }
         FilesSet.Rule newRule = new FilesSet.Rule(panel.getRuleName(), panel.getFileNameFilter(), panel.getMetaTypeFilter(), panel.getPathFilter());
-        rules.put(Integer.toString(newRule.hashCode()), newRule);
+        rules.put(newRule.getUuid(), newRule);

         // Add the new/edited files set definition, replacing any previous
         // definition with the same name and refreshing the display.
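Review note on the InterestingItemDefsPanel change above: keying the rules map with Integer.toString(newRule.hashCode()) risked silently overwriting an unrelated rule on a hash-code collision, and it did not match the rules.remove(selectedRule.getUuid()) call a few lines earlier, so edited rules were removed and re-added under inconsistent keys. Keying by the rule's UUID gives every rule a stable, unique key. A minimal, self-contained sketch of the pattern (the Rule class below is an illustrative stand-in, not Autopsy's FilesSet.Rule):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.UUID;

    // Illustrative stand-in for a rule object: each instance carries its own stable UUID key.
    final class Rule {
        private final String uuid = UUID.randomUUID().toString();
        private final String name;

        Rule(String name) {
            this.name = name;
        }

        String getUuid() {
            return uuid;
        }

        @Override
        public String toString() {
            return name + " (" + uuid + ")";
        }
    }

    public class RuleMapDemo {
        public static void main(String[] args) {
            Map<String, Rule> rules = new HashMap<>();
            Rule archives = new Rule("Archives");
            Rule documents = new Rule("Documents");

            // Keys never collide or drift, because each rule owns a unique, immutable UUID.
            rules.put(archives.getUuid(), archives);
            rules.put(documents.getUuid(), documents);

            // Editing: remove the old entry by its UUID, then insert the replacement under its own UUID.
            rules.remove(archives.getUuid());
            Rule edited = new Rule("Archives (edited)");
            rules.put(edited.getUuid(), edited);

            System.out.println(rules.values());
        }
    }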
diff --git a/Core/src/org/sleuthkit/autopsy/modules/stix/EvalDomainObj.java b/Core/src/org/sleuthkit/autopsy/modules/stix/EvalDomainObj.java
index 18aaf37e8e..03a8041c1f 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/stix/EvalDomainObj.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/stix/EvalDomainObj.java
@@ -55,8 +55,8 @@ class EvalDomainObj extends EvaluatableObject {
         // Since we have single URL artifacts, ALL and NONE conditions probably don't make sense to test
         if (!((obj.getValue().getApplyCondition() == null)
                 || (obj.getValue().getApplyCondition() == ConditionApplicationEnum.ANY))) {
-            return new ObservableResult(id, "URIObject: Can not process apply condition " + obj.getValue().getApplyCondition().toString() //NON-NLS
-                    + " on URI object", spacing, ObservableResult.ObservableState.INDETERMINATE, null); //NON-NLS
+            return new ObservableResult(id, "DomainObject: Can not process apply condition " + obj.getValue().getApplyCondition().toString() //NON-NLS
+                    + " on Domain object", spacing, ObservableResult.ObservableState.INDETERMINATE, null); //NON-NLS
         }

         // If the condition is not "CONTAINS", add a warning that it's being ignored
diff --git a/Core/src/org/sleuthkit/autopsy/modules/stix/EvalFileObj.java b/Core/src/org/sleuthkit/autopsy/modules/stix/EvalFileObj.java
index 384d40a899..9f05e44e5c 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/stix/EvalFileObj.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/stix/EvalFileObj.java
@@ -171,7 +171,21 @@ class EvalFileObj extends EvaluatableObject {
         for (HashType h : obj.getHashes().getHashes()) {
             if (h.getSimpleHashValue() != null) {
                 if (h.getType().getValue().equals("MD5")) { //NON-NLS
-                    String newClause = "md5=\'" + h.getSimpleHashValue().getValue().toString().toLowerCase() + "\'"; //NON-NLS
+                    String newClause = "";
+                    if (h.getSimpleHashValue().getValue().toString().toLowerCase().contains("##comma##")) {
+                        String[] parts = h.getSimpleHashValue().getValue().toString().toLowerCase().split("##comma##"); //NON-NLS
+                        String hashList = "";
+                        for (String s : parts) {
+                            if (!hashList.isEmpty()) {
+                                hashList += ", ";
+                            }
+                            hashList += "\'" + s + "\'";
+                        }
+                        newClause = "md5 IN (" + hashList + ")";
+                    }
+                    else {
+                        newClause = "md5=\'" + h.getSimpleHashValue().getValue().toString().toLowerCase() + "\'"; //NON-NLS
+                    }
                     whereClause = addClause(whereClause, newClause);
                 } else {
                     addWarning("Could not process hash type " + h.getType().getValue().toString()); //NON-NLS
diff --git a/Core/src/org/sleuthkit/autopsy/modules/stix/STIXReportModule.java b/Core/src/org/sleuthkit/autopsy/modules/stix/STIXReportModule.java
index 6431f4ab87..e9f7bd4c43 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/stix/STIXReportModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/stix/STIXReportModule.java
@@ -72,8 +72,8 @@ public class STIXReportModule implements GeneralReportModule {
     private String reportPath;
     private boolean reportAllResults;

-    private final Map idToObjectMap = new HashMap();
-    private final Map idToResult = new HashMap();
+    private Map idToObjectMap = new HashMap();
+    private Map idToResult = new HashMap();

     private List registryFileData = null;

@@ -189,6 +189,10 @@ public class STIXReportModule implements GeneralReportModule {
                         MessageNotifyUtil.MessageType.ERROR);
                 hadErrors = true;
             }
+
+            // Clear out the ID maps before loading the next file
+            idToObjectMap = new HashMap();
+            idToResult = new HashMap();
         }

         // Close the output file
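Review note on the EvalFileObj change above: when a STIX file object carries several MD5 hashes joined with the "##comma##" marker, the patch now splits them and builds a single md5 IN ('h1', 'h2', ...) clause instead of an equality test against the whole joined string, which could never match. A standalone sketch of that clause-building step, assuming the same ##comma## convention (the helper name below is hypothetical, and a StringBuilder is used purely for the illustration; the patch itself concatenates Strings):

    public class Md5ClauseDemo {

        // Builds the md5 predicate the same way the patched loop does:
        // a single value becomes md5='...'; a ##comma##-separated list becomes md5 IN (...).
        static String buildMd5Clause(String rawValue) {
            String value = rawValue.toLowerCase();
            if (value.contains("##comma##")) {
                String[] parts = value.split("##comma##");
                StringBuilder hashList = new StringBuilder();
                for (String s : parts) {
                    if (hashList.length() > 0) {
                        hashList.append(", ");
                    }
                    hashList.append("'").append(s).append("'");
                }
                return "md5 IN (" + hashList + ")";
            }
            return "md5='" + value + "'";
        }

        public static void main(String[] args) {
            System.out.println(buildMd5Clause("AABB01"));
            // -> md5='aabb01'
            System.out.println(buildMd5Clause("AABB01##comma##CCDD02"));
            // -> md5 IN ('aabb01', 'ccdd02')
        }
    }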
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/NavPanel.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/NavPanel.java
index 70601742bf..75f98c010c 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/NavPanel.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/navpanel/NavPanel.java
@@ -42,6 +42,8 @@ import javafx.scene.layout.Priority;
 import javafx.scene.layout.VBox;
 import org.apache.commons.lang3.StringUtils;
 import org.openide.util.Exceptions;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType;
 import org.sleuthkit.autopsy.imagegallery.FXMLConstructor;
 import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
 import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute;
@@ -205,6 +207,7 @@ public class NavPanel extends TabPane {
      *
      * @param grouping
      */
+    @ThreadConfined(type = ThreadType.JFX)
     public void setFocusedGroup(DrawableGroup grouping) {
         List path = groupingToPath(grouping);

@@ -212,7 +215,12 @@ public class NavPanel extends TabPane {
         final GroupTreeItem treeItemForGroup = ((GroupTreeItem) activeTreeProperty.get().getRoot()).getTreeItemForPath(path);

         if (treeItemForGroup != null) {
-            Platform.runLater(() -> {
+            /* When the code below used to be posted to the FX thread via Platform.runLater(), it would
+             * get into infinite loops when the next-group button was pressed quickly,
+             * because the updates became out of order and History could not keep
+             * track of what was current. Currently (4/2/15), this method is
+             * already on the FX thread, so it is OK. */
+            //Platform.runLater(() -> {
                 TreeItem ti = treeItemForGroup;
                 while (ti != null) {
                     ti.setExpanded(true);
@@ -223,7 +231,7 @@ public class NavPanel extends TabPane {
                     activeTreeProperty.get().getSelectionModel().select(treeItemForGroup);
                     activeTreeProperty.get().scrollTo(row);
                 }
-            });
+            //});
         }
     }

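Review note on the NavPanel change above: setFocusedGroup() is now annotated @ThreadConfined(type = ThreadType.JFX) and the Platform.runLater() wrapper is commented out, so the method assumes it is already running on the JavaFX Application Thread instead of re-posting the work, which had let updates arrive out of order. A small sketch of the same confinement idea using only the standard JavaFX Platform API (the class and method names are hypothetical, not Autopsy's, and the code is meant to be called inside an already-started JavaFX application):

    import javafx.application.Platform;

    public class FxConfinedDemo {

        // Must be called on the JavaFX Application Thread, mirroring the
        // @ThreadConfined(type = ThreadType.JFX) contract placed on setFocusedGroup().
        public void focusGroup(String groupName) {
            if (!Platform.isFxApplicationThread()) {
                // Fail fast instead of silently re-posting with runLater(), which
                // could reorder this update relative to other UI state changes.
                throw new IllegalStateException("focusGroup() must run on the JavaFX thread");
            }
            // ... expand tree items and select the group directly, no runLater() needed ...
            System.out.println("Focusing group: " + groupName);
        }

        // Callers that are not on the FX thread hop onto it exactly once, up front.
        public void focusGroupFromAnyThread(String groupName) {
            if (Platform.isFxApplicationThread()) {
                focusGroup(groupName);
            } else {
                Platform.runLater(() -> focusGroup(groupName));
            }
        }
    }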
diff --git a/docs/doxygen-user/images/ingest_pipeline.PNG b/docs/doxygen-user/images/ingest_pipeline.PNG
old mode 100755
new mode 100644
diff --git a/docs/doxygen-user/images/ingest_pipeline.png b/docs/doxygen-user/images/ingest_pipeline.png
deleted file mode 100644
index a91b73f642..0000000000
Binary files a/docs/doxygen-user/images/ingest_pipeline.png and /dev/null differ
diff --git a/test/script/regression.py b/test/script/regression.py
index 08938f371b..156a4f1b07 100755
--- a/test/script/regression.py
+++ b/test/script/regression.py
@@ -247,8 +247,8 @@ class TestRunner(object):
         if test_data.main_config.timing:
             print("Run time test passed: ", test_data.run_time_passed)
             test_data.overall_passed = (test_data.html_report_passed and
-            test_data.errors_diff_passed and test_data.db_diff_passed and
-            test_data.run_time_passed)
+            test_data.errors_diff_passed and test_data.db_diff_passed)
+            # test_data.run_time_passed not considered for test_data.overall_passed
         # otherwise, do the usual
         else:
             test_data.overall_passed = (test_data.html_report_passed and
@@ -345,8 +345,8 @@ class TestRunner(object):
            if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)):
                shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth)
            shutil.copy(dbdumpinpth, dbdumpoutpth)
-           error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt")
-           shutil.copy(test_data.sorted_log, error_pth)
+           error_pth = make_path(tmpdir, test_data.image_name+"Exceptions.txt")
+           shutil.copy(test_data.common_log_path, error_pth)
         except IOError as e:
             Errors.print_error(str(e))
             print(str(e))
@@ -451,7 +451,6 @@ class TestData(object):
         antlog_dir: a pathto_File, the antlog.txt file
         test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt
         common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file
-        sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file
         reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder
         gold_data_dir: a pathto_Dir, the gold standard directory
         gold_archive: a pathto_File, the gold standard archive
@@ -500,7 +499,6 @@ class TestData(object):
         self.test_dbdump = make_path(self.output_path,
        self.image_name + "DBDump.txt")
         self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG)
-        self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt")
         self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports")
         self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name)
         self.gold_archive = make_path(self.main_config.gold,
@@ -579,13 +577,13 @@ class TestData(object):
         return self._get_path_to_file(file_type, "BlackboardDump.txt")

     def get_sorted_errors_path(self, file_type):
-        """Get the path to the SortedErrors file that corresponds to the given
+        """Get the path to the Exceptions (SortedErrors) file that corresponds to the given
         DBType.

         Args:
             file_type: the DBType of the path to be generated
         """
-        return self._get_path_to_file(file_type, "SortedErrors.txt")
+        return self._get_path_to_file(file_type, "Exceptions.txt")

     def get_db_dump_path(self, file_type):
         """Get the path to the DBDump file that corresponds to the given DBType.
@@ -1261,9 +1259,6 @@ class Logs(object):
         logs_path = test_data.logs_dir
         common_log = codecs.open(test_data.common_log_path, "w", "utf_8")
         warning_log = codecs.open(test_data.warning_log, "w", "utf_8")
-        common_log.write("--------------------------------------------------\n")
-        common_log.write(test_data.image_name + "\n")
-        common_log.write("--------------------------------------------------\n")
         rep_path = make_local_path(test_data.main_config.output_dir)
         rep_path = rep_path.replace("\\\\", "\\")
         for file in os.listdir(logs_path):
@@ -1281,8 +1276,8 @@ class Logs(object):
                     log.close()
            common_log.write("\n")
            common_log.close()
-           print(test_data.sorted_log)
-           srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log]
+           print(test_data.common_log_path)
+           srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.common_log_path]
            subprocess.call(srtcmdlst)
         except (OSError, IOError) as e:
             Errors.print_error("Error: Unable to generate the common log.")
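Review note on the regression.py changes above: the separate sorted_log (SortedErrors.txt) is removed; the common log is now copied out as Exceptions.txt and sorted in place via ["sort", common_log_path, "-o", common_log_path]. GNU sort reads all of its input before it begins writing when -o is used, so passing the same path as input and output is safe. Purely as an illustration of that read-everything-then-rewrite pattern, in Java to match the other sketches in this review (the file name is hypothetical and this code is not part of the patch):

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Collections;
    import java.util.List;

    public class SortLogInPlace {
        public static void main(String[] args) throws IOException {
            // Hypothetical path; the regression script uses <image_name> + COMMON_LOG.
            Path log = Paths.get("AutopsyErrors.txt");

            // Read everything first, then overwrite the same file -- the same reason
            // "sort file -o file" is safe: all input is consumed before output starts.
            List<String> lines = Files.readAllLines(log, StandardCharsets.UTF_8);
            Collections.sort(lines);
            Files.write(log, lines, StandardCharsets.UTF_8);
        }
    }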