Merge branch 'develop' of https://github.com/sleuthkit/autopsy into messaging_prototype
Commit d2e954b28d
@@ -256,8 +256,9 @@ final class InterestingItemDefsPanel extends IngestModuleGlobalSettingsPanel imp
             option = JOptionPane.showConfirmDialog(null, panel, NbBundle.getMessage(FilesSetPanel.class, "FilesSetPanel.title"), JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE);
         } while (option == JOptionPane.OK_OPTION && !panel.isValidDefinition());
 
-        // If rule set with same name already exists, do not add to the filesSets hashMap.
-        if(this.filesSets.containsKey(panel.getFilesSetName())) {
+        // While adding new ruleset(selectedSet == null), if rule set with same name already exists, do not add to the filesSets hashMap.
+        // In case of editing an existing ruleset(selectedSet != null), following check is not performed.
+        if(this.filesSets.containsKey(panel.getFilesSetName()) && selectedSet == null) {
             MessageNotifyUtil.Message.error(NbBundle.getMessage(this.getClass(),
                     "InterestingItemDefsPanel.doFileSetsDialog.duplicateRuleSet.text",
                     panel.getFilesSetName()));
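The guard above now rejects a duplicate name only while a new set is being created (selectedSet == null); an edited set may keep its existing name. A minimal standalone sketch of that decision, using an illustrative FilesSetStore class and a plain Map rather than the panel's actual fields:

    import java.util.HashMap;
    import java.util.Map;

    class FilesSetStore {
        private final Map<String, String> filesSets = new HashMap<>(); // set name -> definition (simplified)

        // isNewSet mirrors "selectedSet == null" in the panel code.
        boolean putDefinition(String name, String definition, boolean isNewSet) {
            if (isNewSet && filesSets.containsKey(name)) {
                return false; // adding: reject a duplicate name
            }
            filesSets.put(name, definition); // editing: overwrite the existing entry
            return true;
        }
    }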
@@ -315,7 +316,7 @@ final class InterestingItemDefsPanel extends IngestModuleGlobalSettingsPanel imp
                 rules.remove(selectedRule.getUuid());
             }
             FilesSet.Rule newRule = new FilesSet.Rule(panel.getRuleName(), panel.getFileNameFilter(), panel.getMetaTypeFilter(), panel.getPathFilter());
-            rules.put(Integer.toString(newRule.hashCode()), newRule);
+            rules.put(newRule.getUuid(), newRule);
 
             // Add the new/edited files set definition, replacing any previous
             // definition with the same name and refreshing the display.
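Keying the rules map by the rule's UUID rather than Integer.toString(newRule.hashCode()) gives each rule a stable, collision-free identifier, so editing a rule removes exactly the old entry. A hedged sketch of the pattern, with a stripped-down Rule class standing in for FilesSet.Rule:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.UUID;

    class RuleStore {
        static class Rule {
            private final String uuid = UUID.randomUUID().toString(); // stable per-instance key
            String getUuid() { return uuid; }
        }

        private final Map<String, Rule> rules = new HashMap<>();

        void replaceRule(Rule oldRule, Rule newRule) {
            if (oldRule != null) {
                rules.remove(oldRule.getUuid()); // remove the edited rule by its stable key
            }
            // hashCode() can collide and changes whenever the rule's fields change,
            // so the UUID is used as the map key instead.
            rules.put(newRule.getUuid(), newRule);
        }
    }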
@@ -55,8 +55,8 @@ class EvalDomainObj extends EvaluatableObject {
         // Since we have single URL artifacts, ALL and NONE conditions probably don't make sense to test
         if (!((obj.getValue().getApplyCondition() == null)
                 || (obj.getValue().getApplyCondition() == ConditionApplicationEnum.ANY))) {
-            return new ObservableResult(id, "URIObject: Can not process apply condition " + obj.getValue().getApplyCondition().toString() //NON-NLS
-                    + " on URI object", spacing, ObservableResult.ObservableState.INDETERMINATE, null); //NON-NLS
+            return new ObservableResult(id, "DomainObject: Can not process apply condition " + obj.getValue().getApplyCondition().toString() //NON-NLS
+                    + " on Domain object", spacing, ObservableResult.ObservableState.INDETERMINATE, null); //NON-NLS
         }
 
         // If the condition is not "CONTAINS", add a warning that it's being ignored
@@ -171,7 +171,21 @@ class EvalFileObj extends EvaluatableObject {
                 for (HashType h : obj.getHashes().getHashes()) {
                     if (h.getSimpleHashValue() != null) {
                         if (h.getType().getValue().equals("MD5")) { //NON-NLS
-                            String newClause = "md5=\'" + h.getSimpleHashValue().getValue().toString().toLowerCase() + "\'"; //NON-NLS
+                            String newClause = "";
+                            if(h.getSimpleHashValue().getValue().toString().toLowerCase().contains("##comma##")){
+                                String[] parts = h.getSimpleHashValue().getValue().toString().toLowerCase().split("##comma##"); //NON-NLS
+                                String hashList = "";
+                                for(String s:parts){
+                                    if(!hashList.isEmpty()){
+                                        hashList += ", ";
+                                    }
+                                    hashList += "\'" + s + "\'";
+                                }
+                                newClause = "md5 IN (" + hashList + ")";
+                            }
+                            else{
+                                newClause = "md5=\'" + h.getSimpleHashValue().getValue().toString().toLowerCase() + "\'"; //NON-NLS
+                            }
                             whereClause = addClause(whereClause, newClause);
                         } else {
                             addWarning("Could not process hash type " + h.getType().getValue().toString()); //NON-NLS
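The added branch handles a value that packs several MD5 hashes into one string separated by the literal "##comma##" marker, turning it into a SQL IN list instead of a single equality test. A self-contained sketch of that clause construction (String.join replaces the manual concatenation; the class and method names are illustrative, not the module's API):

    public class Md5ClauseSketch {
        static String buildMd5Clause(String hashValue) {
            String value = hashValue.toLowerCase();
            if (value.contains("##comma##")) {
                // Quote each hash and join them into a single IN (...) list.
                java.util.List<String> quoted = new java.util.ArrayList<>();
                for (String part : value.split("##comma##")) {
                    quoted.add("'" + part + "'");
                }
                return "md5 IN (" + String.join(", ", quoted) + ")";
            }
            return "md5='" + value + "'";
        }

        public static void main(String[] args) {
            System.out.println(buildMd5Clause("AABB##comma##CCDD")); // md5 IN ('aabb', 'ccdd')
            System.out.println(buildMd5Clause("AABB"));              // md5='aabb'
        }
    }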
@@ -72,8 +72,8 @@ public class STIXReportModule implements GeneralReportModule {
     private String reportPath;
     private boolean reportAllResults;
 
-    private final Map<String, ObjectType> idToObjectMap = new HashMap<String, ObjectType>();
-    private final Map<String, ObservableResult> idToResult = new HashMap<String, ObservableResult>();
+    private Map<String, ObjectType> idToObjectMap = new HashMap<String, ObjectType>();
+    private Map<String, ObservableResult> idToResult = new HashMap<String, ObservableResult>();
 
     private List<EvalRegistryObj.RegistryFileInfo> registryFileData = null;
 
@@ -189,6 +189,10 @@ public class STIXReportModule implements GeneralReportModule {
                         MessageNotifyUtil.MessageType.ERROR);
                 hadErrors = true;
             }
+
+            // Clear out the ID maps before loading the next file
+            idToObjectMap = new HashMap<String, ObjectType>();
+            idToResult = new HashMap<String, ObservableResult>();
         }
 
         // Close the output file
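Dropping final from idToObjectMap and idToResult lets the module swap in fresh maps after each STIX file, so observables from one file cannot resolve against IDs parsed from another. A sketch of that per-file reset, with Object standing in for the STIX binding types and a hypothetical processFiles loop (not the module's real method):

    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class StixIdMapReset {
        // Non-final so each input file starts with empty maps.
        private Map<String, Object> idToObjectMap = new HashMap<>();
        private Map<String, Object> idToResult = new HashMap<>();

        void processFiles(List<String> stixFiles) {
            for (String file : stixFiles) {
                // ... parse the file, populate idToObjectMap, evaluate into idToResult ...

                // Clear out the ID maps before loading the next file.
                idToObjectMap = new HashMap<>();
                idToResult = new HashMap<>();
                // (Keeping the fields final and calling clear() would achieve the same reset.)
            }
        }
    }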
@@ -42,6 +42,8 @@ import javafx.scene.layout.Priority;
 import javafx.scene.layout.VBox;
 import org.apache.commons.lang3.StringUtils;
 import org.openide.util.Exceptions;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined;
+import org.sleuthkit.autopsy.coreutils.ThreadConfined.ThreadType;
 import org.sleuthkit.autopsy.imagegallery.FXMLConstructor;
 import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
 import org.sleuthkit.autopsy.imagegallery.datamodel.DrawableAttribute;
@@ -205,6 +207,7 @@ public class NavPanel extends TabPane {
      *
      * @param grouping
      */
+    @ThreadConfined(type = ThreadType.JFX)
     public void setFocusedGroup(DrawableGroup grouping) {
 
         List<String> path = groupingToPath(grouping);
@@ -212,7 +215,12 @@ public class NavPanel extends TabPane {
         final GroupTreeItem treeItemForGroup = ((GroupTreeItem) activeTreeProperty.get().getRoot()).getTreeItemForPath(path);
 
         if (treeItemForGroup != null) {
-            Platform.runLater(() -> {
+            /* When we used to run the below code on the FX thread, it would
+             * get into infinite loops when the next group button was pressed quickly
+             * because the updates became out of order and History could not keep
+             * track of what was current. Currently (4/2/15), this method is
+             * already on the FX thread, so it is OK. */
+            //Platform.runLater(() -> {
                 TreeItem<TreeNode> ti = treeItemForGroup;
                 while (ti != null) {
                     ti.setExpanded(true);
@@ -223,7 +231,7 @@
                 activeTreeProperty.get().getSelectionModel().select(treeItemForGroup);
                 activeTreeProperty.get().scrollTo(row);
             }
-            });
+            //});
         }
     }
 
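Taken together, the NavPanel hunks stop re-posting the work with Platform.runLater and instead mark setFocusedGroup as confined to the JavaFX thread via @ThreadConfined(ThreadType.JFX); re-queuing updates that are already on the FX thread could interleave rapid "next group" presses out of order. A hypothetical sketch of that confinement pattern against a plain TreeView (the class, method, and guard are illustrative, not Autopsy's API):

    import javafx.application.Platform;
    import javafx.scene.control.TreeItem;
    import javafx.scene.control.TreeView;

    class FocusGroupSketch {
        static <T> void focusItem(TreeView<T> tree, TreeItem<T> item) {
            // Document the confinement contract: callers off the FX thread are
            // expected to wrap this call in Platform.runLater themselves.
            if (!Platform.isFxApplicationThread()) {
                throw new IllegalStateException("focusItem must run on the JavaFX application thread");
            }
            if (item == null) {
                return;
            }
            // Expand every ancestor so the item is visible, then select it and scroll to it.
            for (TreeItem<T> ti = item; ti != null; ti = ti.getParent()) {
                ti.setExpanded(true);
            }
            int row = tree.getRow(item);
            tree.getSelectionModel().select(item);
            tree.scrollTo(row);
        }
    }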
docs/doxygen-user/images/ingest_pipeline.PNG: binary file (17 KiB) not shown; mode changed from executable file to normal file.
@@ -247,8 +247,8 @@ class TestRunner(object):
         if test_data.main_config.timing:
             print("Run time test passed: ", test_data.run_time_passed)
             test_data.overall_passed = (test_data.html_report_passed and
-            test_data.errors_diff_passed and test_data.db_diff_passed and
-            test_data.run_time_passed)
+            test_data.errors_diff_passed and test_data.db_diff_passed)
+            # test_data.run_time_passed not considered for test_data.overall_passed
         # otherwise, do the usual
         else:
             test_data.overall_passed = (test_data.html_report_passed and
@@ -345,8 +345,8 @@ class TestRunner(object):
             if file_exists(test_data.get_sorted_data_path(DBType.OUTPUT)):
                 shutil.copy(test_data.get_sorted_data_path(DBType.OUTPUT), dataoutpth)
             shutil.copy(dbdumpinpth, dbdumpoutpth)
-            error_pth = make_path(tmpdir, test_data.image_name+"SortedErrors.txt")
-            shutil.copy(test_data.sorted_log, error_pth)
+            error_pth = make_path(tmpdir, test_data.image_name+"Exceptions.txt")
+            shutil.copy(test_data.common_log_path, error_pth)
         except IOError as e:
             Errors.print_error(str(e))
             print(str(e))
@@ -451,7 +451,6 @@ class TestData(object):
         antlog_dir: a pathto_File, the antlog.txt file
         test_dbdump: a pathto_File, the database dump, IMAGENAMEDump.txt
         common_log_path: a pathto_File, the IMAGE_NAMECOMMON_LOG file
-        sorted_log: a pathto_File, the IMAGENAMESortedErrors.txt file
         reports_dir: a pathto_Dir, the AutopsyTestCase/Reports folder
         gold_data_dir: a pathto_Dir, the gold standard directory
         gold_archive: a pathto_File, the gold standard archive
@@ -500,7 +499,6 @@ class TestData(object):
         self.test_dbdump = make_path(self.output_path, self.image_name +
         "DBDump.txt")
         self.common_log_path = make_local_path(self.output_path, self.image_name + COMMON_LOG)
-        self.sorted_log = make_local_path(self.output_path, self.image_name + "SortedErrors.txt")
         self.reports_dir = make_path(self.output_path, AUTOPSY_TEST_CASE, "Reports")
         self.gold_data_dir = make_path(self.main_config.img_gold, self.image_name)
         self.gold_archive = make_path(self.main_config.gold,
@@ -579,13 +577,13 @@ class TestData(object):
         return self._get_path_to_file(file_type, "BlackboardDump.txt")
 
     def get_sorted_errors_path(self, file_type):
-        """Get the path to the SortedErrors file that corresponds to the given
+        """Get the path to the Exceptions (SortedErrors) file that corresponds to the given
         DBType.
 
         Args:
             file_type: the DBType of the path to be generated
         """
-        return self._get_path_to_file(file_type, "SortedErrors.txt")
+        return self._get_path_to_file(file_type, "Exceptions.txt")
 
     def get_db_dump_path(self, file_type):
         """Get the path to the DBDump file that corresponds to the given DBType.
@@ -1261,9 +1259,6 @@ class Logs(object):
             logs_path = test_data.logs_dir
             common_log = codecs.open(test_data.common_log_path, "w", "utf_8")
             warning_log = codecs.open(test_data.warning_log, "w", "utf_8")
-            common_log.write("--------------------------------------------------\n")
-            common_log.write(test_data.image_name + "\n")
-            common_log.write("--------------------------------------------------\n")
             rep_path = make_local_path(test_data.main_config.output_dir)
             rep_path = rep_path.replace("\\\\", "\\")
             for file in os.listdir(logs_path):
@@ -1281,8 +1276,8 @@ class Logs(object):
                     log.close()
             common_log.write("\n")
             common_log.close()
-            print(test_data.sorted_log)
-            srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.sorted_log]
+            print(test_data.common_log_path)
+            srtcmdlst = ["sort", test_data.common_log_path, "-o", test_data.common_log_path]
             subprocess.call(srtcmdlst)
         except (OSError, IOError) as e:
             Errors.print_error("Error: Unable to generate the common log.")